Posted to commits@hbase.apache.org by st...@apache.org on 2010/05/07 21:26:51 UTC
svn commit: r942186 [18/18] - in /hadoop/hbase/trunk: ./
contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/
core/src/main/java/org/apache/hadoop/hbase/
core/src/main/java/org/apache/hadoop/hbase/client/
core/src/main/java/org/apache/...
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java Fri May 7 19:26:45 2010
@@ -95,7 +95,7 @@ public class TestHLog extends HBaseTestC
byte [] family = Bytes.toBytes("column");
byte [] qualifier = Bytes.toBytes(Integer.toString(j));
byte [] column = Bytes.toBytes("column:" + Integer.toString(j));
- edit.add(new KeyValue(rowName, family, qualifier,
+ edit.add(new KeyValue(rowName, family, qualifier,
System.currentTimeMillis(), column));
System.out.println("Region " + i + ": " + edit);
log.append(infos[i], tableName, edit,
@@ -223,7 +223,7 @@ public class TestHLog extends HBaseTestC
Bytes.equals(regions[i], "2".getBytes()));
}
}
-
+
private void verifySplits(List<Path> splits, final int howmany)
throws IOException {
assertEquals(howmany, splits.size());
@@ -270,7 +270,7 @@ public class TestHLog extends HBaseTestC
long timestamp = System.currentTimeMillis();
WALEdit cols = new WALEdit();
for (int i = 0; i < COL_COUNT; i++) {
- cols.add(new KeyValue(row, Bytes.toBytes("column"),
+ cols.add(new KeyValue(row, Bytes.toBytes("column"),
Bytes.toBytes(Integer.toString(i)),
timestamp, new byte[] { (byte)(i + '0') }));
}
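
The TestHLog hunks above touch only trailing whitespace, but they show the write path under test: cells are packed into a WALEdit and handed to HLog.append. A minimal sketch of one edit, as a method-body fragment assuming the test's log and tableName fields and using only the constructors visible in the hunk:

    byte [] rowName = Bytes.toBytes("row");
    byte [] family = Bytes.toBytes("column");
    byte [] qualifier = Bytes.toBytes("0");
    byte [] value = Bytes.toBytes("column:0");
    WALEdit edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, qualifier,
        System.currentTimeMillis(), value));
    // Handed to the log as log.append(info, tableName, edit, ...);
    // the trailing arguments are cut off in the hunk above.
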
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java Fri May 7 19:26:45 2010
@@ -45,7 +45,7 @@ public class TestLogRolling extends HBas
private HLog log;
private String tableName;
private byte[] value;
-
+
/**
* constructor
* @throws Exception
@@ -58,14 +58,14 @@ public class TestLogRolling extends HBas
this.log = null;
this.tableName = null;
this.value = null;
-
+
String className = this.getClass().getName();
StringBuilder v = new StringBuilder(className);
while (v.length() < 1000) {
v.append(className);
}
value = Bytes.toBytes(v.toString());
-
+
} catch (Exception e) {
LOG.fatal("error in constructor", e);
throw e;
@@ -95,13 +95,13 @@ public class TestLogRolling extends HBas
// a chance to run.
conf.setInt(HConstants.THREAD_WAKE_FREQUENCY, 2 * 1000);
}
-
+
private void startAndWriteData() throws Exception {
// When the META table can be opened, the region servers are running
new HTable(conf, HConstants.META_TABLE_NAME);
this.server = cluster.getRegionServerThreads().get(0).getRegionServer();
this.log = server.getLog();
-
+
// Create the test table and open it
HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
@@ -125,7 +125,7 @@ public class TestLogRolling extends HBas
/**
* Tests that logs are deleted
- *
+ *
* @throws Exception
*/
public void testLogRolling() throws Exception {
@@ -133,18 +133,18 @@ public class TestLogRolling extends HBas
try {
startAndWriteData();
LOG.info("after writing there are " + log.getNumLogFiles() + " log files");
-
+
// flush all regions
-
+
List<HRegion> regions =
new ArrayList<HRegion>(server.getOnlineRegions());
for (HRegion r: regions) {
r.flushcache();
}
-
+
// Now roll the log
log.rollWriter();
-
+
int count = log.getNumLogFiles();
LOG.info("after flushing all regions and rolling logs there are " +
log.getNumLogFiles() + " log files");
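
The testLogRolling hunk exercises the roll path end to end: flush every online region so no log file is pinned by unflushed edits, then roll the writer so fully-flushed logs become removable. The sequence, using only the calls in the hunk (server and log are the test's fields):

    // Flush all regions so their edits are persisted to store files.
    List<HRegion> regions =
        new ArrayList<HRegion>(server.getOnlineRegions());
    for (HRegion r : regions) {
      r.flushcache();
    }
    // Rolling closes the current log; logs with no unflushed edits
    // can then be cleaned up, which is what the test counts.
    log.rollWriter();
    int count = log.getNumLogFiles();
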
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java Fri May 7 19:26:45 2010
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.thrift.ge
import org.apache.hadoop.hbase.util.Bytes;
/**
- * Unit testing for ThriftServer.HBaseHandler, a part of the
- * org.apache.hadoop.hbase.thrift package.
+ * Unit testing for ThriftServer.HBaseHandler, a part of the
+ * org.apache.hadoop.hbase.thrift package.
*/
public class TestThriftServer extends HBaseClusterTestCase {
@@ -49,11 +49,11 @@ public class TestThriftServer extends HB
private static byte[] valueDname = Bytes.toBytes("valueD");
/**
- * Runs all of the tests under a single JUnit test method. We
+ * Runs all of the tests under a single JUnit test method. We
* consolidate all testing to one method because HBaseClusterTestCase
- * is prone to OutOfMemoryExceptions when there are three or more
+ * is prone to OutOfMemoryExceptions when there are three or more
* JUnit test methods.
- *
+ *
* @throws Exception
*/
public void testAll() throws Exception {
@@ -65,10 +65,10 @@ public class TestThriftServer extends HB
}
/**
- * Tests for creating, enabling, disabling, and deleting tables. Also
- * tests that creating a table with an invalid column name yields an
+ * Tests for creating, enabling, disabling, and deleting tables. Also
+ * tests that creating a table with an invalid column name yields an
* IllegalArgument exception.
- *
+ *
* @throws Exception
*/
public void doTestTableCreateDrop() throws Exception {
@@ -95,10 +95,10 @@ public class TestThriftServer extends HB
}
/**
- * Tests adding a series of Mutations and BatchMutations, including a
- * delete mutation. Also tests data retrieval, and getting back multiple
- * versions.
- *
+ * Tests adding a series of Mutations and BatchMutations, including a
+ * delete mutation. Also tests data retrieval, and getting back multiple
+ * versions.
+ *
* @throws Exception
*/
public void doTestTableMutations() throws Exception {
@@ -159,10 +159,10 @@ public class TestThriftServer extends HB
}
/**
- * Similar to testTableMutations(), except Mutations are applied with
- * specific timestamps and data retrieval uses these timestamps to
- * extract specific versions of data.
- *
+ * Similar to testTableMutations(), except Mutations are applied with
+ * specific timestamps and data retrieval uses these timestamps to
+ * extract specific versions of data.
+ *
* @throws Exception
*/
public void doTestTableTimestampsAndColumns() throws Exception {
@@ -199,10 +199,10 @@ public class TestThriftServer extends HB
//assertTrue(Bytes.equals(rowResult1.columns.get(columnAname).value, valueAname));
assertTrue(Bytes.equals(rowResult1.columns.get(columnBname).value, valueBname));
assertTrue(Bytes.equals(rowResult2.columns.get(columnBname).value, valueCname));
-
+
// ColumnAname has been deleted, and will never be visible even with a getRowTs()
assertFalse(rowResult2.columns.containsKey(columnAname));
-
+
List<byte[]> columns = new ArrayList<byte[]>();
columns.add(columnBname);
@@ -213,7 +213,7 @@ public class TestThriftServer extends HB
rowResult1 = handler.getRowWithColumnsTs(tableAname, rowAname, columns, time1).get(0);
assertTrue(Bytes.equals(rowResult1.columns.get(columnBname).value, valueBname));
assertFalse(rowResult1.columns.containsKey(columnAname));
-
+
// Apply some timestamped deletes
// this actually deletes _everything_.
// nukes everything in columnB: forever.
@@ -238,9 +238,9 @@ public class TestThriftServer extends HB
}
/**
- * Tests the four different scanner-opening methods (with and without
- * a stoprow, with and without a timestamp).
- *
+ * Tests the four different scanner-opening methods (with and without
+ * a stoprow, with and without a timestamp).
+ *
* @throws Exception
*/
public void doTestTableScanners() throws Exception {
@@ -289,12 +289,12 @@ public class TestThriftServer extends HB
closeScanner(scanner2, handler);
// Test a scanner on the first row and first column only, no timestamp
- int scanner3 = handler.scannerOpenWithStop(tableAname, rowAname, rowBname,
+ int scanner3 = handler.scannerOpenWithStop(tableAname, rowAname, rowBname,
getColumnList(true, false));
closeScanner(scanner3, handler);
// Test a scanner on the first row and second column only, with timestamp
- int scanner4 = handler.scannerOpenWithStopTs(tableAname, rowAname, rowBname,
+ int scanner4 = handler.scannerOpenWithStopTs(tableAname, rowAname, rowBname,
getColumnList(false, true), time1);
TRowResult rowResult4a = handler.scannerGet(scanner4).get(0);
assertEquals(rowResult4a.columns.size(), 1);
@@ -306,8 +306,8 @@ public class TestThriftServer extends HB
}
/**
- *
- * @return a List of ColumnDescriptors for use in creating a table. Has one
+ *
+ * @return a List of ColumnDescriptors for use in creating a table. Has one
* default ColumnDescriptor and one ColumnDescriptor with fewer versions
*/
private List<ColumnDescriptor> getColumnDescriptors() {
@@ -319,7 +319,7 @@ public class TestThriftServer extends HB
cDescriptors.add(cDescA);
// A slightly customized ColumnDescriptor (only 2 versions)
- ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
+ ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
false, "NONE", 0, 0, false, -1);
cDescriptors.add(cDescB);
@@ -327,7 +327,7 @@ public class TestThriftServer extends HB
}
/**
- *
+ *
* @param includeA whether or not to include columnA
* @param includeB whether or not to include columnB
* @return a List of column names for use in retrieving a scanner
@@ -340,8 +340,8 @@ public class TestThriftServer extends HB
}
/**
- *
- * @return a List of Mutations for a row, with columnA having valueA
+ *
+ * @return a List of Mutations for a row, with columnA having valueA
* and columnB having valueB
*/
private List<Mutation> getMutations() {
@@ -352,12 +352,12 @@ public class TestThriftServer extends HB
}
/**
- *
+ *
* @return a List of BatchMutations with the following effects:
* (rowA, columnA): delete
* (rowA, columnB): place valueC
* (rowB, columnA): place valueC
- * (rowB, columnB): place valueD
+ * (rowB, columnB): place valueD
*/
private List<BatchMutation> getBatchMutations() {
List<BatchMutation> batchMutations = new ArrayList<BatchMutation>();
@@ -381,9 +381,9 @@ public class TestThriftServer extends HB
}
/**
- * Asserts that the passed scanner is exhausted, and then closes
+ * Asserts that the passed scanner is exhausted, and then closes
* the scanner.
- *
+ *
* @param scannerId the scanner to close
* @param handler the HBaseHandler interfacing to HBase
* @throws Exception
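
The scanner hunks in TestThriftServer cover the four open variants; the WithStop forms bound the scan to the half-open row range [start, stop). A sketch of the open/get/close cycle, assuming the test's handler, table, and row fields and using only methods that appear in the hunks:

    // Open a scanner bounded to [rowA, rowB) over a chosen column list.
    int scannerId = handler.scannerOpenWithStop(tableAname, rowAname,
        rowBname, getColumnList(true, false));
    // scannerGet returns a batch of rows; take the first.
    TRowResult rowResult = handler.scannerGet(scannerId).get(0);
    // Always release the server-side scanner when done.
    closeScanner(scannerId, handler);
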
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/SoftValueSortedMapTest.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/SoftValueSortedMapTest.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/SoftValueSortedMapTest.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/SoftValueSortedMapTest.java Fri May 7 19:26:45 2010
@@ -33,9 +33,9 @@ public class SoftValueSortedMapTest {
byte[] block = new byte[849*1024*1024]; // FindBugs DLS_DEAD_LOCAL_STORE
System.out.println(map.size());
}
-
+
public static void main(String[] args) {
testMap(new SoftValueSortedMap<Integer, Integer>());
- testMap(new TreeMap<Integer, Integer>());
+ testMap(new TreeMap<Integer, Integer>());
}
}
\ No newline at end of file
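
For context on the test above: the large dead allocation is there to pressure the garbage collector, since a SoftValueSortedMap holds its values through soft references and may shed entries when memory gets tight, while the TreeMap passed to the same method never does. A sketch assuming SoftValueSortedMap follows the SortedMap contract (put() is an assumption; only size() is visible in the hunk):

    SoftValueSortedMap<Integer, Integer> map =
        new SoftValueSortedMap<Integer, Integer>();
    map.put(0, 0); // put() assumed from the SortedMap contract
    // Pressure the collector; softly-held values may be reclaimed,
    // so size() can print smaller than the number of puts.
    byte[] block = new byte[849 * 1024 * 1024];
    System.out.println(map.size());
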
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java Fri May 7 19:26:45 2010
@@ -52,7 +52,7 @@ public class TestBase64 extends TestCase
*/
public void testBase64() throws UnsupportedEncodingException {
TreeMap<String, String> sorted = new TreeMap<String, String>();
-
+
for (int i = 0; i < uris.length; i++) {
byte[] bytes = uris[i].getBytes("UTF-8");
sorted.put(Base64.encodeBytes(bytes, Base64.ORDERED), uris[i]);
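
The TreeMap keyed on encoded strings works because Base64.ORDERED uses an alphabet whose lexicographic order matches that of the raw bytes. A sketch of that property using the same encodeBytes call seen in the hunk (the sample strings are placeholders):

    byte[] a = Bytes.toBytes("apple");
    byte[] b = Bytes.toBytes("banana");
    String encA = Base64.encodeBytes(a, Base64.ORDERED);
    String encB = Base64.encodeBytes(b, Base64.ORDERED);
    // Order-preserving: a sorts before b exactly when encA sorts
    // before encB.
    assertTrue((Bytes.compareTo(a, b) < 0) == (encA.compareTo(encB) < 0));
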
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java Fri May 7 19:26:45 2010
@@ -134,7 +134,7 @@ public class TestBytes extends TestCase
byte [] key2 = {4,9};
byte [] key2_2 = {4};
byte [] key3 = {5,11};
-
+
assertEquals(1, Bytes.binarySearch(arr, key1, 0, 1,
Bytes.BYTES_RAWCOMPARATOR));
assertEquals(0, Bytes.binarySearch(arr, key1, 1, 1,
@@ -150,7 +150,7 @@ public class TestBytes extends TestCase
assertEquals(5, Bytes.binarySearch(arr, key3, 1, 1,
Bytes.BYTES_RAWCOMPARATOR));
}
-
+
public void testIncrementBytes() throws IOException {
assertTrue(checkTestIncrementBytes(10, 1));
@@ -171,8 +171,8 @@ public class TestBytes extends TestCase
assertTrue(checkTestIncrementBytes(-12, -34565445));
assertTrue(checkTestIncrementBytes(-1546543452, -34565445));
}
-
- private static boolean checkTestIncrementBytes(long val, long amount)
+
+ private static boolean checkTestIncrementBytes(long val, long amount)
throws IOException {
byte[] value = Bytes.toBytes(val);
byte [] testValue = {-1, -1, -1, -1, -1, -1, -1, -1};
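
The binarySearch calls above probe a sorted byte[][] with an offset/length slice of the key. A sketch under the assumption that misses follow the java.util.Arrays convention of encoding the insertion point (the array contents here are illustrative, not the test's arr):

    // A sorted array of one-byte keys.
    byte [][] arr = { {0}, {4}, {8} };
    // Compare only the first byte (offset 0, length 1) of the probe:
    // {4, 9} hits index 1.
    int hit = Bytes.binarySearch(arr, new byte [] {4, 9}, 0, 1,
        Bytes.BYTES_RAWCOMPARATOR);
    // A probe between entries misses; assuming the java.util.Arrays
    // convention, the result encodes the insertion point as -(pos + 1).
    int miss = Bytes.binarySearch(arr, new byte [] {5}, 0, 1,
        Bytes.BYTES_RAWCOMPARATOR);
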
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestKeying.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestKeying.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestKeying.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestKeying.java Fri May 7 19:26:45 2010
@@ -52,7 +52,7 @@ public class TestKeying extends TestCase
checkTransform("filename");
}
-
+
private void checkTransform(final String u) {
String k = Keying.createKey(u);
String uri = Keying.keyToUri(k);
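
checkTransform pairs Keying.createKey with Keying.keyToUri; the round trip is assumed (from the method's name and the two calls visible above) to return the original string. A sketch of that property with a placeholder URI:

    String original = "http://example.com/index.html";
    String key = Keying.createKey(original);
    // keyToUri inverts createKey, so the round trip is lossless.
    assertEquals(original, Keying.keyToUri(key));
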
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java Fri May 7 19:26:45 2010
@@ -49,13 +49,13 @@ public class TestMergeTool extends HBase
// static final byte [] COLUMN_NAME = Bytes.toBytes("contents:");
static final byte [] FAMILY = Bytes.toBytes("contents");
static final byte [] QUALIFIER = Bytes.toBytes("dc");
-
+
private final HRegionInfo[] sourceRegions = new HRegionInfo[5];
private final HRegion[] regions = new HRegion[5];
private HTableDescriptor desc;
private byte [][][] rows;
private MiniDFSCluster dfsCluster = null;
-
+
@Override
public void setUp() throws Exception {
this.conf.set("hbase.hstore.compactionThreshold", "2");
@@ -70,45 +70,45 @@ public class TestMergeTool extends HBase
// Region 0 will contain the key range [row_0200,row_0300)
sourceRegions[0] = new HRegionInfo(this.desc, Bytes.toBytes("row_0200"),
Bytes.toBytes("row_0300"));
-
+
// Region 1 will contain the key range [row_0250,row_0400) and overlaps
// with Region 0
sourceRegions[1] =
new HRegionInfo(this.desc, Bytes.toBytes("row_0250"),
Bytes.toBytes("row_0400"));
-
+
// Region 2 will contain the key range [row_0100,row_0200) and is adjacent
// to Region 0 or the region resulting from the merge of Regions 0 and 1
sourceRegions[2] =
- new HRegionInfo(this.desc, Bytes.toBytes("row_0100"),
+ new HRegionInfo(this.desc, Bytes.toBytes("row_0100"),
Bytes.toBytes("row_0200"));
-
+
// Region 3 will contain the key range [row_0500,row_0600) and is not
// adjacent to any of Regions 0, 1, 2 or the merged result of any or all
// of those regions
sourceRegions[3] =
- new HRegionInfo(this.desc, Bytes.toBytes("row_0500"),
+ new HRegionInfo(this.desc, Bytes.toBytes("row_0500"),
Bytes.toBytes("row_0600"));
-
+
// Region 4 will have empty start and end keys and overlaps all regions.
sourceRegions[4] =
- new HRegionInfo(this.desc, HConstants.EMPTY_BYTE_ARRAY,
+ new HRegionInfo(this.desc, HConstants.EMPTY_BYTE_ARRAY,
HConstants.EMPTY_BYTE_ARRAY);
-
+
/*
* Now create some row keys
*/
this.rows = new byte [5][][];
this.rows[0] = Bytes.toByteArrays(new String[] { "row_0210", "row_0280" });
- this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
+ this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
"row_035" });
- this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
+ this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
"row_0175", "row_0175"});
- this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
+ this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
"row_0560", "row_0560", "row_0560"});
- this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
+ this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
"row_1000", "row_1000", "row_1000", "row_1000" });
-
+
// Start up dfs
this.dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
this.fs = this.dfsCluster.getFileSystem();
@@ -121,7 +121,7 @@ public class TestMergeTool extends HBase
// Note: we must call super.setUp after starting the mini cluster or
// we will end up with a local file system
-
+
super.setUp();
try {
// Create root and meta regions
@@ -145,7 +145,7 @@ public class TestMergeTool extends HBase
}
// Close root and meta regions
closeRootAndMeta();
-
+
} catch (Exception e) {
shutdownDfs(dfsCluster);
throw e;
@@ -157,7 +157,7 @@ public class TestMergeTool extends HBase
super.tearDown();
shutdownDfs(dfsCluster);
}
-
+
/*
* @param msg Message that describes this merge
* @param regionName1
@@ -178,7 +178,7 @@ public class TestMergeTool extends HBase
);
assertTrue("'" + msg + "' failed", errCode == 0);
HRegionInfo mergedInfo = merger.getMergedHRegionInfo();
-
+
// Now verify that we can read all the rows from regions 0, 1
// in the new merged region.
HRegion merged =
@@ -188,7 +188,7 @@ public class TestMergeTool extends HBase
LOG.info("Verified " + msg);
return merged;
}
-
+
private void verifyMerge(final HRegion merged, final int upperbound)
throws IOException {
//Test
@@ -207,9 +207,9 @@ public class TestMergeTool extends HBase
} finally {
scanner.close();
}
-
+
//!Test
-
+
for (int i = 0; i < upperbound; i++) {
for (int j = 0; j < rows[i].length; j++) {
Get get = new Get(rows[i][j]);
@@ -265,7 +265,7 @@ public class TestMergeTool extends HBase
merged = mergeAndVerify("merging regions 0+1+2 and 3",
merged.getRegionInfo().getRegionNameAsString(),
this.sourceRegions[3].getRegionNameAsString(), log, 4);
-
+
// Merge the result of merging regions 0, 1, 2 and 3 with region 4
merged = mergeAndVerify("merging regions 0+1+2+3 and 4",
merged.getRegionInfo().getRegionNameAsString(),
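
The setUp hunks above define five regions whose key ranges overlap, abut, or span everything, so each merge case is exercised. Region ranges are half-open [startKey, endKey), expressed with the constructor visible in the hunk (desc is the test's HTableDescriptor):

    // [row_0200, row_0300): the end key is exclusive.
    HRegionInfo r0 = new HRegionInfo(desc, Bytes.toBytes("row_0200"),
        Bytes.toBytes("row_0300"));
    // Overlaps r0 on [row_0250, row_0300).
    HRegionInfo r1 = new HRegionInfo(desc, Bytes.toBytes("row_0250"),
        Bytes.toBytes("row_0400"));
    // Empty start and end keys mean first and last possible row, so
    // this region spans, and thus overlaps, all of the others.
    HRegionInfo r4 = new HRegionInfo(desc, HConstants.EMPTY_BYTE_ARRAY,
        HConstants.EMPTY_BYTE_ARRAY);
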
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java Fri May 7 19:26:45 2010
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.Path;
*/
public class TestRootPath extends TestCase {
private static final Log LOG = LogFactory.getLog(TestRootPath.class);
-
+
/** The test */
public void testRootPath() {
try {
Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java (original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java Fri May 7 19:26:45 2010
@@ -120,7 +120,7 @@ public class TestHQuorumPeer extends HBa
server = servers.get(Long.valueOf(0));
assertEquals("foo.bar", server.addr.getHostName());
}
-
+
/**
* Test Case for HBASE-2305
*/
Modified: hadoop/hbase/trunk/core/src/test/resources/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/resources/log4j.properties?rev=942186&r1=942185&r2=942186&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/resources/log4j.properties (original)
+++ hadoop/hbase/trunk/core/src/test/resources/log4j.properties Fri May 7 19:26:45 2010
@@ -31,7 +31,7 @@ log4j.appender.DRFA.layout.ConversionPat
#
# console
-# Add "console" to rootlogger above if you want to use this
+# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
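
# Sketch, not part of the commit: the comment above means the appender
# only takes effect once "console" is listed on the root logger, e.g.
# (the INFO level here is an assumption, not taken from this file):
# log4j.rootLogger=INFO,console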