You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2007/08/23 01:59:31 UTC
svn commit: r568776 - in /lucene/hadoop/trunk/src/contrib/hbase: ./
src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/io/
src/java/org/apache/hadoop/hbase/mapred/
Author: jimk
Date: Wed Aug 22 16:59:30 2007
New Revision: 568776
URL: http://svn.apache.org/viewvc?rev=568776&view=rev
Log:
HADOOP-1746 Clean up findbugs warnings
Modified:
lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogEdit.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Wed Aug 22 16:59:30 2007
@@ -19,6 +19,7 @@
IMPROVEMENTS
HADOOP-1737 Make HColumnDescriptor data members publicly settable
+ HADOOP-1746 Clean up findbugs warnings
Below is the list of changes before 2007-08-18
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java Wed Aug 22 16:59:30 2007
@@ -185,9 +185,6 @@
/** Mechanism used by concrete implementation to shut down a particular scanner */
abstract void closeSubScanner(int i);
- /** Mechanism used to shut down the whole scan */
- public abstract void close();
-
/** {@inheritDoc} */
public boolean isWildcardScanner() {
return this.wildcardMatch;
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Wed Aug 22 16:59:30 2007
@@ -37,7 +37,7 @@
// For future backward compatibility
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)1;
- // Legal family names can only contain 'word characters' and end in a colon.
+ /** Legal family names can only contain 'word characters' and end in a colon. */
public static final Pattern LEGAL_FAMILY_NAME = Pattern.compile("\\w+:");
/**
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java Wed Aug 22 16:59:30 2007
@@ -83,7 +83,7 @@
long filenum = 0;
AtomicInteger numEntries = new AtomicInteger(0);
- Integer rollLock = Integer.valueOf(0);
+ Integer rollLock = new Integer(0);
/**
* Split up a bunch of log files, that are no longer being written to,
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogEdit.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogEdit.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogEdit.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogEdit.java Wed Aug 22 16:59:30 2007
@@ -34,7 +34,7 @@
private Text column = new Text();
private byte [] val;
private long timestamp;
- private final int MAX_VALUE_LEN = 128;
+ private static final int MAX_VALUE_LEN = 128;
/**
* Default constructor used by Writable
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Wed Aug 22 16:59:30 2007
@@ -67,6 +67,7 @@
public class HMaster implements HConstants, HMasterInterface,
HMasterRegionInterface, Runnable {
+ /** {@inheritDoc} */
public long getProtocolVersion(String protocol,
@SuppressWarnings("unused") long clientVersion) throws IOException {
@@ -322,7 +323,7 @@
LOG.warn("Deletion of " + parent.getRegionName() + " failed");
}
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(parent.getRegionName());
b.delete(lockid, COL_REGIONINFO);
b.delete(lockid, COL_SERVER);
@@ -388,7 +389,7 @@
+" no longer has references to " + parent.toString());
}
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(parent);
b.delete(lockid, splitColumn);
srvr.batchUpdate(metaRegionName, System.currentTimeMillis(), b);
@@ -542,7 +543,7 @@
private RootScanner rootScanner;
private Thread rootScannerThread;
- Integer rootScannerLock = Integer.valueOf(0);
+ Integer rootScannerLock = new Integer(0);
@SuppressWarnings("unchecked")
static class MetaRegion implements Comparable {
@@ -731,7 +732,7 @@
MetaScanner metaScanner;
private Thread metaScannerThread;
- Integer metaScannerLock = Integer.valueOf(0);
+ Integer metaScannerLock = new Integer(0);
/**
* The 'unassignedRegions' table maps from a region name to a HRegionInfo
@@ -938,6 +939,7 @@
Thread.currentThread().setName("HMaster");
try {
// Start things up
+ this.serverLeases.start();
this.rootScannerThread.start();
this.metaScannerThread.start();
@@ -1824,7 +1826,7 @@
// Remove server from root/meta entries
for (ToDoEntry e: toDoList) {
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(e.row);
if (e.deleteRegion) {
@@ -2094,7 +2096,7 @@
}
try {
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(regionInfo.regionName);
if (deleteRegion) {
@@ -2239,7 +2241,7 @@
serverAddress.toString());
try {
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(region.getRegionName());
b.put(lockid, COL_SERVER,
Writables.stringToBytes(serverAddress.toString()));
@@ -2404,7 +2406,7 @@
HRegionInfo info = region.getRegionInfo();
Text regionName = region.getRegionName();
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(regionName);
b.put(lockid, COL_REGIONINFO, Writables.getBytes(info));
server.batchUpdate(metaRegionName, System.currentTimeMillis(), b);
@@ -2689,7 +2691,7 @@
LOG.debug("updating columns in row: " + i.regionName);
}
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
lockid = b.startUpdate(i.regionName);
updateRegionInfo(b, i);
b.delete(lockid, COL_SERVER);
@@ -2846,7 +2848,7 @@
protected void updateRegionInfo(HRegionInterface server, Text regionName,
HRegionInfo i) throws IOException {
- BatchUpdate b = new BatchUpdate();
+ BatchUpdate b = new BatchUpdate(rand.nextLong());
long lockid = b.startUpdate(i.regionName);
b.put(lockid, COL_REGIONINFO, Writables.getBytes(i));
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java Wed Aug 22 16:59:30 2007
@@ -380,7 +380,6 @@
}
/** Shut down map iterators, and release the lock */
- @Override
public void close() {
if(! scannerClosed) {
try {
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Wed Aug 22 16:59:30 2007
@@ -74,7 +74,6 @@
public class HRegion implements HConstants {
static String SPLITDIR = "splits";
static String MERGEDIR = "merges";
- static String TMPREGION_PREFIX = "tmpregion_";
static final Random rand = new Random();
static final Log LOG = LogFactory.getLog(HRegion.class);
final AtomicBoolean closed = new AtomicBoolean(false);
@@ -991,7 +990,7 @@
storelist.add(stores.get(family));
}
return new HScanner(cols, firstRow, timestamp, memcache,
- storelist.toArray(new HStore [] {}), filter);
+ storelist.toArray(new HStore [storelist.size()]), filter);
} finally {
lock.releaseReadLock();
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Wed Aug 22 16:59:30 2007
@@ -577,6 +577,7 @@
logRollerThread.start();
leases = new Leases(conf.getLong("hbase.regionserver.lease.period",
3 * 60 * 1000), threadWakeFrequency);
+ leases.start();
// Server
@@ -1310,7 +1311,7 @@
s = scanners.remove(scannerName);
}
if(s == null) {
- throw new UnknownScannerException(scannerName.toString());
+ throw new UnknownScannerException(scannerName);
}
s.close();
leases.cancelLease(scannerId, scannerId);
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java Wed Aug 22 16:59:30 2007
@@ -141,14 +141,14 @@
Text family = new Text(d.getName() + ":");
families.add(family);
}
- return families.toArray(new Text [] {});
+ return families.toArray(new Text [families.size()]);
}
List <HRegionInfo> getRegions() {
return this.infos;
}
- HRegionInfo getRegionInfo(final String tableName) {
+ HRegionInfo getRegionInfo(final Text tableName) {
HRegionInfo result = null;
for(HRegionInfo i: getRegions()) {
if(i.tableDesc.getName().equals(tableName)) {
@@ -163,14 +163,15 @@
return result;
}
- private void dump(final String tableName) throws IOException {
+ private void dump(final Text tableName) throws IOException {
dump(getRegionInfo(tableName));
}
private void dump(final HRegionInfo info) throws IOException {
HRegion r = new HRegion(this.parentdir, null,
FileSystem.get(this.conf), conf, info, null);
- Text [] families = info.tableDesc.families().keySet().toArray(new Text [] {});
+ Text [] families = info.tableDesc.families().keySet().toArray(
+ new Text [info.tableDesc.families.size()]);
HInternalScannerInterface scanner =
r.getScanner(families, new Text(), System.currentTimeMillis(), null);
@@ -224,7 +225,7 @@
}
} else {
for (int i = 1; i < args.length; i++) {
- reader.dump(args[i]);
+ reader.dump(new Text(args[i]));
}
}
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java Wed Aug 22 16:59:30 2007
@@ -742,7 +742,7 @@
}
/** A compaction reader for MapFile */
- class MapFileCompactionReader implements CompactionReader {
+ static class MapFileCompactionReader implements CompactionReader {
final MapFile.Reader reader;
MapFileCompactionReader(final MapFile.Reader r) {
@@ -1114,7 +1114,7 @@
/*
* Data structure to hold result of a look at store file sizes.
*/
- class HStoreSize {
+ static class HStoreSize {
final long aggregate;
final long largest;
boolean splitable;
@@ -1361,7 +1361,6 @@
}
/** Shut it down! */
- @Override
public void close() {
if(! scannerClosed) {
try {
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java Wed Aug 22 16:59:30 2007
@@ -26,6 +26,7 @@
import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.apache.commons.logging.Log;
@@ -52,7 +53,7 @@
protected final int numRetries;
protected Random rand;
protected volatile SortedMap<Text, HRegionLocation> tableServers;
- protected BatchUpdate batch;
+ protected AtomicReference<BatchUpdate> batch;
// For row mutation operations
@@ -79,7 +80,7 @@
this.numRetries = conf.getInt("hbase.client.retries.number", 5);
this.rand = new Random();
tableServers = connection.getTableServers(tableName);
- this.batch = null;
+ this.batch = new AtomicReference<BatchUpdate>();
closed = false;
}
@@ -113,7 +114,7 @@
public synchronized void close() {
closed = true;
tableServers = null;
- batch = null;
+ batch.set(null);
connection.close(tableName);
}
@@ -136,11 +137,11 @@
*/
private void updateInProgress(boolean updateMustBeInProgress) {
if (updateMustBeInProgress) {
- if (batch == null) {
+ if (batch.get() == null) {
throw new IllegalStateException("no update in progress");
}
} else {
- if (batch != null) {
+ if (batch.get() != null) {
throw new IllegalStateException("update in progress");
}
}
@@ -508,8 +509,8 @@
public synchronized long startUpdate(final Text row) {
checkClosed();
updateInProgress(false);
- batch = new BatchUpdate();
- return batch.startUpdate(row);
+ batch.set(new BatchUpdate(rand.nextLong()));
+ return batch.get().startUpdate(row);
}
/**
@@ -526,7 +527,7 @@
throw new IllegalArgumentException("value cannot be null");
}
updateInProgress(true);
- batch.put(lockid, column, val);
+ batch.get().put(lockid, column, val);
}
/**
@@ -538,7 +539,7 @@
public void delete(long lockid, Text column) {
checkClosed();
updateInProgress(true);
- batch.delete(lockid, column);
+ batch.get().delete(lockid, column);
}
/**
@@ -549,10 +550,10 @@
public synchronized void abort(long lockid) {
checkClosed();
updateInProgress(true);
- if (batch.getLockid() != lockid) {
+ if (batch.get().getLockid() != lockid) {
throw new IllegalArgumentException("invalid lock id " + lockid);
}
- batch = null;
+ batch.set(null);
}
/**
@@ -577,18 +578,18 @@
checkClosed();
updateInProgress(true);
- if (batch.getLockid() != lockid) {
+ if (batch.get().getLockid() != lockid) {
throw new IllegalArgumentException("invalid lock id " + lockid);
}
try {
for (int tries = 0; tries < numRetries; tries++) {
- HRegionLocation r = getRegionLocation(batch.getRow());
+ HRegionLocation r = getRegionLocation(batch.get().getRow());
HRegionInterface server =
connection.getHRegionConnection(r.getServerAddress());
try {
server.batchUpdate(r.getRegionInfo().getRegionName(), timestamp,
- batch);
+ batch.get());
break;
} catch (IOException e) {
if (e instanceof RemoteException) {
@@ -612,7 +613,7 @@
}
}
} finally {
- batch = null;
+ batch.set(null);
}
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java Wed Aug 22 16:59:30 2007
@@ -63,6 +63,10 @@
this.leaseMonitor = new LeaseMonitor();
this.leaseMonitorThread = new Thread(leaseMonitor);
this.leaseMonitorThread.setName("Lease.monitor");
+ }
+
+ /** Starts the lease monitor */
+ public void start() {
leaseMonitorThread.start();
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java Wed Aug 22 16:59:30 2007
@@ -24,7 +24,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
-import java.util.Random;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -38,25 +37,33 @@
*/
public class BatchUpdate implements Writable, Iterable<BatchOperation> {
- // used to generate lock ids
- private Random rand;
-
// the row being updated
private Text row;
- // the lockid
- private long lockid;
+ // the lockid - not used on server side
+ private transient long lockid;
// the batched operations
private ArrayList<BatchOperation> operations;
- /** constructor */
+ /** Default constructor - used by Writable. */
public BatchUpdate() {
- this.rand = new Random();
this.row = new Text();
this.lockid = -1L;
this.operations = new ArrayList<BatchOperation>();
}
+
+ /**
+ * Client side constructor. Clients need to provide the lockid by some means
+ * such as Random.nextLong()
+ *
+ * @param lockid
+ */
+ public BatchUpdate(long lockid) {
+ this.row = new Text();
+ this.lockid = Long.valueOf(Math.abs(lockid));
+ this.operations = new ArrayList<BatchOperation>();
+ }
/** @return the lock id */
public long getLockid() {
@@ -84,7 +91,6 @@
*/
public synchronized long startUpdate(final Text row) {
this.row = row;
- this.lockid = Long.valueOf(Math.abs(rand.nextLong()));
return this.lockid;
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java?rev=568776&r1=568775&r2=568776&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java Wed Aug 22 16:59:30 2007
@@ -71,12 +71,6 @@
job.set(TableInputFormat.COLUMN_LIST, columns);
}
- /** {@inheritDoc} */
- @Override
- public void configure(JobConf job) {
- super.configure(job);
- }
-
/**
* Input:
* @param key is of type HStoreKey