Posted to commits@hbase.apache.org by st...@apache.org on 2010/01/04 01:23:12 UTC
svn commit: r895515 - in /hadoop/hbase/trunk: ./
src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/mapreduce/
src/java/org/apache/hadoop/hbase/regionserver/wal/
Author: stack
Date: Mon Jan 4 00:23:11 2010
New Revision: 895515
URL: http://svn.apache.org/viewvc?rev=895515&view=rev
Log:
HBASE-2086 Job(configuration,String) deprecated
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HBaseConfiguration.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Export.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Import.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Mon Jan 4 00:23:11 2010
@@ -279,6 +279,7 @@
HBASE-2085 StringBuffer -> StringBuilder - conversion of references as necessary
(Kay Kay via Stack)
HBASE-2052 Upper bound of outstanding WALs can be overrun
+ HBASE-2086 Job(configuration,String) deprecated (Kay Kay via Stack)
NEW FEATURES
HBASE-1901 "General" partitioner for "hbase-48" bulk (behind the api, write
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HBaseConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HBaseConfiguration.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HBaseConfiguration.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HBaseConfiguration.java Mon Jan 4 00:23:11 2010
@@ -47,7 +47,7 @@
}
/**
- * Instantinating HBaseConfiguration() is deprecated. Please use
+ * Instantiating HBaseConfiguration() is deprecated. Please use
* HBaseConfiguration#create(conf) to construct a plain Configuration
*/
@Deprecated
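
The javadoc fix above points callers at the static HBaseConfiguration#create factory rather than the deprecated constructor. A minimal sketch of that recommended usage, assuming only the create(Configuration) factory referenced in the javadoc; the key looked up at the end is purely illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HBaseConfExample {
  public static void main(String[] args) {
    // Deprecated style the javadoc warns about:
    //   Configuration conf = new HBaseConfiguration();
    // Recommended style: a plain Configuration seeded from an existing one.
    Configuration seed = new Configuration();
    Configuration conf = HBaseConfiguration.create(seed);
    System.out.println(conf.get("hbase.rootdir", "(unset)"));
  }
}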
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java Mon Jan 4 00:23:11 2010
@@ -29,6 +29,7 @@
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
@@ -123,8 +124,9 @@
iconf.addFromXML(content);
conf.set("hbase.index.conf", content);
}
-
- Job job = new Job(conf, "build index for table " + tableName);
+ Cluster mrCluster = new Cluster(conf);
+ Job job = Job.getInstance(mrCluster, conf);
+ job.setJobName("build index for table " + tableName);
// number of indexes to partition into
job.setNumReduceTasks(numReduceTasks);
Scan scan = new Scan();
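
The same replacement recurs in each mapreduce tool touched by this commit: the deprecated Job(Configuration, String) constructor gives way to a Cluster plus the Job.getInstance factory, with the job name set separately. A minimal sketch of the pattern, assuming a Hadoop version that ships org.apache.hadoop.mapreduce.Cluster (the 0.21-era API these files now import); the job name is a made-up placeholder:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;

public class JobFactoryExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create(new Configuration());
    // Deprecated form this commit replaces:
    //   Job job = new Job(conf, "example job");
    // Replacement used throughout this commit:
    Cluster mrCluster = new Cluster(conf);
    Job job = Job.getInstance(mrCluster, conf);
    job.setJobName("example job");
    System.out.println(job.getJobName());
  }
}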
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Export.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Export.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Export.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Export.java Mon Jan 4 00:23:11 2010
@@ -27,6 +27,7 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -78,7 +79,9 @@
throws IOException {
String tableName = args[0];
Path outputDir = new Path(args[1]);
- Job job = new Job(conf, NAME + "_" + tableName);
+ Cluster mrCluster = new Cluster(conf);
+ Job job = Job.getInstance(mrCluster, conf);
+ job.setJobName(NAME + "_" + tableName);
job.setJarByClass(Exporter.class);
// TODO: Allow passing filter and subset of rows/columns.
Scan s = new Scan();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Import.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Import.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Import.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/Import.java Mon Jan 4 00:23:11 2010
@@ -28,6 +28,7 @@
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -85,7 +86,10 @@
throws IOException {
String tableName = args[0];
Path inputDir = new Path(args[1]);
- Job job = new Job(conf, NAME + "_" + tableName);
+ Cluster mrCluster = new Cluster(conf);
+ Job job = Job.getInstance(mrCluster, conf);
+ job.setJobName(NAME + "_" + tableName);
+
job.setJarByClass(Importer.class);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormatClass(SequenceFileInputFormat.class);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java Mon Jan 4 00:23:11 2010
@@ -29,6 +29,7 @@
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
@@ -85,7 +86,9 @@
public static Job createSubmittableJob(Configuration conf, String[] args)
throws IOException {
String tableName = args[0];
- Job job = new Job(conf, NAME + "_" + tableName);
+ Cluster mrCluster = new Cluster(conf);
+ Job job = Job.getInstance(mrCluster, conf);
+ job.setJobName(NAME + "_" + tableName);
job.setJarByClass(RowCounter.class);
// Columns are space delimited
StringBuilder sb = new StringBuilder();
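
For context, a Job returned by these createSubmittableJob helpers is normally submitted from the tool's main method and waited on. A hypothetical driver sketch, not part of this commit; the table name and column are placeholders, and real invocations parse them from the command line via GenericOptionsParser:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.RowCounter;
import org.apache.hadoop.mapreduce.Job;

public class RowCounterDriver {
  public static void main(String[] args) throws Exception {
    // Hypothetical table name and family:qualifier column.
    Configuration conf = HBaseConfiguration.create(new Configuration());
    Job job = RowCounter.createSubmittableJob(conf,
        new String[] {"mytable", "info:col"});
    // Block until the MapReduce job finishes and exit with its status.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}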
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java?rev=895515&r1=895514&r2=895515&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java Mon Jan 4 00:23:11 2010
@@ -348,7 +348,7 @@
Path oldFile = cleanupCurrentWriter(this.filenum);
this.filenum = System.currentTimeMillis();
Path newPath = computeFilename(this.filenum);
- this.writer = createWriter(fs, newPath, new HBaseConfiguration(conf));
+ this.writer = createWriter(fs, newPath, HBaseConfiguration.create(conf));
LOG.info((oldFile != null?
"Roll " + FSUtils.getPath(oldFile) + ", entries=" +
this.numEntries.get() +