Posted to commits@hbase.apache.org by gi...@apache.org on 2018/08/16 14:48:50 UTC
[48/51] [partial] hbase-site git commit: Published site at 092efb42749bf7fc6ad338c96aae8e7b9d3a2c74.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f3d62514/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.html b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.html
index 3e3acbe..e2dc8f5 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.html
@@ -48,376 +48,379 @@
<span class="sourceLineNo">040</span>import org.apache.hadoop.mapred.OutputFormat;<a name="line.40"></a>
<span class="sourceLineNo">041</span>import org.apache.hadoop.mapred.TextInputFormat;<a name="line.41"></a>
<span class="sourceLineNo">042</span>import org.apache.hadoop.mapred.TextOutputFormat;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>import java.io.IOException;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import java.util.Collection;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.Map;<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>/**<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * Utility for {@link TableMap} and {@link TableReduce}<a name="line.49"></a>
-<span class="sourceLineNo">050</span> */<a name="line.50"></a>
-<span class="sourceLineNo">051</span>@InterfaceAudience.Public<a name="line.51"></a>
-<span class="sourceLineNo">052</span>@SuppressWarnings({ "rawtypes", "unchecked" })<a name="line.52"></a>
-<span class="sourceLineNo">053</span>public class TableMapReduceUtil {<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span> /**<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * Use this before submitting a TableMap job. It will<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * appropriately set up the JobConf.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> *<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * @param table The table name to read from.<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * @param columns The columns to scan.<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * @param mapper The mapper class to use.<a name="line.61"></a>
-<span class="sourceLineNo">062</span> * @param outputKeyClass The class of the output key.<a name="line.62"></a>
-<span class="sourceLineNo">063</span> * @param outputValueClass The class of the output value.<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * @param job The current job configuration to adjust.<a name="line.64"></a>
-<span class="sourceLineNo">065</span> */<a name="line.65"></a>
-<span class="sourceLineNo">066</span> public static void initTableMapJob(String table, String columns,<a name="line.66"></a>
-<span class="sourceLineNo">067</span> Class<? extends TableMap> mapper,<a name="line.67"></a>
-<span class="sourceLineNo">068</span> Class<?> outputKeyClass,<a name="line.68"></a>
-<span class="sourceLineNo">069</span> Class<?> outputValueClass, JobConf job) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span> initTableMapJob(table, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.70"></a>
-<span class="sourceLineNo">071</span> true, TableInputFormat.class);<a name="line.71"></a>
-<span class="sourceLineNo">072</span> }<a name="line.72"></a>
-<span class="sourceLineNo">073</span><a name="line.73"></a>
-<span class="sourceLineNo">074</span> public static void initTableMapJob(String table, String columns,<a name="line.74"></a>
-<span class="sourceLineNo">075</span> Class<? extends TableMap> mapper,<a name="line.75"></a>
-<span class="sourceLineNo">076</span> Class<?> outputKeyClass,<a name="line.76"></a>
-<span class="sourceLineNo">077</span> Class<?> outputValueClass, JobConf job, boolean addDependencyJars) {<a name="line.77"></a>
-<span class="sourceLineNo">078</span> initTableMapJob(table, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.78"></a>
-<span class="sourceLineNo">079</span> addDependencyJars, TableInputFormat.class);<a name="line.79"></a>
-<span class="sourceLineNo">080</span> }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span> /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * Use this before submitting a TableMap job. It will<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * appropriately set up the JobConf.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> *<a name="line.85"></a>
-<span class="sourceLineNo">086</span> * @param table The table name to read from.<a name="line.86"></a>
-<span class="sourceLineNo">087</span> * @param columns The columns to scan.<a name="line.87"></a>
-<span class="sourceLineNo">088</span> * @param mapper The mapper class to use.<a name="line.88"></a>
-<span class="sourceLineNo">089</span> * @param outputKeyClass The class of the output key.<a name="line.89"></a>
-<span class="sourceLineNo">090</span> * @param outputValueClass The class of the output value.<a name="line.90"></a>
-<span class="sourceLineNo">091</span> * @param job The current job configuration to adjust.<a name="line.91"></a>
-<span class="sourceLineNo">092</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.92"></a>
-<span class="sourceLineNo">093</span> * job classes via the distributed cache (tmpjars).<a name="line.93"></a>
-<span class="sourceLineNo">094</span> */<a name="line.94"></a>
-<span class="sourceLineNo">095</span> public static void initTableMapJob(String table, String columns,<a name="line.95"></a>
-<span class="sourceLineNo">096</span> Class<? extends TableMap> mapper,<a name="line.96"></a>
-<span class="sourceLineNo">097</span> Class<?> outputKeyClass,<a name="line.97"></a>
-<span class="sourceLineNo">098</span> Class<?> outputValueClass, JobConf job, boolean addDependencyJars,<a name="line.98"></a>
-<span class="sourceLineNo">099</span> Class<? extends InputFormat> inputFormat) {<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span> job.setInputFormat(inputFormat);<a name="line.101"></a>
-<span class="sourceLineNo">102</span> job.setMapOutputValueClass(outputValueClass);<a name="line.102"></a>
-<span class="sourceLineNo">103</span> job.setMapOutputKeyClass(outputKeyClass);<a name="line.103"></a>
-<span class="sourceLineNo">104</span> job.setMapperClass(mapper);<a name="line.104"></a>
-<span class="sourceLineNo">105</span> job.setStrings("io.serializations", job.get("io.serializations"),<a name="line.105"></a>
-<span class="sourceLineNo">106</span> MutationSerialization.class.getName(), ResultSerialization.class.getName());<a name="line.106"></a>
-<span class="sourceLineNo">107</span> FileInputFormat.addInputPaths(job, table);<a name="line.107"></a>
-<span class="sourceLineNo">108</span> job.set(TableInputFormat.COLUMN_LIST, columns);<a name="line.108"></a>
-<span class="sourceLineNo">109</span> if (addDependencyJars) {<a name="line.109"></a>
-<span class="sourceLineNo">110</span> try {<a name="line.110"></a>
-<span class="sourceLineNo">111</span> addDependencyJars(job);<a name="line.111"></a>
-<span class="sourceLineNo">112</span> } catch (IOException e) {<a name="line.112"></a>
-<span class="sourceLineNo">113</span> e.printStackTrace();<a name="line.113"></a>
-<span class="sourceLineNo">114</span> }<a name="line.114"></a>
-<span class="sourceLineNo">115</span> }<a name="line.115"></a>
-<span class="sourceLineNo">116</span> try {<a name="line.116"></a>
-<span class="sourceLineNo">117</span> initCredentials(job);<a name="line.117"></a>
-<span class="sourceLineNo">118</span> } catch (IOException ioe) {<a name="line.118"></a>
-<span class="sourceLineNo">119</span> // just spit out the stack trace? really?<a name="line.119"></a>
-<span class="sourceLineNo">120</span> ioe.printStackTrace();<a name="line.120"></a>
-<span class="sourceLineNo">121</span> }<a name="line.121"></a>
-<span class="sourceLineNo">122</span> }<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span> /**<a name="line.124"></a>
-<span class="sourceLineNo">125</span> * Sets up the job for reading from one or more multiple table snapshots, with one or more scans<a name="line.125"></a>
-<span class="sourceLineNo">126</span> * per snapshot.<a name="line.126"></a>
-<span class="sourceLineNo">127</span> * It bypasses hbase servers and read directly from snapshot files.<a name="line.127"></a>
-<span class="sourceLineNo">128</span> *<a name="line.128"></a>
-<span class="sourceLineNo">129</span> * @param snapshotScans map of snapshot name to scans on that snapshot.<a name="line.129"></a>
-<span class="sourceLineNo">130</span> * @param mapper The mapper class to use.<a name="line.130"></a>
-<span class="sourceLineNo">131</span> * @param outputKeyClass The class of the output key.<a name="line.131"></a>
-<span class="sourceLineNo">132</span> * @param outputValueClass The class of the output value.<a name="line.132"></a>
-<span class="sourceLineNo">133</span> * @param job The current job to adjust. Make sure the passed job is<a name="line.133"></a>
-<span class="sourceLineNo">134</span> * carrying all necessary HBase configuration.<a name="line.134"></a>
-<span class="sourceLineNo">135</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.135"></a>
-<span class="sourceLineNo">136</span> * job classes via the distributed cache (tmpjars).<a name="line.136"></a>
-<span class="sourceLineNo">137</span> */<a name="line.137"></a>
-<span class="sourceLineNo">138</span> public static void initMultiTableSnapshotMapperJob(Map<String, Collection<Scan>> snapshotScans,<a name="line.138"></a>
-<span class="sourceLineNo">139</span> Class<? extends TableMap> mapper, Class<?> outputKeyClass, Class<?> outputValueClass,<a name="line.139"></a>
-<span class="sourceLineNo">140</span> JobConf job, boolean addDependencyJars, Path tmpRestoreDir) throws IOException {<a name="line.140"></a>
-<span class="sourceLineNo">141</span> MultiTableSnapshotInputFormat.setInput(job, snapshotScans, tmpRestoreDir);<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span> job.setInputFormat(MultiTableSnapshotInputFormat.class);<a name="line.143"></a>
-<span class="sourceLineNo">144</span> if (outputValueClass != null) {<a name="line.144"></a>
-<span class="sourceLineNo">145</span> job.setMapOutputValueClass(outputValueClass);<a name="line.145"></a>
-<span class="sourceLineNo">146</span> }<a name="line.146"></a>
-<span class="sourceLineNo">147</span> if (outputKeyClass != null) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span> job.setMapOutputKeyClass(outputKeyClass);<a name="line.148"></a>
+<span class="sourceLineNo">043</span>import org.slf4j.Logger;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.slf4j.LoggerFactory;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import java.io.IOException;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.Collection;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.Map;<a name="line.48"></a>
+<span class="sourceLineNo">049</span><a name="line.49"></a>
+<span class="sourceLineNo">050</span>/**<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * Utility for {@link TableMap} and {@link TableReduce}<a name="line.51"></a>
+<span class="sourceLineNo">052</span> */<a name="line.52"></a>
+<span class="sourceLineNo">053</span>@InterfaceAudience.Public<a name="line.53"></a>
+<span class="sourceLineNo">054</span>@SuppressWarnings({ "rawtypes", "unchecked" })<a name="line.54"></a>
+<span class="sourceLineNo">055</span>public class TableMapReduceUtil {<a name="line.55"></a>
+<span class="sourceLineNo">056</span> private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class);<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span> /**<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * Use this before submitting a TableMap job. It will<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * appropriately set up the JobConf.<a name="line.60"></a>
+<span class="sourceLineNo">061</span> *<a name="line.61"></a>
+<span class="sourceLineNo">062</span> * @param table The table name to read from.<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * @param columns The columns to scan.<a name="line.63"></a>
+<span class="sourceLineNo">064</span> * @param mapper The mapper class to use.<a name="line.64"></a>
+<span class="sourceLineNo">065</span> * @param outputKeyClass The class of the output key.<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * @param outputValueClass The class of the output value.<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * @param job The current job configuration to adjust.<a name="line.67"></a>
+<span class="sourceLineNo">068</span> */<a name="line.68"></a>
+<span class="sourceLineNo">069</span> public static void initTableMapJob(String table, String columns,<a name="line.69"></a>
+<span class="sourceLineNo">070</span> Class<? extends TableMap> mapper,<a name="line.70"></a>
+<span class="sourceLineNo">071</span> Class<?> outputKeyClass,<a name="line.71"></a>
+<span class="sourceLineNo">072</span> Class<?> outputValueClass, JobConf job) {<a name="line.72"></a>
+<span class="sourceLineNo">073</span> initTableMapJob(table, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.73"></a>
+<span class="sourceLineNo">074</span> true, TableInputFormat.class);<a name="line.74"></a>
+<span class="sourceLineNo">075</span> }<a name="line.75"></a>
+<span class="sourceLineNo">076</span><a name="line.76"></a>
+<span class="sourceLineNo">077</span> public static void initTableMapJob(String table, String columns,<a name="line.77"></a>
+<span class="sourceLineNo">078</span> Class<? extends TableMap> mapper,<a name="line.78"></a>
+<span class="sourceLineNo">079</span> Class<?> outputKeyClass,<a name="line.79"></a>
+<span class="sourceLineNo">080</span> Class<?> outputValueClass, JobConf job, boolean addDependencyJars) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span> initTableMapJob(table, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.81"></a>
+<span class="sourceLineNo">082</span> addDependencyJars, TableInputFormat.class);<a name="line.82"></a>
+<span class="sourceLineNo">083</span> }<a name="line.83"></a>
+<span class="sourceLineNo">084</span><a name="line.84"></a>
+<span class="sourceLineNo">085</span> /**<a name="line.85"></a>
+<span class="sourceLineNo">086</span> * Use this before submitting a TableMap job. It will<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * appropriately set up the JobConf.<a name="line.87"></a>
+<span class="sourceLineNo">088</span> *<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * @param table The table name to read from.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> * @param columns The columns to scan.<a name="line.90"></a>
+<span class="sourceLineNo">091</span> * @param mapper The mapper class to use.<a name="line.91"></a>
+<span class="sourceLineNo">092</span> * @param outputKeyClass The class of the output key.<a name="line.92"></a>
+<span class="sourceLineNo">093</span> * @param outputValueClass The class of the output value.<a name="line.93"></a>
+<span class="sourceLineNo">094</span> * @param job The current job configuration to adjust.<a name="line.94"></a>
+<span class="sourceLineNo">095</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.95"></a>
+<span class="sourceLineNo">096</span> * job classes via the distributed cache (tmpjars).<a name="line.96"></a>
+<span class="sourceLineNo">097</span> */<a name="line.97"></a>
+<span class="sourceLineNo">098</span> public static void initTableMapJob(String table, String columns,<a name="line.98"></a>
+<span class="sourceLineNo">099</span> Class<? extends TableMap> mapper,<a name="line.99"></a>
+<span class="sourceLineNo">100</span> Class<?> outputKeyClass,<a name="line.100"></a>
+<span class="sourceLineNo">101</span> Class<?> outputValueClass, JobConf job, boolean addDependencyJars,<a name="line.101"></a>
+<span class="sourceLineNo">102</span> Class<? extends InputFormat> inputFormat) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span> job.setInputFormat(inputFormat);<a name="line.104"></a>
+<span class="sourceLineNo">105</span> job.setMapOutputValueClass(outputValueClass);<a name="line.105"></a>
+<span class="sourceLineNo">106</span> job.setMapOutputKeyClass(outputKeyClass);<a name="line.106"></a>
+<span class="sourceLineNo">107</span> job.setMapperClass(mapper);<a name="line.107"></a>
+<span class="sourceLineNo">108</span> job.setStrings("io.serializations", job.get("io.serializations"),<a name="line.108"></a>
+<span class="sourceLineNo">109</span> MutationSerialization.class.getName(), ResultSerialization.class.getName());<a name="line.109"></a>
+<span class="sourceLineNo">110</span> FileInputFormat.addInputPaths(job, table);<a name="line.110"></a>
+<span class="sourceLineNo">111</span> job.set(TableInputFormat.COLUMN_LIST, columns);<a name="line.111"></a>
+<span class="sourceLineNo">112</span> if (addDependencyJars) {<a name="line.112"></a>
+<span class="sourceLineNo">113</span> try {<a name="line.113"></a>
+<span class="sourceLineNo">114</span> addDependencyJars(job);<a name="line.114"></a>
+<span class="sourceLineNo">115</span> } catch (IOException e) {<a name="line.115"></a>
+<span class="sourceLineNo">116</span> LOG.error("IOException encountered while adding dependency jars", e);<a name="line.116"></a>
+<span class="sourceLineNo">117</span> }<a name="line.117"></a>
+<span class="sourceLineNo">118</span> }<a name="line.118"></a>
+<span class="sourceLineNo">119</span> try {<a name="line.119"></a>
+<span class="sourceLineNo">120</span> initCredentials(job);<a name="line.120"></a>
+<span class="sourceLineNo">121</span> } catch (IOException ioe) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span> // just spit out the stack trace? really?<a name="line.122"></a>
+<span class="sourceLineNo">123</span> LOG.error("IOException encountered while initializing credentials", ioe);<a name="line.123"></a>
+<span class="sourceLineNo">124</span> }<a name="line.124"></a>
+<span class="sourceLineNo">125</span> }<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span> /**<a name="line.127"></a>
+<span class="sourceLineNo">128</span> * Sets up the job for reading from one or more multiple table snapshots, with one or more scans<a name="line.128"></a>
+<span class="sourceLineNo">129</span> * per snapshot.<a name="line.129"></a>
+<span class="sourceLineNo">130</span> * It bypasses hbase servers and read directly from snapshot files.<a name="line.130"></a>
+<span class="sourceLineNo">131</span> *<a name="line.131"></a>
+<span class="sourceLineNo">132</span> * @param snapshotScans map of snapshot name to scans on that snapshot.<a name="line.132"></a>
+<span class="sourceLineNo">133</span> * @param mapper The mapper class to use.<a name="line.133"></a>
+<span class="sourceLineNo">134</span> * @param outputKeyClass The class of the output key.<a name="line.134"></a>
+<span class="sourceLineNo">135</span> * @param outputValueClass The class of the output value.<a name="line.135"></a>
+<span class="sourceLineNo">136</span> * @param job The current job to adjust. Make sure the passed job is<a name="line.136"></a>
+<span class="sourceLineNo">137</span> * carrying all necessary HBase configuration.<a name="line.137"></a>
+<span class="sourceLineNo">138</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.138"></a>
+<span class="sourceLineNo">139</span> * job classes via the distributed cache (tmpjars).<a name="line.139"></a>
+<span class="sourceLineNo">140</span> */<a name="line.140"></a>
+<span class="sourceLineNo">141</span> public static void initMultiTableSnapshotMapperJob(Map<String, Collection<Scan>> snapshotScans,<a name="line.141"></a>
+<span class="sourceLineNo">142</span> Class<? extends TableMap> mapper, Class<?> outputKeyClass, Class<?> outputValueClass,<a name="line.142"></a>
+<span class="sourceLineNo">143</span> JobConf job, boolean addDependencyJars, Path tmpRestoreDir) throws IOException {<a name="line.143"></a>
+<span class="sourceLineNo">144</span> MultiTableSnapshotInputFormat.setInput(job, snapshotScans, tmpRestoreDir);<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span> job.setInputFormat(MultiTableSnapshotInputFormat.class);<a name="line.146"></a>
+<span class="sourceLineNo">147</span> if (outputValueClass != null) {<a name="line.147"></a>
+<span class="sourceLineNo">148</span> job.setMapOutputValueClass(outputValueClass);<a name="line.148"></a>
<span class="sourceLineNo">149</span> }<a name="line.149"></a>
-<span class="sourceLineNo">150</span> job.setMapperClass(mapper);<a name="line.150"></a>
-<span class="sourceLineNo">151</span> if (addDependencyJars) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span> addDependencyJars(job);<a name="line.152"></a>
-<span class="sourceLineNo">153</span> }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(job);<a name="line.155"></a>
-<span class="sourceLineNo">156</span> }<a name="line.156"></a>
+<span class="sourceLineNo">150</span> if (outputKeyClass != null) {<a name="line.150"></a>
+<span class="sourceLineNo">151</span> job.setMapOutputKeyClass(outputKeyClass);<a name="line.151"></a>
+<span class="sourceLineNo">152</span> }<a name="line.152"></a>
+<span class="sourceLineNo">153</span> job.setMapperClass(mapper);<a name="line.153"></a>
+<span class="sourceLineNo">154</span> if (addDependencyJars) {<a name="line.154"></a>
+<span class="sourceLineNo">155</span> addDependencyJars(job);<a name="line.155"></a>
+<span class="sourceLineNo">156</span> }<a name="line.156"></a>
<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span> /**<a name="line.158"></a>
-<span class="sourceLineNo">159</span> * Sets up the job for reading from a table snapshot. It bypasses hbase servers<a name="line.159"></a>
-<span class="sourceLineNo">160</span> * and read directly from snapshot files.<a name="line.160"></a>
-<span class="sourceLineNo">161</span> *<a name="line.161"></a>
-<span class="sourceLineNo">162</span> * @param snapshotName The name of the snapshot (of a table) to read from.<a name="line.162"></a>
-<span class="sourceLineNo">163</span> * @param columns The columns to scan.<a name="line.163"></a>
-<span class="sourceLineNo">164</span> * @param mapper The mapper class to use.<a name="line.164"></a>
-<span class="sourceLineNo">165</span> * @param outputKeyClass The class of the output key.<a name="line.165"></a>
-<span class="sourceLineNo">166</span> * @param outputValueClass The class of the output value.<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * @param job The current job to adjust. Make sure the passed job is<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * carrying all necessary HBase configuration.<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * job classes via the distributed cache (tmpjars).<a name="line.170"></a>
-<span class="sourceLineNo">171</span> * @param tmpRestoreDir a temporary directory to copy the snapshot files into. Current user should<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * After the job is finished, restore directory can be deleted.<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * @throws IOException When setting up the details fails.<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * @see TableSnapshotInputFormat<a name="line.175"></a>
-<span class="sourceLineNo">176</span> */<a name="line.176"></a>
-<span class="sourceLineNo">177</span> public static void initTableSnapshotMapJob(String snapshotName, String columns,<a name="line.177"></a>
-<span class="sourceLineNo">178</span> Class<? extends TableMap> mapper,<a name="line.178"></a>
-<span class="sourceLineNo">179</span> Class<?> outputKeyClass,<a name="line.179"></a>
-<span class="sourceLineNo">180</span> Class<?> outputValueClass, JobConf job,<a name="line.180"></a>
-<span class="sourceLineNo">181</span> boolean addDependencyJars, Path tmpRestoreDir)<a name="line.181"></a>
-<span class="sourceLineNo">182</span> throws IOException {<a name="line.182"></a>
-<span class="sourceLineNo">183</span> TableSnapshotInputFormat.setInput(job, snapshotName, tmpRestoreDir);<a name="line.183"></a>
-<span class="sourceLineNo">184</span> initTableMapJob(snapshotName, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.184"></a>
-<span class="sourceLineNo">185</span> addDependencyJars, TableSnapshotInputFormat.class);<a name="line.185"></a>
-<span class="sourceLineNo">186</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(job);<a name="line.186"></a>
-<span class="sourceLineNo">187</span> }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span> /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * Sets up the job for reading from a table snapshot. It bypasses hbase servers<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * and read directly from snapshot files.<a name="line.191"></a>
-<span class="sourceLineNo">192</span> *<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * @param snapshotName The name of the snapshot (of a table) to read from.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * @param columns The columns to scan.<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * @param mapper The mapper class to use.<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * @param outputKeyClass The class of the output key.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * @param outputValueClass The class of the output value.<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * @param jobConf The current job to adjust. Make sure the passed job is<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * carrying all necessary HBase configuration.<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * job classes via the distributed cache (tmpjars).<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * @param tmpRestoreDir a temporary directory to copy the snapshot files into. Current user should<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * After the job is finished, restore directory can be deleted.<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * @param splitAlgo algorithm to split<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * @param numSplitsPerRegion how many input splits to generate per one region<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * @throws IOException When setting up the details fails.<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * @see TableSnapshotInputFormat<a name="line.208"></a>
-<span class="sourceLineNo">209</span> */<a name="line.209"></a>
-<span class="sourceLineNo">210</span> public static void initTableSnapshotMapJob(String snapshotName, String columns,<a name="line.210"></a>
-<span class="sourceLineNo">211</span> Class<? extends TableMap> mapper,<a name="line.211"></a>
-<span class="sourceLineNo">212</span> Class<?> outputKeyClass,<a name="line.212"></a>
-<span class="sourceLineNo">213</span> Class<?> outputValueClass, JobConf jobConf,<a name="line.213"></a>
-<span class="sourceLineNo">214</span> boolean addDependencyJars, Path tmpRestoreDir,<a name="line.214"></a>
-<span class="sourceLineNo">215</span> RegionSplitter.SplitAlgorithm splitAlgo,<a name="line.215"></a>
-<span class="sourceLineNo">216</span> int numSplitsPerRegion)<a name="line.216"></a>
-<span class="sourceLineNo">217</span> throws IOException {<a name="line.217"></a>
-<span class="sourceLineNo">218</span> TableSnapshotInputFormat.setInput(jobConf, snapshotName, tmpRestoreDir, splitAlgo,<a name="line.218"></a>
-<span class="sourceLineNo">219</span> numSplitsPerRegion);<a name="line.219"></a>
-<span class="sourceLineNo">220</span> initTableMapJob(snapshotName, columns, mapper, outputKeyClass, outputValueClass, jobConf,<a name="line.220"></a>
-<span class="sourceLineNo">221</span> addDependencyJars, TableSnapshotInputFormat.class);<a name="line.221"></a>
-<span class="sourceLineNo">222</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(jobConf);<a name="line.222"></a>
-<span class="sourceLineNo">223</span> }<a name="line.223"></a>
-<span class="sourceLineNo">224</span><a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span> /**<a name="line.226"></a>
-<span class="sourceLineNo">227</span> * Use this before submitting a TableReduce job. It will<a name="line.227"></a>
-<span class="sourceLineNo">228</span> * appropriately set up the JobConf.<a name="line.228"></a>
-<span class="sourceLineNo">229</span> *<a name="line.229"></a>
-<span class="sourceLineNo">230</span> * @param table The output table.<a name="line.230"></a>
-<span class="sourceLineNo">231</span> * @param reducer The reducer class to use.<a name="line.231"></a>
-<span class="sourceLineNo">232</span> * @param job The current job configuration to adjust.<a name="line.232"></a>
-<span class="sourceLineNo">233</span> * @throws IOException When determining the region count fails.<a name="line.233"></a>
-<span class="sourceLineNo">234</span> */<a name="line.234"></a>
-<span class="sourceLineNo">235</span> public static void initTableReduceJob(String table,<a name="line.235"></a>
-<span class="sourceLineNo">236</span> Class<? extends TableReduce> reducer, JobConf job)<a name="line.236"></a>
-<span class="sourceLineNo">237</span> throws IOException {<a name="line.237"></a>
-<span class="sourceLineNo">238</span> initTableReduceJob(table, reducer, job, null);<a name="line.238"></a>
-<span class="sourceLineNo">239</span> }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span> /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span> * Use this before submitting a TableReduce job. It will<a name="line.242"></a>
-<span class="sourceLineNo">243</span> * appropriately set up the JobConf.<a name="line.243"></a>
-<span class="sourceLineNo">244</span> *<a name="line.244"></a>
-<span class="sourceLineNo">245</span> * @param table The output table.<a name="line.245"></a>
-<span class="sourceLineNo">246</span> * @param reducer The reducer class to use.<a name="line.246"></a>
-<span class="sourceLineNo">247</span> * @param job The current job configuration to adjust.<a name="line.247"></a>
-<span class="sourceLineNo">248</span> * @param partitioner Partitioner to use. Pass <code>null</code> to use<a name="line.248"></a>
-<span class="sourceLineNo">249</span> * default partitioner.<a name="line.249"></a>
-<span class="sourceLineNo">250</span> * @throws IOException When determining the region count fails.<a name="line.250"></a>
-<span class="sourceLineNo">251</span> */<a name="line.251"></a>
-<span class="sourceLineNo">252</span> public static void initTableReduceJob(String table,<a name="line.252"></a>
-<span class="sourceLineNo">253</span> Class<? extends TableReduce> reducer, JobConf job, Class partitioner)<a name="line.253"></a>
-<span class="sourceLineNo">254</span> throws IOException {<a name="line.254"></a>
-<span class="sourceLineNo">255</span> initTableReduceJob(table, reducer, job, partitioner, true);<a name="line.255"></a>
-<span class="sourceLineNo">256</span> }<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span> /**<a name="line.258"></a>
-<span class="sourceLineNo">259</span> * Use this before submitting a TableReduce job. It will<a name="line.259"></a>
-<span class="sourceLineNo">260</span> * appropriately set up the JobConf.<a name="line.260"></a>
-<span class="sourceLineNo">261</span> *<a name="line.261"></a>
-<span class="sourceLineNo">262</span> * @param table The output table.<a name="line.262"></a>
-<span class="sourceLineNo">263</span> * @param reducer The reducer class to use.<a name="line.263"></a>
-<span class="sourceLineNo">264</span> * @param job The current job configuration to adjust.<a name="line.264"></a>
-<span class="sourceLineNo">265</span> * @param partitioner Partitioner to use. Pass <code>null</code> to use<a name="line.265"></a>
-<span class="sourceLineNo">266</span> * default partitioner.<a name="line.266"></a>
-<span class="sourceLineNo">267</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.267"></a>
-<span class="sourceLineNo">268</span> * job classes via the distributed cache (tmpjars).<a name="line.268"></a>
-<span class="sourceLineNo">269</span> * @throws IOException When determining the region count fails.<a name="line.269"></a>
-<span class="sourceLineNo">270</span> */<a name="line.270"></a>
-<span class="sourceLineNo">271</span> public static void initTableReduceJob(String table,<a name="line.271"></a>
-<span class="sourceLineNo">272</span> Class<? extends TableReduce> reducer, JobConf job, Class partitioner,<a name="line.272"></a>
-<span class="sourceLineNo">273</span> boolean addDependencyJars) throws IOException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span> job.setOutputFormat(TableOutputFormat.class);<a name="line.274"></a>
-<span class="sourceLineNo">275</span> job.setReducerClass(reducer);<a name="line.275"></a>
-<span class="sourceLineNo">276</span> job.set(TableOutputFormat.OUTPUT_TABLE, table);<a name="line.276"></a>
-<span class="sourceLineNo">277</span> job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.277"></a>
-<span class="sourceLineNo">278</span> job.setOutputValueClass(Put.class);<a name="line.278"></a>
-<span class="sourceLineNo">279</span> job.setStrings("io.serializations", job.get("io.serializations"),<a name="line.279"></a>
-<span class="sourceLineNo">280</span> MutationSerialization.class.getName(), ResultSerialization.class.getName());<a name="line.280"></a>
-<span class="sourceLineNo">281</span> if (partitioner == HRegionPartitioner.class) {<a name="line.281"></a>
-<span class="sourceLineNo">282</span> job.setPartitionerClass(HRegionPartitioner.class);<a name="line.282"></a>
-<span class="sourceLineNo">283</span> int regions =<a name="line.283"></a>
-<span class="sourceLineNo">284</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.284"></a>
-<span class="sourceLineNo">285</span> if (job.getNumReduceTasks() > regions) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span> job.setNumReduceTasks(regions);<a name="line.286"></a>
-<span class="sourceLineNo">287</span> }<a name="line.287"></a>
-<span class="sourceLineNo">288</span> } else if (partitioner != null) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span> job.setPartitionerClass(partitioner);<a name="line.289"></a>
-<span class="sourceLineNo">290</span> }<a name="line.290"></a>
-<span class="sourceLineNo">291</span> if (addDependencyJars) {<a name="line.291"></a>
-<span class="sourceLineNo">292</span> addDependencyJars(job);<a name="line.292"></a>
+<span class="sourceLineNo">158</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(job);<a name="line.158"></a>
+<span class="sourceLineNo">159</span> }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span> /**<a name="line.161"></a>
+<span class="sourceLineNo">162</span> * Sets up the job for reading from a table snapshot. It bypasses hbase servers<a name="line.162"></a>
+<span class="sourceLineNo">163</span> * and read directly from snapshot files.<a name="line.163"></a>
+<span class="sourceLineNo">164</span> *<a name="line.164"></a>
+<span class="sourceLineNo">165</span> * @param snapshotName The name of the snapshot (of a table) to read from.<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * @param columns The columns to scan.<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * @param mapper The mapper class to use.<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * @param outputKeyClass The class of the output key.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * @param outputValueClass The class of the output value.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> * @param job The current job to adjust. Make sure the passed job is<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * carrying all necessary HBase configuration.<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * job classes via the distributed cache (tmpjars).<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * @param tmpRestoreDir a temporary directory to copy the snapshot files into. Current user should<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * After the job is finished, restore directory can be deleted.<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * @throws IOException When setting up the details fails.<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * @see TableSnapshotInputFormat<a name="line.178"></a>
+<span class="sourceLineNo">179</span> */<a name="line.179"></a>
+<span class="sourceLineNo">180</span> public static void initTableSnapshotMapJob(String snapshotName, String columns,<a name="line.180"></a>
+<span class="sourceLineNo">181</span> Class<? extends TableMap> mapper,<a name="line.181"></a>
+<span class="sourceLineNo">182</span> Class<?> outputKeyClass,<a name="line.182"></a>
+<span class="sourceLineNo">183</span> Class<?> outputValueClass, JobConf job,<a name="line.183"></a>
+<span class="sourceLineNo">184</span> boolean addDependencyJars, Path tmpRestoreDir)<a name="line.184"></a>
+<span class="sourceLineNo">185</span> throws IOException {<a name="line.185"></a>
+<span class="sourceLineNo">186</span> TableSnapshotInputFormat.setInput(job, snapshotName, tmpRestoreDir);<a name="line.186"></a>
+<span class="sourceLineNo">187</span> initTableMapJob(snapshotName, columns, mapper, outputKeyClass, outputValueClass, job,<a name="line.187"></a>
+<span class="sourceLineNo">188</span> addDependencyJars, TableSnapshotInputFormat.class);<a name="line.188"></a>
+<span class="sourceLineNo">189</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(job);<a name="line.189"></a>
+<span class="sourceLineNo">190</span> }<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span> /**<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * Sets up the job for reading from a table snapshot. It bypasses hbase servers<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * and read directly from snapshot files.<a name="line.194"></a>
+<span class="sourceLineNo">195</span> *<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * @param snapshotName The name of the snapshot (of a table) to read from.<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * @param columns The columns to scan.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * @param mapper The mapper class to use.<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * @param outputKeyClass The class of the output key.<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * @param outputValueClass The class of the output value.<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * @param jobConf The current job to adjust. Make sure the passed job is<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * carrying all necessary HBase configuration.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * job classes via the distributed cache (tmpjars).<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * @param tmpRestoreDir a temporary directory to copy the snapshot files into. Current user should<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * After the job is finished, restore directory can be deleted.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * @param splitAlgo algorithm to split<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * @param numSplitsPerRegion how many input splits to generate per one region<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * @throws IOException When setting up the details fails.<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * @see TableSnapshotInputFormat<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span> public static void initTableSnapshotMapJob(String snapshotName, String columns,<a name="line.213"></a>
+<span class="sourceLineNo">214</span> Class<? extends TableMap> mapper,<a name="line.214"></a>
+<span class="sourceLineNo">215</span> Class<?> outputKeyClass,<a name="line.215"></a>
+<span class="sourceLineNo">216</span> Class<?> outputValueClass, JobConf jobConf,<a name="line.216"></a>
+<span class="sourceLineNo">217</span> boolean addDependencyJars, Path tmpRestoreDir,<a name="line.217"></a>
+<span class="sourceLineNo">218</span> RegionSplitter.SplitAlgorithm splitAlgo,<a name="line.218"></a>
+<span class="sourceLineNo">219</span> int numSplitsPerRegion)<a name="line.219"></a>
+<span class="sourceLineNo">220</span> throws IOException {<a name="line.220"></a>
+<span class="sourceLineNo">221</span> TableSnapshotInputFormat.setInput(jobConf, snapshotName, tmpRestoreDir, splitAlgo,<a name="line.221"></a>
+<span class="sourceLineNo">222</span> numSplitsPerRegion);<a name="line.222"></a>
+<span class="sourceLineNo">223</span> initTableMapJob(snapshotName, columns, mapper, outputKeyClass, outputValueClass, jobConf,<a name="line.223"></a>
+<span class="sourceLineNo">224</span> addDependencyJars, TableSnapshotInputFormat.class);<a name="line.224"></a>
+<span class="sourceLineNo">225</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.resetCacheConfig(jobConf);<a name="line.225"></a>
+<span class="sourceLineNo">226</span> }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span> /**<a name="line.229"></a>
+<span class="sourceLineNo">230</span> * Use this before submitting a TableReduce job. It will<a name="line.230"></a>
+<span class="sourceLineNo">231</span> * appropriately set up the JobConf.<a name="line.231"></a>
+<span class="sourceLineNo">232</span> *<a name="line.232"></a>
+<span class="sourceLineNo">233</span> * @param table The output table.<a name="line.233"></a>
+<span class="sourceLineNo">234</span> * @param reducer The reducer class to use.<a name="line.234"></a>
+<span class="sourceLineNo">235</span> * @param job The current job configuration to adjust.<a name="line.235"></a>
+<span class="sourceLineNo">236</span> * @throws IOException When determining the region count fails.<a name="line.236"></a>
+<span class="sourceLineNo">237</span> */<a name="line.237"></a>
+<span class="sourceLineNo">238</span> public static void initTableReduceJob(String table,<a name="line.238"></a>
+<span class="sourceLineNo">239</span> Class<? extends TableReduce> reducer, JobConf job)<a name="line.239"></a>
+<span class="sourceLineNo">240</span> throws IOException {<a name="line.240"></a>
+<span class="sourceLineNo">241</span> initTableReduceJob(table, reducer, job, null);<a name="line.241"></a>
+<span class="sourceLineNo">242</span> }<a name="line.242"></a>
+<span class="sourceLineNo">243</span><a name="line.243"></a>
+<span class="sourceLineNo">244</span> /**<a name="line.244"></a>
+<span class="sourceLineNo">245</span> * Use this before submitting a TableReduce job. It will<a name="line.245"></a>
+<span class="sourceLineNo">246</span> * appropriately set up the JobConf.<a name="line.246"></a>
+<span class="sourceLineNo">247</span> *<a name="line.247"></a>
+<span class="sourceLineNo">248</span> * @param table The output table.<a name="line.248"></a>
+<span class="sourceLineNo">249</span> * @param reducer The reducer class to use.<a name="line.249"></a>
+<span class="sourceLineNo">250</span> * @param job The current job configuration to adjust.<a name="line.250"></a>
+<span class="sourceLineNo">251</span> * @param partitioner Partitioner to use. Pass <code>null</code> to use<a name="line.251"></a>
+<span class="sourceLineNo">252</span> * default partitioner.<a name="line.252"></a>
+<span class="sourceLineNo">253</span> * @throws IOException When determining the region count fails.<a name="line.253"></a>
+<span class="sourceLineNo">254</span> */<a name="line.254"></a>
+<span class="sourceLineNo">255</span> public static void initTableReduceJob(String table,<a name="line.255"></a>
+<span class="sourceLineNo">256</span> Class<? extends TableReduce> reducer, JobConf job, Class partitioner)<a name="line.256"></a>
+<span class="sourceLineNo">257</span> throws IOException {<a name="line.257"></a>
+<span class="sourceLineNo">258</span> initTableReduceJob(table, reducer, job, partitioner, true);<a name="line.258"></a>
+<span class="sourceLineNo">259</span> }<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span> /**<a name="line.261"></a>
+<span class="sourceLineNo">262</span> * Use this before submitting a TableReduce job. It will<a name="line.262"></a>
+<span class="sourceLineNo">263</span> * appropriately set up the JobConf.<a name="line.263"></a>
+<span class="sourceLineNo">264</span> *<a name="line.264"></a>
+<span class="sourceLineNo">265</span> * @param table The output table.<a name="line.265"></a>
+<span class="sourceLineNo">266</span> * @param reducer The reducer class to use.<a name="line.266"></a>
+<span class="sourceLineNo">267</span> * @param job The current job configuration to adjust.<a name="line.267"></a>
+<span class="sourceLineNo">268</span> * @param partitioner Partitioner to use. Pass <code>null</code> to use<a name="line.268"></a>
+<span class="sourceLineNo">269</span> * default partitioner.<a name="line.269"></a>
+<span class="sourceLineNo">270</span> * @param addDependencyJars upload HBase jars and jars for any of the configured<a name="line.270"></a>
+<span class="sourceLineNo">271</span> * job classes via the distributed cache (tmpjars).<a name="line.271"></a>
+<span class="sourceLineNo">272</span> * @throws IOException When determining the region count fails.<a name="line.272"></a>
+<span class="sourceLineNo">273</span> */<a name="line.273"></a>
+<span class="sourceLineNo">274</span> public static void initTableReduceJob(String table,<a name="line.274"></a>
+<span class="sourceLineNo">275</span> Class<? extends TableReduce> reducer, JobConf job, Class partitioner,<a name="line.275"></a>
+<span class="sourceLineNo">276</span> boolean addDependencyJars) throws IOException {<a name="line.276"></a>
+<span class="sourceLineNo">277</span> job.setOutputFormat(TableOutputFormat.class);<a name="line.277"></a>
+<span class="sourceLineNo">278</span> job.setReducerClass(reducer);<a name="line.278"></a>
+<span class="sourceLineNo">279</span> job.set(TableOutputFormat.OUTPUT_TABLE, table);<a name="line.279"></a>
+<span class="sourceLineNo">280</span> job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.280"></a>
+<span class="sourceLineNo">281</span> job.setOutputValueClass(Put.class);<a name="line.281"></a>
+<span class="sourceLineNo">282</span> job.setStrings("io.serializations", job.get("io.serializations"),<a name="line.282"></a>
+<span class="sourceLineNo">283</span> MutationSerialization.class.getName(), ResultSerialization.class.getName());<a name="line.283"></a>
+<span class="sourceLineNo">284</span> if (partitioner == HRegionPartitioner.class) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span> job.setPartitionerClass(HRegionPartitioner.class);<a name="line.285"></a>
+<span class="sourceLineNo">286</span> int regions =<a name="line.286"></a>
+<span class="sourceLineNo">287</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.287"></a>
+<span class="sourceLineNo">288</span> if (job.getNumReduceTasks() > regions) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span> job.setNumReduceTasks(regions);<a name="line.289"></a>
+<span class="sourceLineNo">290</span> }<a name="line.290"></a>
+<span class="sourceLineNo">291</span> } else if (partitioner != null) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span> job.setPartitionerClass(partitioner);<a name="line.292"></a>
<span class="sourceLineNo">293</span> }<a name="line.293"></a>
-<span class="sourceLineNo">294</span> initCredentials(job);<a name="line.294"></a>
-<span class="sourceLineNo">295</span> }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span> public static void initCredentials(JobConf job) throws IOException {<a name="line.297"></a>
-<span class="sourceLineNo">298</span> UserProvider userProvider = UserProvider.instantiate(job);<a name="line.298"></a>
-<span class="sourceLineNo">299</span> if (userProvider.isHadoopSecurityEnabled()) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span> // propagate delegation related props from launcher job to MR job<a name="line.300"></a>
-<span class="sourceLineNo">301</span> if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span> job.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));<a name="line.302"></a>
-<span class="sourceLineNo">303</span> }<a name="line.303"></a>
-<span class="sourceLineNo">304</span> }<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span> if (userProvider.isHBaseSecurityEnabled()) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span> Connection conn = ConnectionFactory.createConnection(job);<a name="line.307"></a>
-<span class="sourceLineNo">308</span> try {<a name="line.308"></a>
-<span class="sourceLineNo">309</span> // login the server principal (if using secure Hadoop)<a name="line.309"></a>
-<span class="sourceLineNo">310</span> User user = userProvider.getCurrent();<a name="line.310"></a>
-<span class="sourceLineNo">311</span> TokenUtil.addTokenForJob(conn, job, user);<a name="line.311"></a>
-<span class="sourceLineNo">312</span> } catch (InterruptedException ie) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span> ie.printStackTrace();<a name="line.313"></a>
-<span class="sourceLineNo">314</span> Thread.currentThread().interrupt();<a name="line.314"></a>
-<span class="sourceLineNo">315</span> } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span> conn.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span> }<a name="line.317"></a>
-<span class="sourceLineNo">318</span> }<a name="line.318"></a>
-<span class="sourceLineNo">319</span> }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span> /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span> * Ensures that the given number of reduce tasks for the given job<a name="line.322"></a>
-<span class="sourceLineNo">323</span> * configuration does not exceed the number of regions for the given table.<a name="line.323"></a>
-<span class="sourceLineNo">324</span> *<a name="line.324"></a>
-<span class="sourceLineNo">325</span> * @param table The table to get the region count for.<a name="line.325"></a>
-<span class="sourceLineNo">326</span> * @param job The current job configuration to adjust.<a name="line.326"></a>
-<span class="sourceLineNo">327</span> * @throws IOException When retrieving the table details fails.<a name="line.327"></a>
-<span class="sourceLineNo">328</span> */<a name="line.328"></a>
-<span class="sourceLineNo">329</span> // Used by tests.<a name="line.329"></a>
-<span class="sourceLineNo">330</span> public static void limitNumReduceTasks(String table, JobConf job)<a name="line.330"></a>
-<span class="sourceLineNo">331</span> throws IOException {<a name="line.331"></a>
-<span class="sourceLineNo">332</span> int regions =<a name="line.332"></a>
-<span class="sourceLineNo">333</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.333"></a>
-<span class="sourceLineNo">334</span> if (job.getNumReduceTasks() > regions)<a name="line.334"></a>
-<span class="sourceLineNo">335</span> job.setNumReduceTasks(regions);<a name="line.335"></a>
-<span class="sourceLineNo">336</span> }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span> /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span> * Ensures that the given number of map tasks for the given job<a name="line.339"></a>
-<span class="sourceLineNo">340</span> * configuration does not exceed the number of regions for the given table.<a name="line.340"></a>
-<span class="sourceLineNo">341</span> *<a name="line.341"></a>
-<span class="sourceLineNo">342</span> * @param table The table to get the region count for.<a name="line.342"></a>
-<span class="sourceLineNo">343</span> * @param job The current job configuration to adjust.<a name="line.343"></a>
-<span class="sourceLineNo">344</span> * @throws IOException When retrieving the table details fails.<a name="line.344"></a>
-<span class="sourceLineNo">345</span> */<a name="line.345"></a>
-<span class="sourceLineNo">346</span> // Used by tests.<a name="line.346"></a>
-<span class="sourceLineNo">347</span> public static void limitNumMapTasks(String table, JobConf job)<a name="line.347"></a>
-<span class="sourceLineNo">348</span> throws IOException {<a name="line.348"></a>
-<span class="sourceLineNo">349</span> int regions =<a name="line.349"></a>
-<span class="sourceLineNo">350</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.350"></a>
-<span class="sourceLineNo">351</span> if (job.getNumMapTasks() > regions)<a name="line.351"></a>
-<span class="sourceLineNo">352</span> job.setNumMapTasks(regions);<a name="line.352"></a>
-<span class="sourceLineNo">353</span> }<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span> /**<a name="line.355"></a>
-<span class="sourceLineNo">356</span> * Sets the number of reduce tasks for the given job configuration to the<a name="line.356"></a>
-<span class="sourceLineNo">357</span> * number of regions the given table has.<a name="line.357"></a>
-<span class="sourceLineNo">358</span> *<a name="line.358"></a>
-<span class="sourceLineNo">359</span> * @param table The table to get the region count for.<a name="line.359"></a>
-<span class="sourceLineNo">360</span> * @param job The current job configuration to adjust.<a name="line.360"></a>
-<span class="sourceLineNo">361</span> * @throws IOException When retrieving the table details fails.<a name="line.361"></a>
-<span class="sourceLineNo">362</span> */<a name="line.362"></a>
-<span class="sourceLineNo">363</span> public static void setNumReduceTasks(String table, JobConf job)<a name="line.363"></a>
-<span class="sourceLineNo">364</span> throws IOException {<a name="line.364"></a>
-<span class="sourceLineNo">365</span> job.setNumReduceTasks(MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job),<a name="line.365"></a>
-<span class="sourceLineNo">366</span> TableName.valueOf(table)));<a name="line.366"></a>
-<span class="sourceLineNo">367</span> }<a name="line.367"></a>
-<span class="sourceLineNo">368</span><a name="line.368"></a>
-<span class="sourceLineNo">369</span> /**<a name="line.369"></a>
-<span class="sourceLineNo">370</span> * Sets the number of map tasks for the given job configuration to the<a name="line.370"></a>
-<span class="sourceLineNo">371</span> * number of regions the given table has.<a name="line.371"></a>
-<span class="sourceLineNo">372</span> *<a name="line.372"></a>
-<span class="sourceLineNo">373</span> * @param table The table to get the region count for.<a name="line.373"></a>
-<span class="sourceLineNo">374</span> * @param job The current job configuration to adjust.<a name="line.374"></a>
-<span class="sourceLineNo">375</span> * @throws IOException When retrieving the table details fails.<a name="line.375"></a>
-<span class="sourceLineNo">376</span> */<a name="line.376"></a>
-<span class="sourceLineNo">377</span> public static void setNumMapTasks(String table, JobConf job)<a name="line.377"></a>
-<span class="sourceLineNo">378</span> throws IOException {<a name="line.378"></a>
-<span class="sourceLineNo">379</span> job.setNumMapTasks(MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job),<a name="line.379"></a>
-<span class="sourceLineNo">380</span> TableName.valueOf(table)));<a name="line.380"></a>
-<span class="sourceLineNo">381</span> }<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span> /**<a name="line.383"></a>
-<span class="sourceLineNo">384</span> * Sets the number of rows to return and cache with each scanner iteration.<a name="line.384"></a>
-<span class="sourceLineNo">385</span> * Higher caching values will enable faster mapreduce jobs at the expense of<a name="line.385"></a>
-<span class="sourceLineNo">386</span> * requiring more heap to contain the cached rows.<a name="line.386"></a>
-<span class="sourceLineNo">387</span> *<a name="line.387"></a>
-<span class="sourceLineNo">388</span> * @param job The current job configuration to adjust.<a name="line.388"></a>
-<span class="sourceLineNo">389</span> * @param batchSize The number of rows to return in batch with each scanner<a name="line.389"></a>
-<span class="sourceLineNo">390</span> * iteration.<a name="line.390"></a>
-<span class="sourceLineNo">391</span> */<a name="line.391"></a>
-<span class="sourceLineNo">392</span> public static void setScannerCaching(JobConf job, int batchSize) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span> job.setInt("hbase.client.scanner.caching", batchSize);<a name="line.393"></a>
-<span class="sourceLineNo">394</span> }<a name="line.394"></a>
-<span class="sourceLineNo">395</span><a name="line.395"></a>
-<span class="sourceLineNo">396</span> /**<a name="line.396"></a>
-<span class="sourceLineNo">397</span> * @see org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil#addDependencyJars(org.apache.hadoop.mapreduce.Job)<a name="line.397"></a>
-<span class="sourceLineNo">398</span> */<a name="line.398"></a>
-<span class="sourceLineNo">399</span> public static void addDependencyJars(JobConf job) throws IOException {<a name="line.399"></a>
-<span class="sourceLineNo">400</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addHBaseDependencyJars(job);<a name="line.400"></a>
-<span class="sourceLineNo">401</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(<a name="line.401"></a>
-<span class="sourceLineNo">402</span> job,<a name="line.402"></a>
-<span class="sourceLineNo">403</span> job.getMapOutputKeyClass(),<a name="line.403"></a>
-<span class="sourceLineNo">404</span> job.getMapOutputValueClass(),<a name="line.404"></a>
-<span class="sourceLineNo">405</span> job.getOutputKeyClass(),<a name="line.405"></a>
-<span class="sourceLineNo">406</span> job.getOutputValueClass(),<a name="line.406"></a>
-<span class="sourceLineNo">407</span> job.getPartitionerClass(),<a name="line.407"></a>
-<span class="sourceLineNo">408</span> job.getClass("mapred.input.format.class", TextInputFormat.class, InputFormat.class),<a name="line.408"></a>
-<span class="sourceLineNo">409</span> job.getClass("mapred.output.format.class", TextOutputFormat.class, OutputFormat.class),<a name="line.409"></a>
-<span class="sourceLineNo">410</span> job.getCombinerClass());<a name="line.410"></a>
-<span class="sourceLineNo">411</span> }<a name="line.411"></a>
-<span class="sourceLineNo">412</span>}<a name="line.412"></a>
+<span class="sourceLineNo">294</span> if (addDependencyJars) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span> addDependencyJars(job);<a name="line.295"></a>
+<span class="sourceLineNo">296</span> }<a name="line.296"></a>
+<span class="sourceLineNo">297</span> initCredentials(job);<a name="line.297"></a>
+<span class="sourceLineNo">298</span> }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span> public static void initCredentials(JobConf job) throws IOException {<a name="line.300"></a>
+<span class="sourceLineNo">301</span> UserProvider userProvider = UserProvider.instantiate(job);<a name="line.301"></a>
+<span class="sourceLineNo">302</span> if (userProvider.isHadoopSecurityEnabled()) {<a name="line.302"></a>
+<span class="sourceLineNo">303</span> // propagate delegation related props from launcher job to MR job<a name="line.303"></a>
+<span class="sourceLineNo">304</span> if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span> job.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));<a name="line.305"></a>
+<span class="sourceLineNo">306</span> }<a name="line.306"></a>
+<span class="sourceLineNo">307</span> }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span> if (userProvider.isHBaseSecurityEnabled()) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span> Connection conn = ConnectionFactory.createConnection(job);<a name="line.310"></a>
+<span class="sourceLineNo">311</span> try {<a name="line.311"></a>
+<span class="sourceLineNo">312</span> // login the server principal (if using secure Hadoop)<a name="line.312"></a>
+<span class="sourceLineNo">313</span> User user = userProvider.getCurrent();<a name="line.313"></a>
+<span class="sourceLineNo">314</span> TokenUtil.addTokenForJob(conn, job, user);<a name="line.314"></a>
+<span class="sourceLineNo">315</span> } catch (InterruptedException ie) {<a name="line.315"></a>
+<span class="sourceLineNo">316</span> LOG.error("Interrupted obtaining user authentication token", ie);<a name="line.316"></a>
+<span class="sourceLineNo">317</span> Thread.currentThread().interrupt();<a name="line.317"></a>
+<span class="sourceLineNo">318</span> } finally {<a name="line.318"></a>
+<span class="sourceLineNo">319</span> conn.close();<a name="line.319"></a>
+<span class="sourceLineNo">320</span> }<a name="line.320"></a>
+<span class="sourceLineNo">321</span> }<a name="line.321"></a>
+<span class="sourceLineNo">322</span> }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span> /**<a name="line.324"></a>
+<span class="sourceLineNo">325</span> * Ensures that the given number of reduce tasks for the given job<a name="line.325"></a>
+<span class="sourceLineNo">326</span> * configuration does not exceed the number of regions for the given table.<a name="line.326"></a>
+<span class="sourceLineNo">327</span> *<a name="line.327"></a>
+<span class="sourceLineNo">328</span> * @param table The table to get the region count for.<a name="line.328"></a>
+<span class="sourceLineNo">329</span> * @param job The current job configuration to adjust.<a name="line.329"></a>
+<span class="sourceLineNo">330</span> * @throws IOException When retrieving the table details fails.<a name="line.330"></a>
+<span class="sourceLineNo">331</span> */<a name="line.331"></a>
+<span class="sourceLineNo">332</span> // Used by tests.<a name="line.332"></a>
+<span class="sourceLineNo">333</span> public static void limitNumReduceTasks(String table, JobConf job)<a name="line.333"></a>
+<span class="sourceLineNo">334</span> throws IOException {<a name="line.334"></a>
+<span class="sourceLineNo">335</span> int regions =<a name="line.335"></a>
+<span class="sourceLineNo">336</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.336"></a>
+<span class="sourceLineNo">337</span> if (job.getNumReduceTasks() > regions)<a name="line.337"></a>
+<span class="sourceLineNo">338</span> job.setNumReduceTasks(regions);<a name="line.338"></a>
+<span class="sourceLineNo">339</span> }<a name="line.339"></a>
+<span class="sourceLineNo">340</span><a name="line.340"></a>
+<span class="sourceLineNo">341</span> /**<a name="line.341"></a>
+<span class="sourceLineNo">342</span> * Ensures that the given number of map tasks for the given job<a name="line.342"></a>
+<span class="sourceLineNo">343</span> * configuration does not exceed the number of regions for the given table.<a name="line.343"></a>
+<span class="sourceLineNo">344</span> *<a name="line.344"></a>
+<span class="sourceLineNo">345</span> * @param table The table to get the region count for.<a name="line.345"></a>
+<span class="sourceLineNo">346</span> * @param job The current job configuration to adjust.<a name="line.346"></a>
+<span class="sourceLineNo">347</span> * @throws IOException When retrieving the table details fails.<a name="line.347"></a>
+<span class="sourceLineNo">348</span> */<a name="line.348"></a>
+<span class="sourceLineNo">349</span> // Used by tests.<a name="line.349"></a>
+<span class="sourceLineNo">350</span> public static void limitNumMapTasks(String table, JobConf job)<a name="line.350"></a>
+<span class="sourceLineNo">351</span> throws IOException {<a name="line.351"></a>
+<span class="sourceLineNo">352</span> int regions =<a name="line.352"></a>
+<span class="sourceLineNo">353</span> MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job), TableName.valueOf(table));<a name="line.353"></a>
+<span class="sourceLineNo">354</span> if (job.getNumMapTasks() > regions)<a name="line.354"></a>
+<span class="sourceLineNo">355</span> job.setNumMapTasks(regions);<a name="line.355"></a>
+<span class="sourceLineNo">356</span> }<a name="line.356"></a>
+<span class="sourceLineNo">357</span><a name="line.357"></a>
+<span class="sourceLineNo">358</span> /**<a name="line.358"></a>
+<span class="sourceLineNo">359</span> * Sets the number of reduce tasks for the given job configuration to the<a name="line.359"></a>
+<span class="sourceLineNo">360</span> * number of regions the given table has.<a name="line.360"></a>
+<span class="sourceLineNo">361</span> *<a name="line.361"></a>
+<span class="sourceLineNo">362</span> * @param table The table to get the region count for.<a name="line.362"></a>
+<span class="sourceLineNo">363</span> * @param job The current job configuration to adjust.<a name="line.363"></a>
+<span class="sourceLineNo">364</span> * @throws IOException When retrieving the table details fails.<a name="line.364"></a>
+<span class="sourceLineNo">365</span> */<a name="line.365"></a>
+<span class="sourceLineNo">366</span> public static void setNumReduceTasks(String table, JobConf job)<a name="line.366"></a>
+<span class="sourceLineNo">367</span> throws IOException {<a name="line.367"></a>
+<span class="sourceLineNo">368</span> job.setNumReduceTasks(MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job),<a name="line.368"></a>
+<span class="sourceLineNo">369</span> TableName.valueOf(table)));<a name="line.369"></a>
+<span class="sourceLineNo">370</span> }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span> /**<a name="line.372"></a>
+<span class="sourceLineNo">373</span> * Sets the number of map tasks for the given job configuration to the<a name="line.373"></a>
+<span class="sourceLineNo">374</span> * number of regions the given table has.<a name="line.374"></a>
+<span class="sourceLineNo">375</span> *<a name="line.375"></a>
+<span class="sourceLineNo">376</span> * @param table The table to get the region count for.<a name="line.376"></a>
+<span class="sourceLineNo">377</span> * @param job The current job configuration to adjust.<a name="line.377"></a>
+<span class="sourceLineNo">378</span> * @throws IOException When retrieving the table details fails.<a name="line.378"></a>
+<span class="sourceLineNo">379</span> */<a name="line.379"></a>
+<span class="sourceLineNo">380</span> public static void setNumMapTasks(String table, JobConf job)<a name="line.380"></a>
+<span class="sourceLineNo">381</span> throws IOException {<a name="line.381"></a>
+<span class="sourceLineNo">382</span> job.setNumMapTasks(MetaTableAccessor.getRegionCount(HBaseConfiguration.create(job),<a name="line.382"></a>
+<span class="sourceLineNo">383</span> TableName.valueOf(table)));<a name="line.383"></a>
+<span class="sourceLineNo">384</span> }<a name="line.384"></a>
+<span class="sourceLineNo">385</span><a name="line.385"></a>
+<span class="sourceLineNo">386</span> /**<a name="line.386"></a>
+<span class="sourceLineNo">387</span> * Sets the number of rows to return and cache with each scanner iteration.<a name="line.387"></a>
+<span class="sourceLineNo">388</span> * Higher caching values will enable faster mapreduce jobs at the expense of<a name="line.388"></a>
+<span class="sourceLineNo">389</span> * requiring more heap to contain the cached rows.<a name="line.389"></a>
+<span class="sourceLineNo">390</span> *<a name="line.390"></a>
+<span class="sourceLineNo">391</span> * @param job The current job configuration to adjust.<a name="line.391"></a>
+<span class="sourceLineNo">392</span> * @param batchSize The number of rows to return in batch with each scanner<a name="line.392"></a>
+<span class="sourceLineNo">393</span> * iteration.<a name="line.393"></a>
+<span class="sourceLineNo">394</span> */<a name="line.394"></a>
+<span class="sourceLineNo">395</span> public static void setScannerCaching(JobConf job, int batchSize) {<a name="line.395"></a>
+<span class="sourceLineNo">396</span> job.setInt("hbase.client.scanner.caching", batchSize);<a name="line.396"></a>
+<span class="sourceLineNo">397</span> }<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span> /**<a name="line.399"></a>
+<span class="sourceLineNo">400</span> * @see org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil#addDependencyJars(org.apache.hadoop.mapreduce.Job)<a name="line.400"></a>
+<span class="sourceLineNo">401</span> */<a name="line.401"></a>
+<span class="sourceLineNo">402</span> public static void addDependencyJars(JobConf job) throws IOException {<a name="line.402"></a>
+<span class="sourceLineNo">403</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addHBaseDependencyJars(job);<a name="line.403"></a>
+<span class="sourceLineNo">404</span> org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(<a name="line.404"></a>
+<span class="sourceLineNo">405</span> job,<a name="line.405"></a>
+<span class="sourceLineNo">406</span> job.getMapOutputKeyClass(),<a name="line.406"></a>
+<span class="sourceLineNo">407</span> job.getMapOutputValueClass(),<a name="line.407"></a>
+<span class="sourceLineNo">408</span> job.getOutputKeyClass(),<a name="line.408"></a>
+<span class="sourceLineNo">409</span> job.getOutputValueClass(),<a name="line.409"></a>
+<span class="sourceLineNo">410</span> job.getPartitionerClass(),<a name="line.410"></a>
+<span class="sourceLineNo">411</span> job.getClass("mapred.input.format.class", TextInputFormat.class, InputFormat.class),<a name="line.411"></a>
+<span class="sourceLineNo">412</span> job.getClass("mapred.output.format.class", TextOutputFormat.class, OutputFormat.class),<a name="line.412"></a>
+<span class="sourceLineNo">413</span> job.getCombinerClass());<a name="line.413"></a>
+<span class="sourceLineNo">414</span> }<a name="line.414"></a>
+<span class="sourceLineNo">415</span>}<a name="line.415"></a>