Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/09 08:55:50 UTC
svn commit: r907950 [11/15] - in /hadoop/hive/trunk: ./ checkstyle/
cli/src/java/org/apache/hadoop/hive/cli/
common/src/java/org/apache/hadoop/hive/common/
common/src/java/org/apache/hadoop/hive/conf/
contrib/src/java/org/apache/hadoop/hive/contrib/fil...
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Feb 9 07:55:30 2010
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -46,6 +45,7 @@
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
@@ -55,18 +55,18 @@
/**
* Mimics the actual query compiler in generating end to end plans and testing
- * them out
+ * them out.
*
*/
public class TestExecDriver extends TestCase {
static HiveConf conf;
- static private String tmpdir = "/tmp/" + System.getProperty("user.name")
+ private static String tmpdir = "/tmp/" + System.getProperty("user.name")
+ "/";
- static private Path tmppath = new Path(tmpdir);
- static private Hive db;
- static private FileSystem fs;
+ private static Path tmppath = new Path(tmpdir);
+ private static Hive db;
+ private static FileSystem fs;
static {
try {
@@ -96,7 +96,7 @@
// copy the test files into hadoop if required.
int i = 0;
Path[] hadoopDataFile = new Path[2];
- String[] testFiles = { "kv1.txt", "kv2.txt" };
+ String[] testFiles = {"kv1.txt", "kv2.txt"};
String testFileDir = "file://"
+ conf.get("test.data.files").replace('\\', '/').replace("c:", "");
for (String oneFile : testFiles) {
@@ -109,7 +109,7 @@
// load the test files into tables
i = 0;
db = Hive.get(conf);
- String[] srctables = { "src", "src2" };
+ String[] srctables = {"src", "src2"};
LinkedList<String> cols = new LinkedList<String>();
cols.add("key");
cols.add("value");
@@ -133,8 +133,7 @@
mr = PlanUtils.getMapRedWork();
}
- private static void fileDiff(String datafile, String testdir)
- throws Exception {
+ private static void fileDiff(String datafile, String testdir) throws Exception {
String testFileDir = conf.get("test.data.files");
System.out.println(testFileDir);
FileInputStream fi_gold = new FileInputStream(new File(testFileDir,
@@ -163,14 +162,14 @@
column, "", false));
ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
- Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
+ Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
.valueOf(100)));
ExprNodeDesc rhs = new ExprNodeGenericFuncDesc(
TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
- Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
+ Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
ArrayList<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>();
children3.add(lhs);
@@ -178,7 +177,7 @@
ExprNodeDesc desc = new ExprNodeGenericFuncDesc(
TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo("<")
- .getGenericUDF(), children3);
+ .getGenericUDF(), children3);
return new FilterDesc(desc, false);
}
@@ -205,7 +204,7 @@
Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
- + Utilities.tabCode, "key,value"), TextRecordReader.class), op3);
+ + Utilities.tabCode, "key,value"), TextRecordReader.class), op3);
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
@@ -224,8 +223,8 @@
// map-side work
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
- Utilities.makeList(getStringColumn("value")), outputColumns, true,
- -1, 1, -1));
+ Utilities.makeList(getStringColumn("value")), outputColumns, true,
+ -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -251,9 +250,9 @@
// map-side work
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
- Utilities
- .makeList(getStringColumn("key"), getStringColumn("value")),
- outputColumns, false, -1, 1, -1));
+ Utilities
+ .makeList(getStringColumn("key"), getStringColumn("value")),
+ outputColumns, false, -1, 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -272,7 +271,7 @@
}
/**
- * test reduce with multiple tagged inputs
+ * test reduce with multiple tagged inputs.
*/
@SuppressWarnings("unchecked")
private void populateMapRedPlan3(Table src, Table src2) {
@@ -285,8 +284,8 @@
// map-side work
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
- Utilities.makeList(getStringColumn("value")), outputColumns, true,
- Byte.valueOf((byte) 0), 1, -1));
+ Utilities.makeList(getStringColumn("value")), outputColumns, true,
+ Byte.valueOf((byte) 0), 1, -1));
Utilities.addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
@@ -294,8 +293,8 @@
Operator<ReduceSinkDesc> op2 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
- Utilities.makeList(getStringColumn("key")), outputColumns, true,
- Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1));
+ Utilities.makeList(getStringColumn("key")), outputColumns, true,
+ Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1));
Utilities.addMapWork(mr, src2, "b", op2);
mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
@@ -306,11 +305,11 @@
Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn(Utilities.ReduceField.ALIAS.toString()),
- new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
- new ExprNodeColumnDesc(TypeInfoFactory
- .getListTypeInfo(TypeInfoFactory.stringTypeInfo),
- Utilities.ReduceField.VALUE.toString(), "", false), "0",
- false)), outputColumns), op4);
+ new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
+ new ExprNodeColumnDesc(TypeInfoFactory
+ .getListTypeInfo(TypeInfoFactory.stringTypeInfo),
+ Utilities.ReduceField.VALUE.toString(), "", false), "0",
+ false)), outputColumns), op4);
mr.setReducer(op5);
}
@@ -326,13 +325,13 @@
}
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
- Utilities.makeList(getStringColumn("tkey"),
- getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
+ Utilities.makeList(getStringColumn("tkey"),
+ getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
- + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class), op1);
+ + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class), op1);
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
@@ -368,8 +367,8 @@
}
Operator<ReduceSinkDesc> op0 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("0")), Utilities
- .makeList(getStringColumn("0"), getStringColumn("1")),
- outputColumns, false, -1, 1, -1));
+ .makeList(getStringColumn("0"), getStringColumn("1")),
+ outputColumns, false, -1, 1, -1));
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
@@ -400,13 +399,13 @@
}
Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
- Utilities.makeList(getStringColumn("tkey"),
- getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
+ Utilities.makeList(getStringColumn("tkey"),
+ getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc(
"\'/bin/cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode,
- "tkey,tvalue"), TextRecordWriter.class, PlanUtils
- .getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+ "tkey,tvalue"), TextRecordWriter.class, PlanUtils
+ .getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
TextRecordReader.class), op1);
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
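
[Note on the pattern above: the reorderings of "static private" to "private static" (and, later in this commit, "final public static" to "public static final") follow the modifier order recommended by the Java Language Specification, which checkstyle's ModifierOrder check enforces. A minimal sketch of the rule; the class and field names below are made up for illustration, not taken from the Hive sources:

public class ModifierOrderSketch {
  // Recommended order: annotations, public/protected/private, abstract,
  // static, final, transient, volatile, synchronized, native, strictfp.
  private static int count;                 // preferred order
  static private int legacyCount;           // compiles, but flagged
  public static final String NAME = "x";    // preferred order
  final public static String LEGACY = "y";  // compiles, but flagged
}

Both spellings are legal Java with identical semantics; the rewrite is purely stylistic.]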
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Tue Feb 9 07:55:30 2010
@@ -39,6 +39,10 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
+/**
+ * TestExpressionEvaluator.
+ *
+ */
public class TestExpressionEvaluator extends TestCase {
// this is our row to test expressions on
@@ -166,7 +170,7 @@
Object resultO = eval.evaluate(r.o);
assertEquals(Double.valueOf("1"), ObjectInspectorUtils
.copyToStandardObject(resultO, resultOI,
- ObjectInspectorCopyOption.JAVA));
+ ObjectInspectorCopyOption.JAVA));
System.out.println("testExprNodeConversionEvaluator ok");
} catch (Throwable e) {
e.printStackTrace();
@@ -199,92 +203,92 @@
int basetimes = 100000;
measureSpeed("1 + 2", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(
- "+", new ExprNodeConstantDesc(1), new ExprNodeConstantDesc(2))),
+ "+", new ExprNodeConstantDesc(1), new ExprNodeConstantDesc(2))),
r, Integer.valueOf(1 + 2));
measureSpeed("1 + 2 - 3", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("-",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("+", new ExprNodeConstantDesc(1),
- new ExprNodeConstantDesc(2)),
- new ExprNodeConstantDesc(3))), r, Integer.valueOf(1 + 2 - 3));
+ .getFuncExprNodeDesc("-",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("+", new ExprNodeConstantDesc(1),
+ new ExprNodeConstantDesc(2)),
+ new ExprNodeConstantDesc(3))), r, Integer.valueOf(1 + 2 - 3));
measureSpeed("1 + 2 - 3 + 4", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("+",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("-",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("+",
- new ExprNodeConstantDesc(1),
- new ExprNodeConstantDesc(2)),
- new ExprNodeConstantDesc(3)),
- new ExprNodeConstantDesc(4))), r, Integer
+ .getFuncExprNodeDesc("+",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("-",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("+",
+ new ExprNodeConstantDesc(1),
+ new ExprNodeConstantDesc(2)),
+ new ExprNodeConstantDesc(3)),
+ new ExprNodeConstantDesc(4))), r, Integer
.valueOf(1 + 2 - 3 + 4));
measureSpeed("concat(\"1\", \"2\")", basetimes * 100,
ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat", new ExprNodeConstantDesc("1"),
- new ExprNodeConstantDesc("2"))), r, "12");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat", new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2"))), r, "12");
measureSpeed("concat(concat(\"1\", \"2\"), \"3\")", basetimes * 100,
ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- new ExprNodeConstantDesc("1"),
- new ExprNodeConstantDesc("2")),
- new ExprNodeConstantDesc("3"))), r, "123");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2")),
+ new ExprNodeConstantDesc("3"))), r, "123");
measureSpeed("concat(concat(concat(\"1\", \"2\"), \"3\"), \"4\")",
basetimes * 100, ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- new ExprNodeConstantDesc("1"),
- new ExprNodeConstantDesc("2")),
- new ExprNodeConstantDesc("3")),
- new ExprNodeConstantDesc("4"))), r, "1234");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2")),
+ new ExprNodeConstantDesc("3")),
+ new ExprNodeConstantDesc("4"))), r, "1234");
ExprNodeDesc constant1 = new ExprNodeConstantDesc(1);
ExprNodeDesc constant2 = new ExprNodeConstantDesc(2);
measureSpeed("concat(col1[1], cola[1])", basetimes * 10,
ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat", getListIndexNode(
- new ExprNodeColumnDesc(col1Type, "col1", "", false),
- constant1), getListIndexNode(new ExprNodeColumnDesc(
- colaType, "cola", "", false), constant1))), r, "1b");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat", getListIndexNode(
+ new ExprNodeColumnDesc(col1Type, "col1", "", false),
+ constant1), getListIndexNode(new ExprNodeColumnDesc(
+ colaType, "cola", "", false), constant1))), r, "1b");
measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", basetimes * 10,
ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat", getListIndexNode(
- new ExprNodeColumnDesc(col1Type, "col1", "",
- false), constant1), getListIndexNode(
- new ExprNodeColumnDesc(colaType, "cola", "",
- false), constant1)), getListIndexNode(
- new ExprNodeColumnDesc(col1Type, "col1", "", false),
- constant2))), r, "1b2");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat", getListIndexNode(
+ new ExprNodeColumnDesc(col1Type, "col1", "",
+ false), constant1), getListIndexNode(
+ new ExprNodeColumnDesc(colaType, "cola", "",
+ false), constant1)), getListIndexNode(
+ new ExprNodeColumnDesc(col1Type, "col1", "", false),
+ constant2))), r, "1b2");
measureSpeed(
"concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
basetimes * 10, ExprNodeEvaluatorFactory
- .get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat",
- getListIndexNode(new ExprNodeColumnDesc(
- col1Type, "col1", "", false),
- constant1), getListIndexNode(
- new ExprNodeColumnDesc(colaType,
- "cola", "", false), constant1)),
- getListIndexNode(new ExprNodeColumnDesc(col1Type,
- "col1", "", false), constant2)),
- getListIndexNode(new ExprNodeColumnDesc(colaType, "cola",
- "", false), constant2))), r, "1b2c");
+ .get(TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ TypeCheckProcFactory.DefaultExprProcessor
+ .getFuncExprNodeDesc("concat",
+ getListIndexNode(new ExprNodeColumnDesc(
+ col1Type, "col1", "", false),
+ constant1), getListIndexNode(
+ new ExprNodeColumnDesc(colaType,
+ "cola", "", false), constant1)),
+ getListIndexNode(new ExprNodeColumnDesc(col1Type,
+ "col1", "", false), constant2)),
+ getListIndexNode(new ExprNodeColumnDesc(colaType, "cola",
+ "", false), constant2))), r, "1b2c");
} catch (Throwable e) {
e.printStackTrace();
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java Tue Feb 9 07:55:30 2010
@@ -26,6 +26,10 @@
import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+/**
+ * TestHashMapWrapper.
+ *
+ */
public class TestHashMapWrapper extends TestCase {
public void testHashMapWrapper() throws Exception {
@@ -38,8 +42,7 @@
try {
// NO cache
- HashMapWrapper<String, String> wrapper = new HashMapWrapper<String, String>(
- 0);
+ HashMapWrapper<String, String> wrapper = new HashMapWrapper<String, String>(0);
insertAll(wrapper, mem_map);
checkAll(wrapper, mem_map);
wrapper.close(); // clean up temporary files
@@ -192,10 +195,10 @@
String val = map.get(k);
assertTrue(
"some HashMapWrapper value is not in main memory HashMap: map_val = "
- + map_val + "; val = " + val, map_val != null && val != null);
+ + map_val + "; val = " + val, map_val != null && val != null);
assertTrue(
"value in HashMapWrapper is not the same as MM HashMap: map_val = "
- + map_val + "; val = " + val, val.equals(map_val));
+ + map_val + "; val = " + val, val.equals(map_val));
}
// check all inserted elements are in HashMapWrapper
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Tue Feb 9 07:55:30 2010
@@ -28,13 +28,13 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.CollectDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -48,6 +48,10 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
+/**
+ * TestOperators.
+ *
+ */
public class TestOperators extends TestCase {
// this is our row to test expressions on
@@ -106,7 +110,7 @@
// runtime initialization
op.initialize(new JobConf(TestOperators.class),
- new ObjectInspector[] { r[0].oi });
+ new ObjectInspector[] {r[0].oi});
for (InspectableObject oner : r) {
op.process(oner.o, 0);
@@ -165,7 +169,7 @@
// Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
op.initialize(new JobConf(TestOperators.class),
- new ObjectInspector[] { r[0].oi });
+ new ObjectInspector[] {r[0].oi});
// evaluate on row
for (int i = 0; i < 5; i++) {
@@ -220,7 +224,7 @@
cd, sop);
op.initialize(new JobConf(TestOperators.class),
- new ObjectInspector[] { r[0].oi });
+ new ObjectInspector[] {r[0].oi});
// evaluate on row
for (int i = 0; i < 5; i++) {
@@ -265,14 +269,16 @@
ArrayList<String> aliases = new ArrayList<String>();
aliases.add("a");
aliases.add("b");
- LinkedHashMap<String, ArrayList<String>> pathToAliases = new LinkedHashMap<String, ArrayList<String>>();
+ LinkedHashMap<String, ArrayList<String>> pathToAliases =
+ new LinkedHashMap<String, ArrayList<String>>();
pathToAliases.put("/testDir", aliases);
// initialize pathToTableInfo
// Default: treat the table as a single column "col"
TableDesc td = Utilities.defaultTd;
PartitionDesc pd = new PartitionDesc(td, null);
- LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc> pathToPartitionInfo = new LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc>();
+ LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc> pathToPartitionInfo =
+ new LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc>();
pathToPartitionInfo.put("/testDir", pd);
// initialize aliasToWork
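
[Note on the pattern above: the LinkedHashMap declarations were split because their generic type arguments push the line past the configured length limit; the idiom is to break after "=" and indent the instantiation on its own line. A self-contained sketch of the wrap, assuming a typical line-length limit (the exact limit lives in the checkstyle/ config touched by this commit, not shown in this mail):

import java.util.ArrayList;
import java.util.LinkedHashMap;

public class LineWrapSketch {
  public static void main(String[] args) {
    // Break after "=" and indent the continuation, rather than letting
    // the declaration overflow the line-length limit:
    LinkedHashMap<String, ArrayList<String>> pathToAliases =
        new LinkedHashMap<String, ArrayList<String>>();
    pathToAliases.put("/testDir", new ArrayList<String>());
    System.out.println(pathToAliases);
  }
}
]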
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Tue Feb 9 07:55:30 2010
@@ -35,6 +35,10 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
+/**
+ * TestPlan.
+ *
+ */
public class TestPlan extends TestCase {
public void testPlan() throws Exception {
@@ -65,7 +69,8 @@
LinkedHashMap<String, PartitionDesc> pt = new LinkedHashMap<String, PartitionDesc>();
pt.put("/tmp/testfolder", partDesc);
- LinkedHashMap<String, Operator<? extends Serializable>> ao = new LinkedHashMap<String, Operator<? extends Serializable>>();
+ LinkedHashMap<String, Operator<? extends Serializable>> ao =
+ new LinkedHashMap<String, Operator<? extends Serializable>>();
ao.put("a", op);
MapredWork mrwork = new MapredWork();
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Tue Feb 9 07:55:30 2010
@@ -40,15 +40,19 @@
import org.apache.hadoop.hive.ql.tools.LineageInfo;
import org.apache.hadoop.mapred.TextInputFormat;
+/**
+ * TestHiveHistory.
+ *
+ */
public class TestHiveHistory extends TestCase {
static HiveConf conf;
- static private String tmpdir = "/tmp/" + System.getProperty("user.name")
+ private static String tmpdir = "/tmp/" + System.getProperty("user.name")
+ "/";
- static private Path tmppath = new Path(tmpdir);
- static private Hive db;
- static private FileSystem fs;
+ private static Path tmppath = new Path(tmpdir);
+ private static Hive db;
+ private static FileSystem fs;
/*
* intialize the tables
@@ -74,7 +78,7 @@
// copy the test files into hadoop if required.
int i = 0;
Path[] hadoopDataFile = new Path[2];
- String[] testFiles = { "kv1.txt", "kv2.txt" };
+ String[] testFiles = {"kv1.txt", "kv2.txt"};
String testFileDir = "file://"
+ conf.get("test.data.files").replace('\\', '/').replace("c:", "");
for (String oneFile : testFiles) {
@@ -87,7 +91,7 @@
// load the test files into tables
i = 0;
db = Hive.get(conf);
- String[] srctables = { "src", "src2" };
+ String[] srctables = {"src", "src2"};
LinkedList<String> cols = new LinkedList<String>();
cols.add("key");
cols.add("value");
@@ -106,7 +110,7 @@
}
/**
- * check history file output for this query.als
+ * Check history file output for this query.
*/
public void testSimpleQuery() {
new LineageInfo();
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java Tue Feb 9 07:55:30 2010
@@ -21,7 +21,7 @@
import java.io.Serializable;
/**
- * Simple test object
+ * Simple test object.
*/
public class JavaTestObjFlatFileInputFormat implements Serializable {
public String s;
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java Tue Feb 9 07:55:30 2010
@@ -18,6 +18,10 @@
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
+/**
+ * PerformTestRCFileAndSeqFile.
+ *
+ */
public class PerformTestRCFileAndSeqFile extends TestCase {
private final Configuration conf = new Configuration();
@@ -257,8 +261,7 @@
return fs.getUri().toString().startsWith("file://");
}
- public void performSequenceFileRead(FileSystem fs, int count, Path file)
- throws IOException {
+ public void performSequenceFileRead(FileSystem fs, int count, Path file) throws IOException {
SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
ByteWritable key = new ByteWritable();
BytesRefArrayWritable val = new BytesRefArrayWritable();
@@ -329,7 +332,7 @@
ok = ok && (checkRow.get(0).equals(cols.get(0)));
ok = ok
&& checkRow.get(allColumnsNumber - 1).equals(
- cols.get(allColumnsNumber - 1));
+ cols.get(allColumnsNumber - 1));
}
if (!ok) {
throw new IllegalStateException("Compare read and write error.");
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java Tue Feb 9 07:55:30 2010
@@ -38,6 +38,10 @@
//import org.apache.hadoop.contrib.serialization.thrift.*;
+/**
+ * TestFlatFileInputFormat.
+ *
+ */
public class TestFlatFileInputFormat extends TestCase {
public void testFlatFileInputJava() throws Exception {
@@ -67,8 +71,8 @@
job
.setClass(
- FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
- JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class);
+ FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+ JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class);
//
// Write some data out to a flat file
@@ -88,7 +92,8 @@
//
// Construct the reader
//
- FileInputFormat<Void, FlatFileInputFormat.RowContainer<Serializable>> format = new FlatFileInputFormat<Serializable>();
+ FileInputFormat<Void, FlatFileInputFormat.RowContainer<Serializable>> format =
+ new FlatFileInputFormat<Serializable>();
InputSplit[] splits = format.getSplits(job, 1);
// construct the record reader
@@ -148,8 +153,8 @@
job
.setClass(
- FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
- RecordTestObj.class, Writable.class);
+ FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+ RecordTestObj.class, Writable.class);
//
// Write some data out to a flat file
@@ -170,7 +175,8 @@
//
// Construct the reader
//
- FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format = new FlatFileInputFormat<Writable>();
+ FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format =
+ new FlatFileInputFormat<Writable>();
InputSplit[] splits = format.getSplits(job, 1);
// construct the record reader
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java Tue Feb 9 07:55:30 2010
@@ -21,6 +21,10 @@
import junit.framework.TestCase;
+/**
+ * TestHiveInputOutputBuffer.
+ *
+ */
public class TestHiveInputOutputBuffer extends TestCase {
public void testReadAndWrite() throws IOException {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Tue Feb 9 07:55:30 2010
@@ -30,7 +30,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -55,6 +54,10 @@
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
+/**
+ * TestRCFile.
+ *
+ */
public class TestRCFile extends TestCase {
private static final Log LOG = LogFactory.getLog(TestRCFile.class);
@@ -89,10 +92,10 @@
private static Writable[] expectedFieldsData = {
new ByteWritable((byte) 123), new ShortWritable((short) 456),
new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3),
- new Text("hive and hadoop"), null, null };
+ new Text("hive and hadoop"), null, null};
- private static Object[] expectedPartitalFieldsData = { null, null,
- new IntWritable(789), new LongWritable(1000), null, null, null, null };
+ private static Object[] expectedPartitalFieldsData = {null, null,
+ new IntWritable(789), new LongWritable(1000), null, null, null, null};
private static BytesRefArrayWritable patialS = new BytesRefArrayWritable();
private static byte[][] bytesArray = null;
@@ -100,11 +103,11 @@
private static BytesRefArrayWritable s = null;
static {
try {
- bytesArray = new byte[][] { "123".getBytes("UTF-8"),
+ bytesArray = new byte[][] {"123".getBytes("UTF-8"),
"456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
"1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"),
"hive and hadoop".getBytes("UTF-8"), new byte[0],
- "NULL".getBytes("UTF-8") };
+ "NULL".getBytes("UTF-8")};
s = new BytesRefArrayWritable(bytesArray.length);
s.set(0, new BytesRefWritable("123".getBytes("UTF-8")));
s.set(1, new BytesRefWritable("456".getBytes("UTF-8")));
@@ -132,14 +135,14 @@
public void testSimpleReadAndWrite() throws IOException, SerDeException {
fs.delete(file, true);
- byte[][] record_1 = { "123".getBytes("UTF-8"), "456".getBytes("UTF-8"),
+ byte[][] record_1 = {"123".getBytes("UTF-8"), "456".getBytes("UTF-8"),
"789".getBytes("UTF-8"), "1000".getBytes("UTF-8"),
"5.3".getBytes("UTF-8"), "hive and hadoop".getBytes("UTF-8"),
- new byte[0], "NULL".getBytes("UTF-8") };
- byte[][] record_2 = { "100".getBytes("UTF-8"), "200".getBytes("UTF-8"),
+ new byte[0], "NULL".getBytes("UTF-8")};
+ byte[][] record_2 = {"100".getBytes("UTF-8"), "200".getBytes("UTF-8"),
"123".getBytes("UTF-8"), "1000".getBytes("UTF-8"),
"5.3".getBytes("UTF-8"), "hive and hadoop".getBytes("UTF-8"),
- new byte[0], "NULL".getBytes("UTF-8") };
+ new byte[0], "NULL".getBytes("UTF-8")};
RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
@@ -160,15 +163,15 @@
writer.append(bytes);
writer.close();
- Object[] expectedRecord_1 = { new ByteWritable((byte) 123),
+ Object[] expectedRecord_1 = {new ByteWritable((byte) 123),
new ShortWritable((short) 456), new IntWritable(789),
new LongWritable(1000), new DoubleWritable(5.3),
- new Text("hive and hadoop"), null, null };
+ new Text("hive and hadoop"), null, null};
- Object[] expectedRecord_2 = { new ByteWritable((byte) 100),
+ Object[] expectedRecord_2 = {new ByteWritable((byte) 100),
new ShortWritable((short) 200), new IntWritable(123),
new LongWritable(1000), new DoubleWritable(5.3),
- new Text("hive and hadoop"), null, null };
+ new Text("hive and hadoop"), null, null};
RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
@@ -189,7 +192,7 @@
Object fieldData = oi.getStructFieldData(row, fieldRefs.get(j));
Object standardWritableData = ObjectInspectorUtils
.copyToStandardObject(fieldData, fieldRefs.get(j)
- .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+ .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
if (i == 0) {
assertEquals("Field " + i, standardWritableData, expectedRecord_1[j]);
} else {
@@ -318,7 +321,7 @@
Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
Object standardWritableData = ObjectInspectorUtils
.copyToStandardObject(fieldData, fieldRefs.get(i)
- .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+ .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
assertEquals("Field " + i, standardWritableData, expectedFieldsData[i]);
}
// Serialize
@@ -364,7 +367,7 @@
Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
Object standardWritableData = ObjectInspectorUtils
.copyToStandardObject(fieldData, fieldRefs.get(i)
- .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+ .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
assertEquals("Field " + i, standardWritableData,
expectedPartitalFieldsData[i]);
}
@@ -380,7 +383,7 @@
long cost = System.currentTimeMillis() - start;
LOG.debug("reading fully costs:" + cost + " milliseconds");
}
-
+
public void testSynAndSplit() throws IOException {
splitBeforeSync();
splitRightBeforeSync();
@@ -392,20 +395,20 @@
private void splitBeforeSync() throws IOException {
writeThenReadByRecordReader(600, 1000, 2, 1, null);
}
-
+
private void splitRightBeforeSync() throws IOException {
writeThenReadByRecordReader(500, 1000, 2, 17750, null);
}
-
+
private void splitInMiddleOfSync() throws IOException {
writeThenReadByRecordReader(500, 1000, 2, 17760, null);
-
+
}
-
+
private void splitRightAfterSync() throws IOException {
writeThenReadByRecordReader(500, 1000, 2, 17770, null);
}
-
+
private void splitAfterSync() throws IOException {
writeThenReadByRecordReader(500, 1000, 2, 19950, null);
}
@@ -413,7 +416,8 @@
private void writeThenReadByRecordReader(int intervalRecordCount,
int writeCount, int splitNumber, long minSplitSize, CompressionCodec codec)
throws IOException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".") + "/mapred/testsmallfirstsplit");
+ Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ + "/mapred/testsmallfirstsplit");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
Configuration cloneConf = new Configuration(conf);
@@ -429,12 +433,13 @@
bytes.set(i, cu);
}
for (int i = 0; i < writeCount; i++) {
- if(i == intervalRecordCount)
+ if (i == intervalRecordCount) {
System.out.println("write position:" + writer.getLength());
+ }
writer.append(bytes);
}
writer.close();
-
+
RCFileInputFormat inputFormat = new RCFileInputFormat();
JobConf jonconf = new JobConf(cloneConf);
jonconf.set("mapred.input.dir", testDir.toString());
@@ -447,12 +452,13 @@
RecordReader rr = inputFormat.getRecordReader(splits[i], jonconf, Reporter.NULL);
Object key = rr.createKey();
Object value = rr.createValue();
- while(rr.next(key, value))
- readCount ++;
+ while (rr.next(key, value)) {
+ readCount++;
+ }
System.out.println("The " + i + "th split read "
+ (readCount - previousReadCount));
}
assertEquals("readCount should be equal to writeCount", readCount, writeCount);
}
-
+
}
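
[Note on the pattern above: besides trailing-whitespace removal, the TestRCFile hunks add braces to single-statement if and while bodies, presumably checkstyle's NeedBraces check. The braced form guards against a later edit adding a second statement that silently falls outside the conditional. A trivial sketch:

public class NeedBracesSketch {
  public static void main(String[] args) {
    int readCount = 0;
    // Unbraced:  while (readCount < 3) readCount++;  -- easy to mis-edit.
    // Braced form, as the commit rewrites it:
    while (readCount < 3) {
      readCount++;
    }
    System.out.println("readCount = " + readCount);
  }
}
]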
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Tue Feb 9 07:55:30 2010
@@ -41,6 +41,10 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.thrift.protocol.TBinaryProtocol;
+/**
+ * TestHive.
+ *
+ */
public class TestHive extends TestCase {
private Hive hm;
private HiveConf hiveConf;
@@ -57,7 +61,7 @@
System.err.println(StringUtils.stringifyException(e));
System.err
.println("Unable to initialize Hive Metastore using configruation: \n "
- + hiveConf);
+ + hiveConf);
throw e;
}
}
@@ -71,7 +75,7 @@
System.err.println(StringUtils.stringifyException(e));
System.err
.println("Unable to close Hive Metastore using configruation: \n "
- + hiveConf);
+ + hiveConf);
throw e;
}
}
@@ -115,9 +119,9 @@
List<FieldSchema> partCols = new ArrayList<FieldSchema>();
partCols
.add(new FieldSchema(
- "ds",
- Constants.STRING_TYPE_NAME,
- "partition column, date but in string format as date type is not yet supported in QL"));
+ "ds",
+ Constants.STRING_TYPE_NAME,
+ "partition column, date but in string format as date type is not yet supported in QL"));
tbl.setPartCols(partCols);
tbl.setNumBuckets((short) 512);
@@ -156,7 +160,7 @@
tbl.getRetention(), ft.getRetention());
assertEquals("Data location is not set correctly", wh
.getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- tableName).toString(), ft.getDataLocation().toString());
+ tableName).toString(), ft.getDataLocation().toString());
// now that URI is set correctly, set the original table's uri and then
// compare the two tables
tbl.setDataLocation(ft.getDataLocation());
@@ -188,7 +192,7 @@
}
/**
- * Tests create and fetch of a thrift based table
+ * Tests create and fetch of a thrift based table.
*
* @throws Throwable
*/
@@ -229,7 +233,7 @@
tbl.getRetention(), ft.getRetention());
assertEquals("Data location is not set correctly", wh
.getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- tableName).toString(), ft.getDataLocation().toString());
+ tableName).toString(), ft.getDataLocation().toString());
// now that URI is set correctly, set the original table's uri and then
// compare the two tables
tbl.setDataLocation(ft.getDataLocation());
@@ -251,8 +255,7 @@
}
}
- private static Table createTestTable(String dbName, String tableName)
- throws HiveException {
+ private static Table createTestTable(String dbName, String tableName) throws HiveException {
Table tbl = new Table(tableName);
tbl.getTTable().setDbName(dbName);
tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Tue Feb 9 07:55:30 2010
@@ -18,6 +18,10 @@
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.thrift.TException;
+/**
+ * TestHiveMetaStoreChecker.
+ *
+ */
public class TestHiveMetaStoreChecker extends TestCase {
private Hive hive;
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java Tue Feb 9 07:55:30 2010
@@ -24,10 +24,14 @@
import org.apache.hadoop.hive.ql.tools.LineageInfo;
+/**
+ * TestLineageInfo.
+ *
+ */
public class TestLineageInfo extends TestCase {
/**
- * Checks whether the test outputs match the expected outputs
+ * Checks whether the test outputs match the expected outputs.
*
* @param lep
* The LineageInfo extracted from the test
@@ -49,8 +53,9 @@
public void testSimpleQuery() {
LineageInfo lep = new LineageInfo();
try {
- lep
- .getLineageInfo("INSERT OVERWRITE TABLE dest1 partition (ds = '111') SELECT s.* FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s WHERE s.ds='2008-04-08' and s.hr='11'");
+ lep.getLineageInfo("INSERT OVERWRITE TABLE dest1 partition (ds = '111') "
+ + "SELECT s.* FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s "
+ + "WHERE s.ds='2008-04-08' and s.hr='11'");
TreeSet<String> i = new TreeSet<String>();
TreeSet<String> o = new TreeSet<String>();
i.add("srcpart");
@@ -65,8 +70,10 @@
public void testSimpleQuery2() {
LineageInfo lep = new LineageInfo();
try {
- lep
- .getLineageInfo("FROM (FROM src select src.key, src.value WHERE src.key < 10 UNION ALL FROM src SELECT src.* WHERE src.key > 10 ) unioninput INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/union.out' SELECT unioninput.*");
+ lep.getLineageInfo("FROM (FROM src select src.key, src.value "
+ + "WHERE src.key < 10 UNION ALL FROM src SELECT src.* WHERE src.key > 10 ) unioninput "
+ + "INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/union.out' "
+ + "SELECT unioninput.*");
TreeSet<String> i = new TreeSet<String>();
TreeSet<String> o = new TreeSet<String>();
i.add("src");
@@ -80,8 +87,10 @@
public void testSimpleQuery3() {
LineageInfo lep = new LineageInfo();
try {
- lep
- .getLineageInfo("FROM (FROM src select src.key, src.value WHERE src.key < 10 UNION ALL FROM src1 SELECT src1.* WHERE src1.key > 10 ) unioninput INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/union.out' SELECT unioninput.*");
+ lep.getLineageInfo("FROM (FROM src select src.key, src.value "
+ + "WHERE src.key < 10 UNION ALL FROM src1 SELECT src1.* WHERE src1.key > 10 ) unioninput "
+ + "INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/union.out' "
+ + "SELECT unioninput.*");
TreeSet<String> i = new TreeSet<String>();
TreeSet<String> o = new TreeSet<String>();
i.add("src");
@@ -96,8 +105,7 @@
public void testSimpleQuery4() {
LineageInfo lep = new LineageInfo();
try {
- lep
- .getLineageInfo("FROM ( FROM ( FROM src1 src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20) a RIGHT OUTER JOIN ( FROM src2 src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25) b ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4) c SELECT c.c1, c.c2, c.c3, c.c4");
+ lep.getLineageInfo("FROM ( FROM ( FROM src1 src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20) a RIGHT OUTER JOIN ( FROM src2 src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25) b ON (a.c1 = b.c3) SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4) c SELECT c.c1, c.c2, c.c3, c.c4");
TreeSet<String> i = new TreeSet<String>();
TreeSet<String> o = new TreeSet<String>();
i.add("src1");
@@ -112,8 +120,8 @@
public void testSimpleQuery5() {
LineageInfo lep = new LineageInfo();
try {
- lep
- .getLineageInfo("insert overwrite table x select a.y, b.y from a a full outer join b b on (a.x = b.y)");
+ lep.getLineageInfo("insert overwrite table x select a.y, b.y "
+ + "from a a full outer join b b on (a.x = b.y)");
TreeSet<String> i = new TreeSet<String>();
TreeSet<String> o = new TreeSet<String>();
i.add("a");
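
[Note on the pattern above: the long query literals are split at clause boundaries and rejoined with "+". Because both operands are compile-time constants, the compiler folds the concatenation into a single string in the class file, so the rewrite is purely cosmetic. A sketch of the idiom, reusing the first query from the hunk above:

public class StringWrapSketch {
  public static void main(String[] args) {
    // Folded to one constant at compile time; no runtime concatenation:
    String query = "INSERT OVERWRITE TABLE dest1 partition (ds = '111') "
        + "SELECT s.* FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s "
        + "WHERE s.ds='2008-04-08' and s.hr='11'";
    System.out.println(query.length());
  }
}
]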
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Tue Feb 9 07:55:30 2010
@@ -28,9 +28,17 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+/**
+ * UDAFTestMax.
+ *
+ */
public class UDAFTestMax extends UDAF {
- static public class MaxShortEvaluator implements UDAFEvaluator {
+ /**
+ * MaxShortEvaluator.
+ *
+ */
+ public static class MaxShortEvaluator implements UDAFEvaluator {
private short mMax;
private boolean mEmpty;
@@ -69,7 +77,11 @@
}
}
- static public class MaxIntEvaluator implements UDAFEvaluator {
+ /**
+ * MaxIntEvaluator.
+ *
+ */
+ public static class MaxIntEvaluator implements UDAFEvaluator {
private int mMax;
private boolean mEmpty;
@@ -108,7 +120,11 @@
}
}
- static public class MaxLongEvaluator implements UDAFEvaluator {
+ /**
+ * MaxLongEvaluator.
+ *
+ */
+ public static class MaxLongEvaluator implements UDAFEvaluator {
private long mMax;
private boolean mEmpty;
@@ -147,7 +163,11 @@
}
}
- static public class MaxFloatEvaluator implements UDAFEvaluator {
+ /**
+ * MaxFloatEvaluator.
+ *
+ */
+ public static class MaxFloatEvaluator implements UDAFEvaluator {
private float mMax;
private boolean mEmpty;
@@ -186,7 +206,11 @@
}
}
- static public class MaxDoubleEvaluator implements UDAFEvaluator {
+ /**
+ * MaxDoubleEvaluator.
+ *
+ */
+ public static class MaxDoubleEvaluator implements UDAFEvaluator {
private double mMax;
private boolean mEmpty;
@@ -225,7 +249,11 @@
}
}
- static public class MaxStringEvaluator implements UDAFEvaluator {
+ /**
+ * MaxStringEvaluator.
+ *
+ */
+ public static class MaxStringEvaluator implements UDAFEvaluator {
private Text mMax;
private boolean mEmpty;
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Tue Feb 9 07:55:30 2010
@@ -32,7 +32,7 @@
import org.apache.hadoop.io.Text;
/**
- * Mimics oracle's function translate(str1, str2, str3)
+ * Mimics oracle's function translate(str1, str2, str3).
*/
public class GenericUDFTestTranslate extends GenericUDF {
ObjectInspector[] argumentOIs;
@@ -44,16 +44,15 @@
int unit = i % 10;
return (i <= 0) ? "" : (i != 11 && unit == 1) ? i + "st"
: (i != 12 && unit == 2) ? i + "nd" : (i != 13 && unit == 3) ? i + "rd"
- : i + "th";
+ : i + "th";
}
@Override
- public ObjectInspector initialize(ObjectInspector[] arguments)
- throws UDFArgumentException {
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 3) {
throw new UDFArgumentLengthException(
"The function TRANSLATE(expr,from_string,to_string) accepts exactly 3 arguments, but "
- + arguments.length + " arguments is found.");
+ + arguments.length + " arguments is found.");
}
for (int i = 0; i < 3; i++) {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java Tue Feb 9 07:55:30 2010
@@ -23,7 +23,11 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-public class extracturl {
+/**
+ * extracturl.
+ *
+ */
+public final class extracturl {
protected static final Pattern pattern = Pattern.compile(
"<a href=\"http://([\\w\\d]+\\.html)\">link</a>",
@@ -47,4 +51,8 @@
System.exit(1);
}
}
+
+ private extracturl() {
+ // prevent instantiation
+ }
}
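
[Note on the pattern above: making extracturl final and giving it a private constructor is the standard treatment for a class that only exposes static members (checkstyle's FinalClass and HideUtilityClassConstructor checks). A generic sketch of the pattern with a made-up class name:

public final class UtilitySketch {
  // final + private constructor: the class can be neither subclassed nor
  // instantiated, which is the point of a static-only utility holder.
  private UtilitySketch() {
    // prevent instantiation
  }

  public static boolean isEmpty(String s) {
    return s == null || s.length() == 0;
  }

  public static void main(String[] args) {
    System.out.println(isEmpty(""));
  }
}
]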
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java Tue Feb 9 07:55:30 2010
@@ -37,6 +37,10 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+/**
+ * TestSerDe.
+ *
+ */
public class TestSerDe implements SerDe {
public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName());
@@ -62,7 +66,7 @@
}
}
- final public static String DefaultSeparator = "\002";
+ public static final String DefaultSeparator = "\002";
private String separator;
// constant for now, will make it configurable later.
@@ -79,17 +83,16 @@
separator = DefaultSeparator;
}
- public void initialize(Configuration job, Properties tbl)
- throws SerDeException {
+ public void initialize(Configuration job, Properties tbl) throws SerDeException {
separator = DefaultSeparator;
- String alt_sep = tbl.getProperty("testserde.default.serialization.format");
- if (alt_sep != null && alt_sep.length() > 0) {
+ String altSep = tbl.getProperty("testserde.default.serialization.format");
+ if (altSep != null && altSep.length() > 0) {
try {
- byte b[] = new byte[1];
- b[0] = Byte.valueOf(alt_sep).byteValue();
+ byte[] b = new byte[1];
+ b[0] = Byte.valueOf(altSep).byteValue();
separator = new String(b);
} catch (NumberFormatException e) {
- separator = alt_sep;
+ separator = altSep;
}
}
@@ -99,7 +102,7 @@
// Treat it as a table with a single column called "col"
cachedObjectInspector = ObjectInspectorFactory
.getReflectionObjectInspector(ColumnSet.class,
- ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+ ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
} else {
columnNames = Arrays.asList(columnProperty.split(","));
cachedObjectInspector = MetadataListStructObjectInspector
@@ -166,8 +169,7 @@
Text serializeCache = new Text();
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
if (objInspector.getCategory() != Category.STRUCT) {
throw new SerDeException(getClass().toString()
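
[Note on the pattern above: two more mechanical rewrites appear in TestSerDe and below. C-style array declarations ("byte b[]") become Java-style ("byte[] b"), and snake_case locals such as alt_sep become camelCase, presumably via checkstyle's ArrayTypeStyle and LocalVariableName checks. A sketch of both:

public class ArrayStyleSketch {
  public static void main(String[] args) {
    // byte buf[] = new byte[1];  // C-style: brackets attach to the name
    byte[] buf = new byte[1];     // Java-style: the type reads "byte array"
    String altSep = "\001";       // camelCase local, was alt_sep
    buf[0] = Byte.valueOf("1").byteValue();
    System.out.println(buf[0] + " " + altSep.length());
  }
}
]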
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java Tue Feb 9 07:55:30 2010
@@ -22,10 +22,14 @@
import org.apache.hadoop.hive.common.io.NonSyncByteArrayOutputStream;
/**
- * Extensions to bytearrayinput/output streams
+ * Extensions to bytearrayinput/output streams.
*
*/
public class ByteStream {
+ /**
+ * Input.
+ *
+ */
public static class Input extends NonSyncByteArrayInputStream {
public byte[] getData() {
return buf;
@@ -54,6 +58,10 @@
}
}
+ /**
+ * Output.
+ *
+ */
public static class Output extends NonSyncByteArrayOutputStream {
@Override
public byte[] getData() {
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java Tue Feb 9 07:55:30 2010
@@ -23,6 +23,10 @@
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
+/**
+ * ByteStreamTypedSerDe.
+ *
+ */
public abstract class ByteStreamTypedSerDe extends TypedSerDe {
protected ByteStream.Input bis;
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java Tue Feb 9 07:55:30 2010
@@ -23,9 +23,13 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
-public class ColumnProjectionUtils {
+/**
+ * ColumnProjectionUtils.
+ *
+ */
+public final class ColumnProjectionUtils {
- public static String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids";
+ public static final String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids";
/**
* Sets read columns' ids(start from zero) for RCFile's Reader. Once a column
@@ -107,4 +111,8 @@
conf.set(READ_COLUMN_IDS_CONF_STR, "");
}
+ private ColumnProjectionUtils() {
+ // prevent instantiation
+ }
+
}
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java Tue Feb 9 07:55:30 2010
@@ -20,6 +20,10 @@
import java.util.ArrayList;
+/**
+ * ColumnSet.
+ *
+ */
public class ColumnSet {
public ArrayList<String> col;
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java Tue Feb 9 07:55:30 2010
@@ -44,8 +44,7 @@
* table properties
* @throws SerDeException
*/
- public void initialize(Configuration conf, Properties tbl)
- throws SerDeException;
+ void initialize(Configuration conf, Properties tbl) throws SerDeException;
/**
* Deserialize an object out of a Writable blob. In most cases, the return
@@ -58,12 +57,11 @@
* The Writable object containing a serialized object
* @return A Java object representing the contents in the blob.
*/
- public Object deserialize(Writable blob) throws SerDeException;
+ Object deserialize(Writable blob) throws SerDeException;
/**
* Get the object inspector that can be used to navigate through the internal
* structure of the Object returned from deserialize(...).
*/
- public ObjectInspector getObjectInspector() throws SerDeException;
-
+ ObjectInspector getObjectInspector() throws SerDeException;
}
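
With the redundant public modifiers dropped, Deserializer is three bare methods. A minimal implementor, for readers checking what the slimmed-down interface demands; SingleStringDeserializer and its Text-based decoding are illustrative, not part of this commit:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class SingleStringDeserializer implements Deserializer {

      public void initialize(Configuration conf, Properties tbl) throws SerDeException {
        // This toy deserializer needs no table properties.
      }

      public Object deserialize(Writable blob) throws SerDeException {
        // The interface contract allows reusing the returned object across calls.
        return ((Text) blob).toString();
      }

      public ObjectInspector getObjectInspector() throws SerDeException {
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
      }
    }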
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Tue Feb 9 07:55:30 2010
@@ -38,6 +38,10 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+/**
+ * MetadataTypedColumnsetSerDe. A delimited SerDe whose column names come from table metadata.
+ *
+ */
public class MetadataTypedColumnsetSerDe implements SerDe {
public static final Log LOG = LogFactory
@@ -51,16 +55,16 @@
// class.
SerDeUtils.registerSerDe(
"org.apache.hadoop.hive.serde.thrift.columnsetSerDe", Class
- .forName(className));
+ .forName(className));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
- final public static String DefaultSeparator = "\001";
+ public static final String DefaultSeparator = "\001";
private String separator;
- final public static String defaultNullString = "\\N";
+ public static final String defaultNullString = "\\N";
private String nullString;
private List<String> columnNames;
@@ -81,7 +85,7 @@
private String getByteValue(String altValue, String defaultVal) {
if (altValue != null && altValue.length() > 0) {
try {
- byte b[] = new byte[1];
+ byte[] b = new byte[1];
b[0] = Byte.valueOf(altValue).byteValue();
return new String(b);
} catch (NumberFormatException e) {
@@ -91,13 +95,12 @@
return defaultVal;
}
- public void initialize(Configuration job, Properties tbl)
- throws SerDeException {
- String alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT);
- separator = getByteValue(alt_sep, DefaultSeparator);
+ public void initialize(Configuration job, Properties tbl) throws SerDeException {
+ String altSep = tbl.getProperty(Constants.SERIALIZATION_FORMAT);
+ separator = getByteValue(altSep, DefaultSeparator);
- String alt_null = tbl.getProperty(Constants.SERIALIZATION_NULL_FORMAT);
- nullString = getByteValue(alt_null, defaultNullString);
+ String altNull = tbl.getProperty(Constants.SERIALIZATION_NULL_FORMAT);
+ nullString = getByteValue(altNull, defaultNullString);
String columnProperty = tbl.getProperty("columns");
String serdeName = tbl.getProperty(Constants.SERIALIZATION_LIB);
@@ -105,8 +108,7 @@
// so this hack applies to all such tables
boolean columnsetSerDe = false;
if ((serdeName != null)
- && serdeName
- .equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
+ && serdeName.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
columnsetSerDe = true;
}
if (columnProperty == null || columnProperty.length() == 0
@@ -115,7 +117,7 @@
// Treat it as a table with a single column called "col"
cachedObjectInspector = ObjectInspectorFactory
.getReflectionObjectInspector(ColumnSet.class,
- ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+ ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
} else {
columnNames = Arrays.asList(columnProperty.split(","));
cachedObjectInspector = MetadataListStructObjectInspector
@@ -201,8 +203,7 @@
Text serializeCache = new Text();
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
if (objInspector.getCategory() != Category.STRUCT) {
throw new SerDeException(getClass().toString()
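
The renamed locals (altSep, altNull) make the property plumbing above easier to follow: both are optional overrides run through getByteValue, which turns a numeric string into a single-byte separator and falls back to the default on bad input. A sketch of the table Properties a caller might pass to initialize; the numeric-string encoding of the separator follows from getByteValue above, and everything else here is illustrative:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.Constants;
    import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;

    public class ColumnsetInitSketch {
      public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty("columns", "key,value");
        // getByteValue parses "9" into a single TAB byte; a malformed value
        // falls back to DefaultSeparator ("\001").
        tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");

        MetadataTypedColumnsetSerDe serde = new MetadataTypedColumnsetSerDe();
        serde.initialize(new Configuration(), tbl);
        System.out.println(serde.getObjectInspector().getTypeName());
      }
    }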
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java Tue Feb 9 07:55:30 2010
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.serde2;
/**
- * Generic exception class for SerDes
+ * Generic exception class for SerDes.
*
*/
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Tue Feb 9 07:55:30 2010
@@ -39,7 +39,11 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-public class SerDeUtils {
+/**
+ * SerDeUtils. Static registration and lookup helpers for SerDe implementations.
+ *
+ */
+public final class SerDeUtils {
public static final char QUOTE = '"';
public static final char COLON = ':';
@@ -58,8 +62,7 @@
serdes.put(name, serde);
}
- public static Deserializer lookupDeserializer(String name)
- throws SerDeException {
+ public static Deserializer lookupDeserializer(String name) throws SerDeException {
Class<?> c;
if (serdes.containsKey(name)) {
c = serdes.get(name);
@@ -81,10 +84,10 @@
static {
nativeSerDeNames
.add(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class
- .getName());
+ .getName());
nativeSerDeNames
.add(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
- .getName());
+ .getName());
// For backward compatibility
nativeSerDeNames.add("org.apache.hadoop.hive.serde.thrift.columnsetSerDe");
nativeSerDeNames
@@ -104,12 +107,12 @@
// loading these classes will automatically register the short names
Class
.forName(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
- .getName());
+ .getName());
Class.forName(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class
.getName());
Class
.forName(org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer.class
- .getName());
+ .getName());
} catch (ClassNotFoundException e) {
throw new RuntimeException(
"IMPOSSIBLE Exception: Unable to initialize core serdes", e);
@@ -326,7 +329,9 @@
default:
throw new RuntimeException("Unknown type in ObjectInspector!");
}
- ;
+ }
+ private SerDeUtils() {
+ // prevent instantiation
}
}
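
Now that SerDeUtils is final with a private constructor, it is purely a static registry. The legacy short name registered in MetadataTypedColumnsetSerDe's static block above still resolves through it; LookupSketch is an illustrative caller, and the only assumption is that lookupDeserializer hands back a usable instance for a registered name:

    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.SerDeUtils;

    public class LookupSketch {
      public static void main(String[] args) throws Exception {
        // The backward-compatibility alias maps to MetadataTypedColumnsetSerDe.
        Deserializer d = SerDeUtils.lookupDeserializer(
            "org.apache.hadoop.hive.serde.thrift.columnsetSerDe");
        System.out.println(d.getClass().getName());
      }
    }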
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java Tue Feb 9 07:55:30 2010
@@ -41,14 +41,13 @@
* table properties
* @throws SerDeException
*/
- public void initialize(Configuration conf, Properties tbl)
- throws SerDeException;
+ void initialize(Configuration conf, Properties tbl) throws SerDeException;
/**
* Returns the Writable class that would be returned by the serialize method.
* This is used to initialize SequenceFile header.
*/
- public Class<? extends Writable> getSerializedClass();
+ Class<? extends Writable> getSerializedClass();
/**
* Serialize an object by navigating inside the Object with the
@@ -57,7 +56,5 @@
* wants to keep a copy of the Writable, the client needs to clone the
* returned value.
*/
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException;
-
+ Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException;
}
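
Serializer gets the same treatment as Deserializer. The javadoc above carries the important part of the contract: the returned Writable may be reused, so callers must clone it to keep it. A minimal implementor (ToStringSerializer is illustrative) that leans on exactly that reuse:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.Serializer;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class ToStringSerializer implements Serializer {

      // Reused across calls, as the interface javadoc permits.
      private final Text out = new Text();

      public void initialize(Configuration conf, Properties tbl) throws SerDeException {
        // Nothing to configure.
      }

      public Class<? extends Writable> getSerializedClass() {
        return Text.class;
      }

      public Writable serialize(Object obj, ObjectInspector objInspector)
          throws SerDeException {
        out.set(String.valueOf(obj));
        return out;
      }
    }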
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java Tue Feb 9 07:55:30 2010
@@ -29,6 +29,10 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
+/**
+ * TypedSerDe. A base SerDe bound to a single Java object Type.
+ *
+ */
public abstract class TypedSerDe implements SerDe {
protected Type objectType;
@@ -66,8 +70,7 @@
return ObjectInspectorFactory.ObjectInspectorOptions.JAVA;
}
- public void initialize(Configuration job, Properties tbl)
- throws SerDeException {
+ public void initialize(Configuration job, Properties tbl) throws SerDeException {
// do nothing
}
@@ -75,8 +78,7 @@
return BytesWritable.class;
}
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
throw new RuntimeException("not supported");
}
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Tue Feb 9 07:55:30 2010
@@ -386,7 +386,7 @@
for (int eid = 0; eid < size; eid++) {
r
.set(eid, deserialize(buffer, fieldTypes.get(eid), invert, r
- .get(eid)));
+ .get(eid)));
}
return r;
}
@@ -400,8 +400,7 @@
OutputByteBuffer outputByteBuffer = new OutputByteBuffer();
@Override
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
outputByteBuffer.reset();
StructObjectInspector soi = (StructObjectInspector) objInspector;
List<? extends StructField> fields = soi.getAllStructFieldRefs();
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java Tue Feb 9 07:55:30 2010
@@ -246,11 +246,11 @@
WritableFactories.setFactory(BytesRefArrayWritable.class,
new WritableFactory() {
- @Override
- public Writable newInstance() {
- return new BytesRefArrayWritable();
- }
+ @Override
+ public Writable newInstance() {
+ return new BytesRefArrayWritable();
+ }
- });
+ });
}
}
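
The re-indented static block registers a WritableFactory so Hadoop can mint BytesRefArrayWritable instances by class. A one-line exercise of that path (FactorySketch is illustrative):

    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableFactories;

    public class FactorySketch {
      public static void main(String[] args) {
        // Instances come from the factory registered above once the class is
        // initialized, and reflectively otherwise.
        Writable w = WritableFactories.newInstance(BytesRefArrayWritable.class);
        System.out.println(w.getClass().getSimpleName());
      }
    }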
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java Tue Feb 9 07:55:30 2010
@@ -62,10 +62,10 @@
+ Arrays.asList(serdeParams.getSeparators())
+ ":"
+ ((StructTypeInfo) serdeParams.getRowTypeInfo())
- .getAllStructFieldNames()
+ .getAllStructFieldNames()
+ ":"
+ ((StructTypeInfo) serdeParams.getRowTypeInfo())
- .getAllStructFieldTypeInfos() + "]";
+ .getAllStructFieldTypeInfos() + "]";
}
public static final Log LOG = LogFactory
@@ -81,20 +81,18 @@
*
* @see SerDe#initialize(Configuration, Properties)
*/
- public void initialize(Configuration job, Properties tbl)
- throws SerDeException {
- serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, getClass()
- .getName());
+ public void initialize(Configuration job, Properties tbl) throws SerDeException {
+
+ serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, getClass().getName());
// Create the ObjectInspectors for the fields. Note: Currently
// ColumnarObject uses same ObjectInpector as LazyStruct
cachedObjectInspector = LazyFactory.createColumnarStructInspector(
serdeParams.getColumnNames(), serdeParams.getColumnTypes(), serdeParams
- .getSeparators(), serdeParams.getNullSequence(), serdeParams
- .isEscaped(), serdeParams.getEscapeChar());
+ .getSeparators(), serdeParams.getNullSequence(), serdeParams
+ .isEscaped(), serdeParams.getEscapeChar());
- java.util.ArrayList<Integer> notSkipIDs = ColumnProjectionUtils
- .getReadColumnIDs(job);
+ java.util.ArrayList<Integer> notSkipIDs = ColumnProjectionUtils.getReadColumnIDs(job);
cachedLazyStruct = new ColumnarStruct(cachedObjectInspector, notSkipIDs);
@@ -160,8 +158,7 @@
* @return The serialized Writable object
* @see SerDe#serialize(Object, ObjectInspector)
*/
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
if (objInspector.getCategory() != Category.STRUCT) {
throw new SerDeException(getClass().toString()
@@ -203,14 +200,14 @@
// delimited way.
if (!foi.getCategory().equals(Category.PRIMITIVE)
&& (declaredFields == null || declaredFields.get(i)
- .getFieldObjectInspector().getCategory().equals(
- Category.PRIMITIVE))) {
+ .getFieldObjectInspector().getCategory().equals(
+ Category.PRIMITIVE))) {
LazySimpleSerDe.serialize(serializeStream, SerDeUtils.getJSONString(
f, foi),
PrimitiveObjectInspectorFactory.javaStringObjectInspector,
serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
serdeParams.isEscaped(), serdeParams.getEscapeChar(), serdeParams
- .getNeedsEscape());
+ .getNeedsEscape());
} else {
LazySimpleSerDe.serialize(serializeStream, f, foi, serdeParams
.getSeparators(), 1, serdeParams.getNullSequence(), serdeParams
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java Tue Feb 9 07:55:30 2010
@@ -177,7 +177,7 @@
if (fieldLen == nullSequence.getLength()
&& LazyUtils.compare(data, rawBytesField[fieldID].getStart(), fieldLen,
- nullSequence.getBytes(), 0, nullSequence.getLength()) == 0) {
+ nullSequence.getBytes(), 0, nullSequence.getLength()) == 0) {
return null;
}
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyDecompressionCallback.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyDecompressionCallback.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyDecompressionCallback.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyDecompressionCallback.java Tue Feb 9 07:55:30 2010
@@ -27,6 +27,6 @@
*/
public interface LazyDecompressionCallback {
- public byte[] decompress() throws IOException;
+ byte[] decompress() throws IOException;
}
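
The interface is now a single bare method. A trivial implementor, to show the shape; EagerBytesCallback is illustrative, and a real callback would inflate a compressed column chunk on first access:

    import java.io.IOException;

    import org.apache.hadoop.hive.serde2.columnar.LazyDecompressionCallback;

    public class EagerBytesCallback implements LazyDecompressionCallback {

      private final byte[] bytes;

      public EagerBytesCallback(byte[] bytes) {
        this.bytes = bytes;
      }

      public byte[] decompress() throws IOException {
        // Stand-in: hand back preloaded bytes instead of decompressing.
        return bytes;
      }
    }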
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Tue Feb 9 07:55:30 2010
@@ -45,6 +45,10 @@
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.transport.TIOStreamTransport;
+/**
+ * DynamicSerDe. A SerDe whose schema is parsed at runtime from a Thrift DDL string.
+ *
+ */
public class DynamicSerDe implements SerDe, Serializable {
public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName());
@@ -54,22 +58,21 @@
public static final String META_TABLE_NAME = "name";
- transient private thrift_grammar parse_tree;
- transient protected ByteStream.Input bis_;
- transient protected ByteStream.Output bos_;
+ private transient thrift_grammar parse_tree;
+ protected transient ByteStream.Input bis_;
+ protected transient ByteStream.Output bos_;
/**
* The protocols are protected in case any of their properties need to be queried
* from another class in this package. For TCTLSeparatedProtocol, for example, a
* caller may want to query the separators.
*/
- transient protected TProtocol oprot_;
- transient protected TProtocol iprot_;
+ protected transient TProtocol oprot_;
+ protected transient TProtocol iprot_;
TIOStreamTransport tios;
- public void initialize(Configuration job, Properties tbl)
- throws SerDeException {
+ public void initialize(Configuration job, Properties tbl) throws SerDeException {
try {
String ddl = tbl.getProperty(Constants.SERIALIZATION_DDL);
@@ -162,7 +165,7 @@
} else if (bt.isPrimitive()) {
return PrimitiveObjectInspectorFactory
.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
- .getTypeEntryFromPrimitiveJavaClass(bt.getRealType()).primitiveCategory);
+ .getTypeEntryFromPrimitiveJavaClass(bt.getRealType()).primitiveCategory);
} else {
// Must be a struct
DynamicSerDeStructBase btStruct = (DynamicSerDeStructBase) bt;
@@ -191,8 +194,7 @@
BytesWritable ret = new BytesWritable();
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException {
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
try {
bos_.reset();
bt.serialize(obj, objInspector, oprot_);
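
For readers new to this class: initialize parses the Thrift DDL named by SERIALIZATION_DDL with the thrift_grammar parser above, and serialize/deserialize then run over the Thrift protocol stack (oprot_/iprot_). A hedged usage sketch; the DDL string, the "name" property matching the struct name, and the use of SERIALIZATION_FORMAT to pick the protocol class are assumptions drawn from how the test suite drives this SerDe, not from this hunk:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.Constants;
    import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;

    public class DynamicSerDeSketch {
      public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty("name", "test"); // META_TABLE_NAME; matches the struct name
        tbl.setProperty(Constants.SERIALIZATION_DDL,
            "struct test { i32 key, string value }");
        tbl.setProperty(Constants.SERIALIZATION_FORMAT,
            "org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol");

        DynamicSerDe serde = new DynamicSerDe();
        serde.initialize(new Configuration(), tbl);
        System.out.println(serde.getObjectInspector().getTypeName());
      }
    }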
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java Tue Feb 9 07:55:30 2010
@@ -18,7 +18,10 @@
package org.apache.hadoop.hive.serde2.dynamic_type;
-
+/**
+ * DynamicSerDeField. Parser node for a single field in a Thrift DDL struct.
+ *
+ */
public class DynamicSerDeField extends DynamicSerDeSimpleNode {
// production is:
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java Tue Feb 9 07:55:30 2010
@@ -35,6 +35,10 @@
import org.apache.thrift.protocol.TProtocolUtil;
import org.apache.thrift.protocol.TType;
+/**
+ * DynamicSerDeFieldList. Parser node holding the ordered list of fields in a struct.
+ *
+ */
public class DynamicSerDeFieldList extends DynamicSerDeSimpleNode implements
Serializable {
@@ -48,7 +52,7 @@
// mapping of the fieldid to the field
private Map<Integer, DynamicSerDeTypeBase> types_by_id = null;
private Map<String, DynamicSerDeTypeBase> types_by_column_name = null;
- private DynamicSerDeTypeBase ordered_types[] = null;
+ private DynamicSerDeTypeBase[] ordered_types = null;
private Map<String, Integer> ordered_column_id_by_name = null;
@@ -64,9 +68,9 @@
return (DynamicSerDeField) jjtGetChild(i);
}
- final public DynamicSerDeField[] getChildren() {
+ public final DynamicSerDeField[] getChildren() {
int size = jjtGetNumChildren();
- DynamicSerDeField result[] = new DynamicSerDeField[size];
+ DynamicSerDeField[] result = new DynamicSerDeField[size];
for (int i = 0; i < size; i++) {
result[i] = (DynamicSerDeField) jjtGetChild(i);
}
@@ -228,8 +232,7 @@
TField field = new TField();
public void serialize(Object o, ObjectInspector oi, TProtocol oprot)
- throws TException, SerDeException, NoSuchFieldException,
- IllegalAccessException {
+ throws TException, SerDeException, NoSuchFieldException, IllegalAccessException {
// Assuming the ObjectInspector represents exactly the same type as this
// struct.
// This assumption should be checked during query compile time.
Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java Tue Feb 9 07:55:30 2010
@@ -18,7 +18,15 @@
package org.apache.hadoop.hive.serde2.dynamic_type;
+/**
+ * DynamicSerDeFieldRequiredness. Parser node recording a field's requiredness.
+ *
+ */
public class DynamicSerDeFieldRequiredness extends SimpleNode {
+ /**
+ * RequirednessTypes. The three requiredness levels a Thrift field may carry.
+ *
+ */
public enum RequirednessTypes {
Required, Skippable, Optional,
};