Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC

svn commit: r901644 [1/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/java...

Author: zshao
Date: Thu Jan 21 10:37:58 2010
New Revision: 901644

URL: http://svn.apache.org/viewvc?rev=901644&view=rev
Log:
HIVE-1081. Automated source code cleanup - Part 3 - ql. (Carl Steinbach via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ByteWritable.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAF.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAFEvaluatorResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordWriter.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskHandle.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskResult.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordWriter.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/description.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/DCLLItem.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MRU.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectKey.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectValue.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/InputFormatChecker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataOutputBuffer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/SequenceFileInputFormatChecker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Node.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckResult.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Dimension.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/RandomDimension.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Sample.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalPlanResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseError.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createFunctionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableLikeDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createViewDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ddlDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descFunctionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropFunctionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/schemaDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showFunctionsDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTableStatusDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/udtfDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/unionDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLpad.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqual.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqualOrGreaterThan.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPEqualOrLessThan.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPGreaterThan.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPLessThan.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPLongDivide.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotEqual.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFParseUrl.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpExtract.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRepeat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFReverse.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRpad.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSpace.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnhex.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerOptions.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerProvider.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArrayComparator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArraySerializer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CacheEvictionException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicy.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicyListener.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/DefaultSerializer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/FastIterator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/IntegerComparator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/IntegerSerializer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/IterationException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/LongComparator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/LongSerializer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/MRU.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ObjectBAComparator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Serialization.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Serializer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/SoftCache.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/StringComparator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Tuple.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/TupleBrowser.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/WrappedRuntimeException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HTree.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashBucket.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashDirectory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashNode.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/BaseRecordManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/BlockIo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/BlockView.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/CacheRecordManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/DataPage.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FileHeader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FreeLogicalRowIdPage.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FreeLogicalRowIdPageManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FreePhysicalRowId.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FreePhysicalRowIdPage.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/FreePhysicalRowIdPageManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/Location.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/LogicalRowIdManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/Magic.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/PageCursor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/PageHeader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/PageManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/PhysicalRowId.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/PhysicalRowIdManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/Provider.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordCache.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordFile.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordHeader.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/TransactionManager.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/TranslationPage.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestPartition.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Jan 21 10:37:58 2010
@@ -25,6 +25,9 @@
     HIVE-1081. Automated source code cleanup - Part 2 - serde.
     (Carl Steinbach via zshao)
 
+    HIVE-1081. Automated source code cleanup - Part 3 - ql.
+    (Carl Steinbach via zshao)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Thu Jan 21 10:37:58 2010
@@ -18,32 +18,27 @@
 
 package org.apache.hadoop.hive.ql;
 
-import java.io.File;
 import java.io.DataInput;
-import java.io.IOException;
+import java.io.File;
 import java.io.FileNotFoundException;
+import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Random;
 import java.util.ArrayList;
+import java.util.Random;
 
 import org.antlr.runtime.TokenRewriteStream;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.util.StringUtils;
-
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.StringUtils;
 
 /**
- * Context for Semantic Analyzers.
- * Usage:
- * not reusable - construct a new one for each query
- * should call clear() at end of use to remove temporary folders
+ * Context for Semantic Analyzers. Usage: not reusable - construct a new one for
+ * each query should call clear() at end of use to remove temporary folders
  */
 public class Context {
   private Path resFile;
@@ -51,22 +46,22 @@
   private FileSystem resFs;
   static final private Log LOG = LogFactory.getLog("hive.ql.Context");
   private Path[] resDirPaths;
-  private int    resDirFilesNum;
+  private int resDirFilesNum;
   boolean initialized;
   private String scratchPath;
   private Path MRScratchDir;
   private Path localScratchDir;
-  private ArrayList<Path> allScratchDirs = new ArrayList<Path> ();
+  private final ArrayList<Path> allScratchDirs = new ArrayList<Path>();
   private HiveConf conf;
-  Random rand = new Random ();
+  Random rand = new Random();
   protected int randomid = Math.abs(rand.nextInt());
   protected int pathid = 10000;
   protected boolean explain = false;
   private TokenRewriteStream tokenRewriteStream;
 
-  public Context() {  
+  public Context() {
   }
-  
+
   public Context(HiveConf conf) {
     this.conf = conf;
     Path tmpPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR));
@@ -75,7 +70,9 @@
 
   /**
    * Set the context on whether the current query is an explain query
-   * @param value true if the query is an explain query, false if not
+   * 
+   * @param value
+   *          true if the query is an explain query, false if not
    */
   public void setExplain(boolean value) {
     explain = value;
@@ -83,6 +80,7 @@
 
   /**
    * Find out whether the current query is an explain query
+   * 
    * @return true if the query is an explain query, false if not
    */
   public boolean getExplain() {
@@ -95,7 +93,7 @@
   private void makeLocalScratchDir() throws IOException {
     while (true) {
       localScratchDir = new Path(System.getProperty("java.io.tmpdir")
-                                 + File.separator + Math.abs(rand.nextInt()));
+          + File.separator + Math.abs(rand.nextInt()));
       FileSystem fs = FileSystem.getLocal(conf);
       if (fs.mkdirs(localScratchDir)) {
         localScratchDir = fs.makeQualified(localScratchDir);
@@ -106,15 +104,15 @@
   }
 
   /**
-   * Make a tmp directory for MR intermediate data
-   * If URI/Scheme are not supplied - those implied by the default filesystem
-   * will be used (which will typically correspond to hdfs instance on hadoop cluster)
+   * Make a tmp directory for MR intermediate data If URI/Scheme are not
+   * supplied - those implied by the default filesystem will be used (which will
+   * typically correspond to hdfs instance on hadoop cluster)
    */
   private void makeMRScratchDir() throws IOException {
-    while(true) {
-      MRScratchDir = FileUtils.makeQualified
-        (new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR),
-                  Integer.toString(Math.abs(rand.nextInt()))), conf);
+    while (true) {
+      MRScratchDir = FileUtils.makeQualified(new Path(conf
+          .getVar(HiveConf.ConfVars.SCRATCHDIR), Integer.toString(Math.abs(rand
+          .nextInt()))), conf);
 
       if (explain) {
         allScratchDirs.add(MRScratchDir);
@@ -128,20 +126,20 @@
       }
     }
   }
-  
+
   /**
-   * Make a tmp directory on specified URI
-   * Currently will use the same path as implied by SCRATCHDIR config variable
+   * Make a tmp directory on specified URI Currently will use the same path as
+   * implied by SCRATCHDIR config variable
    */
   private Path makeExternalScratchDir(URI extURI) throws IOException {
-    while(true) {
-      String extPath = scratchPath + File.separator + 
-        Integer.toString(Math.abs(rand.nextInt()));
+    while (true) {
+      String extPath = scratchPath + File.separator
+          + Integer.toString(Math.abs(rand.nextInt()));
       Path extScratchDir = new Path(extURI.getScheme(), extURI.getAuthority(),
-                                    extPath);
+          extPath);
 
       if (explain) {
-        allScratchDirs.add(extScratchDir);        
+        allScratchDirs.add(extScratchDir);
         return extScratchDir;
       }
 
@@ -154,26 +152,25 @@
   }
 
   /**
-   * Get a tmp directory on specified URI
-   * Will check if this has already been made
-   * (either via MR or Local FileSystem or some other external URI
+   * Get a tmp directory on specified URI Will check if this has already been
+   * made (either via MR or Local FileSystem or some other external URI
    */
   private String getExternalScratchDir(URI extURI) {
     try {
       // first check if we already made a scratch dir on this URI
-      for (Path p: allScratchDirs) {
+      for (Path p : allScratchDirs) {
         URI pURI = p.toUri();
-        if (strEquals(pURI.getScheme(), extURI.getScheme()) &&
-            strEquals(pURI.getAuthority(), extURI.getAuthority())) {
+        if (strEquals(pURI.getScheme(), extURI.getScheme())
+            && strEquals(pURI.getAuthority(), extURI.getAuthority())) {
           return p.toString();
         }
       }
       return makeExternalScratchDir(extURI).toString();
     } catch (IOException e) {
-      throw new RuntimeException (e);
+      throw new RuntimeException(e);
     }
   }
-  
+
   /**
    * Create a map-reduce scratch directory on demand and return it
    */
@@ -182,10 +179,10 @@
       try {
         makeMRScratchDir();
       } catch (IOException e) {
-        throw new RuntimeException (e);
+        throw new RuntimeException(e);
       } catch (IllegalArgumentException e) {
-        throw new RuntimeException("Error while making MR scratch " + 
-            "directory - check filesystem config (" + e.getCause() + ")", e);
+        throw new RuntimeException("Error while making MR scratch "
+            + "directory - check filesystem config (" + e.getCause() + ")", e);
       }
     }
     return MRScratchDir.toString();
@@ -201,8 +198,8 @@
       } catch (IOException e) {
         throw new RuntimeException(e);
       } catch (IllegalArgumentException e) {
-        throw new RuntimeException("Error while making local scratch " + 
-            "directory - check filesystem config (" + e.getCause() + ")", e);
+        throw new RuntimeException("Error while making local scratch "
+            + "directory - check filesystem config (" + e.getCause() + ")", e);
       }
     }
     return localScratchDir.toString();
@@ -214,17 +211,21 @@
   private void removeScratchDir() {
     if (explain) {
       try {
-        if (localScratchDir != null)
+        if (localScratchDir != null) {
           FileSystem.getLocal(conf).delete(localScratchDir, true);
+        }
       } catch (Exception e) {
-        LOG.warn("Error Removing Scratch: " + StringUtils.stringifyException(e));
+        LOG
+            .warn("Error Removing Scratch: "
+                + StringUtils.stringifyException(e));
       }
     } else {
-      for (Path p: allScratchDirs) {
+      for (Path p : allScratchDirs) {
         try {
           p.getFileSystem(conf).delete(p, true);
         } catch (Exception e) {
-          LOG.warn("Error Removing Scratch: " + StringUtils.stringifyException(e));
+          LOG.warn("Error Removing Scratch: "
+              + StringUtils.stringifyException(e));
         }
       }
     }
@@ -238,12 +239,13 @@
   private String nextPath(String base) {
     return base + File.separator + Integer.toString(pathid++);
   }
-  
+
   /**
-   * check if path is tmp path. the assumption is that all uri's relative
-   * to scratchdir are temporary
-   * @return true if a uri is a temporary uri for map-reduce intermediate
-   *         data, false otherwise
+   * check if path is tmp path. the assumption is that all uri's relative to
+   * scratchdir are temporary
+   * 
+   * @return true if a uri is a temporary uri for map-reduce intermediate data,
+   *         false otherwise
    */
   public boolean isMRTmpFileURI(String uriStr) {
     return (uriStr.indexOf(scratchPath) != -1);
@@ -251,28 +253,28 @@
 
   /**
    * Get a path to store map-reduce intermediate data in
+   * 
    * @return next available path for map-red intermediate data
    */
   public String getMRTmpFileURI() {
     return nextPath(getMRScratchDir());
   }
 
-
   /**
    * Get a tmp path on local host to store intermediate data
+   * 
    * @return next available tmp path on local fs
    */
   public String getLocalTmpFileURI() {
     return nextPath(getLocalScratchDir());
   }
-  
 
   /**
    * Get a path to store tmp data destined for external URI
-   * @param extURI external URI to which the tmp data has to be 
-   *               eventually moved
-   * @return next available tmp path on the file system corresponding
-   *              extURI
+   * 
+   * @param extURI
+   *          external URI to which the tmp data has to be eventually moved
+   * @return next available tmp path on the file system corresponding extURI
    */
   public String getExternalTmpFileURI(URI extURI) {
     return nextPath(getExternalScratchDir(extURI));
@@ -286,7 +288,8 @@
   }
 
   /**
-   * @param resFile the resFile to set
+   * @param resFile
+   *          the resFile to set
    */
   public void setResFile(Path resFile) {
     this.resFile = resFile;
@@ -303,7 +306,8 @@
   }
 
   /**
-   * @param resDir the resDir to set
+   * @param resDir
+   *          the resDir to set
    */
   public void setResDir(Path resDir) {
     this.resDir = resDir;
@@ -311,13 +315,11 @@
 
     resDirFilesNum = 0;
     resDirPaths = null;
-  }  
-  
+  }
+
   public void clear() throws IOException {
-    if (resDir != null)
-    {
-      try
-      {
+    if (resDir != null) {
+      try {
         FileSystem fs = resDir.getFileSystem(conf);
         fs.delete(resDir, true);
       } catch (IOException e) {
@@ -325,12 +327,10 @@
       }
     }
 
-    if (resFile != null)
-    {
-      try
-      {
+    if (resFile != null) {
+      try {
         FileSystem fs = resFile.getFileSystem(conf);
-      	fs.delete(resFile, false);
+        fs.delete(resFile, false);
       } catch (IOException e) {
         LOG.info("Context clear error: " + StringUtils.stringifyException(e));
       }
@@ -339,30 +339,34 @@
   }
 
   public DataInput getStream() {
-    try
-    {
+    try {
       if (!initialized) {
         initialized = true;
-        if ((resFile == null) && (resDir == null)) return null;
-      
+        if ((resFile == null) && (resDir == null)) {
+          return null;
+        }
+
         if (resFile != null) {
-          return (DataInput)resFile.getFileSystem(conf).open(resFile);
+          return resFile.getFileSystem(conf).open(resFile);
         }
-        
+
         resFs = resDir.getFileSystem(conf);
         FileStatus status = resFs.getFileStatus(resDir);
         assert status.isDir();
         FileStatus[] resDirFS = resFs.globStatus(new Path(resDir + "/*"));
         resDirPaths = new Path[resDirFS.length];
         int pos = 0;
-        for (FileStatus resFS: resDirFS)
-          if (!resFS.isDir())
+        for (FileStatus resFS : resDirFS) {
+          if (!resFS.isDir()) {
             resDirPaths[pos++] = resFS.getPath();
-        if (pos == 0) return null;
-        
-        return (DataInput)resFs.open(resDirPaths[resDirFilesNum++]);
-      }
-      else {
+          }
+        }
+        if (pos == 0) {
+          return null;
+        }
+
+        return resFs.open(resDirPaths[resDirFilesNum++]);
+      } else {
         return getNextStream();
       }
     } catch (FileNotFoundException e) {
@@ -375,11 +379,11 @@
   }
 
   private DataInput getNextStream() {
-    try
-    {
-      if (resDir != null && resDirFilesNum < resDirPaths.length && 
-          (resDirPaths[resDirFilesNum] != null))
-        return (DataInput)resFs.open(resDirPaths[resDirFilesNum++]);
+    try {
+      if (resDir != null && resDirFilesNum < resDirPaths.length
+          && (resDirPaths[resDirFilesNum] != null)) {
+        return resFs.open(resDirPaths[resDirFilesNum++]);
+      }
     } catch (FileNotFoundException e) {
       LOG.info("getNextStream error: " + StringUtils.stringifyException(e));
       return null;
@@ -387,7 +391,7 @@
       LOG.info("getNextStream error: " + StringUtils.stringifyException(e));
       return null;
     }
-    
+
     return null;
   }
 
@@ -400,25 +404,25 @@
 
   /**
    * Set the token rewrite stream being used to parse the current top-level SQL
-   * statement.  Note that this should <b>not</b> be used for other parsing
-   * activities; for example, when we encounter a reference to a view, we
-   * switch to a new stream for parsing the stored view definition from the
-   * catalog, but we don't clobber the top-level stream in the context.
-   *
-   * @param tokenRewriteStream the stream being used
+   * statement. Note that this should <b>not</b> be used for other parsing
+   * activities; for example, when we encounter a reference to a view, we switch
+   * to a new stream for parsing the stored view definition from the catalog,
+   * but we don't clobber the top-level stream in the context.
+   * 
+   * @param tokenRewriteStream
+   *          the stream being used
    */
   public void setTokenRewriteStream(TokenRewriteStream tokenRewriteStream) {
-    assert(this.tokenRewriteStream == null);
+    assert (this.tokenRewriteStream == null);
     this.tokenRewriteStream = tokenRewriteStream;
   }
 
   /**
-   * @return the token rewrite stream being used to parse the current
-   * top-level SQL statement, or null if it isn't available
-   * (e.g. for parser tests)
+   * @return the token rewrite stream being used to parse the current top-level
+   *         SQL statement, or null if it isn't available (e.g. for parser
+   *         tests)
    */
   public TokenRewriteStream getTokenRewriteStream() {
     return tokenRewriteStream;
   }
 }
-
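
The stream registered above is typically used to recover the exact source
text of a parse subtree, e.g. when persisting a view definition. A hedged
sketch: the token-index accessors come from ANTLR's CommonTree, and the null
check covers the parser-test case called out in the getter's javadoc.

    import org.antlr.runtime.TokenRewriteStream;
    import org.apache.hadoop.hive.ql.parse.ASTNode;

    String originalText(Context ctx, ASTNode node) {
      TokenRewriteStream trs = ctx.getTokenRewriteStream();
      if (trs == null) {
        return null; // e.g. parser tests, where no stream was registered
      }
      return trs.toString(node.getTokenStartIndex(), node.getTokenStopIndex());
    }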

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Jan 21 10:37:58 2010
@@ -21,50 +21,54 @@
 import java.io.DataInput;
 import java.io.IOException;
 import java.io.Serializable;
-import java.util.*;
-
-import org.apache.hadoop.hive.ql.parse.ASTNode;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.Vector;
 
 import org.apache.commons.lang.StringUtils;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.TaskRunner;
-import org.apache.hadoop.hive.ql.exec.TaskResult;
-import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.TaskResult;
+import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.PreExecute;
-import org.apache.hadoop.hive.ql.hooks.PostExecute;
-
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
-import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.hooks.PostExecute;
+import org.apache.hadoop.hive.ql.hooks.PreExecute;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 public class Driver implements CommandProcessor {
 
   static final private Log LOG = LogFactory.getLog(Driver.class.getName());
@@ -81,31 +85,35 @@
   private String SQLState;
 
   // A limit on the number of threads that can be launched
-  private int maxthreads = 8;
-  private int sleeptime = 2000;
+  private final int maxthreads = 8;
+  private final int sleeptime = 2000;
 
   public void init() {
     Operator.resetId();
   }
-  
+
   public int countJobs(List<Task<? extends Serializable>> tasks) {
     return countJobs(tasks, new ArrayList<Task<? extends Serializable>>());
   }
 
-  public int countJobs(List<Task<? extends Serializable>> tasks, List<Task<? extends Serializable>> seenTasks) {
-    if (tasks == null)
+  public int countJobs(List<Task<? extends Serializable>> tasks,
+      List<Task<? extends Serializable>> seenTasks) {
+    if (tasks == null) {
       return 0;
+    }
     int jobs = 0;
     for (Task<? extends Serializable> task : tasks) {
       if (!seenTasks.contains(task)) {
         seenTasks.add(task);
-        
-        if(task instanceof ConditionalTask)
-          jobs +=countJobs(((ConditionalTask)task).getListTasks(), seenTasks);
-        else if (task.isMapRedTask()) { //this may be true for conditional task, but we will not inc the counter 
+
+        if (task instanceof ConditionalTask) {
+          jobs += countJobs(((ConditionalTask) task).getListTasks(), seenTasks);
+        } else if (task.isMapRedTask()) { // this may be true for conditional
+                                          // task, but we will not inc the
+                                          // counter
           jobs++;
         }
-        
+
         jobs += countJobs(task.getChildTasks(), seenTasks);
       }
     }
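
The seenTasks list threaded through countJobs() is what keeps a task shared
by several parents from being counted more than once. A self-contained toy
version of the same de-duplicating DAG walk (Node is a hypothetical stand-in
for Task, for illustration only):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class Node {
      boolean isJob;
      List<Node> children = new ArrayList<Node>();
    }

    class CountSketch {
      static int count(List<Node> nodes, List<Node> seen) {
        if (nodes == null) {
          return 0;
        }
        int jobs = 0;
        for (Node n : nodes) {
          if (!seen.contains(n)) {
            seen.add(n);
            if (n.isJob) {
              jobs++;
            }
            jobs += count(n.children, seen);
          }
        }
        return jobs;
      }

      public static void main(String[] args) {
        // diamond DAG: a -> {b, c}, b -> d, c -> d; d is reachable twice
        Node a = new Node(), b = new Node(), c = new Node(), d = new Node();
        b.isJob = true; c.isJob = true; d.isJob = true;
        a.children.addAll(Arrays.asList(b, c));
        b.children.add(d);
        c.children.add(d);
        System.out.println(count(Arrays.asList(a),
            new ArrayList<Node>())); // prints 3, not 4
      }
    }
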
@@ -121,8 +129,7 @@
       JobConf job = new JobConf(conf, ExecDriver.class);
       JobClient jc = new JobClient(job);
       cs = jc.getClusterStatus();
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       throw e;
     }
@@ -147,28 +154,30 @@
 
         tableDesc td = ft.getTblDesc();
         // partitioned tables don't have tableDesc set on the FetchTask. Instead
-        // they have a list of PartitionDesc objects, each with a table desc. Let's
-        // try to fetch the desc for the first partition and use it's deserializer.
-        if (td == null && ft.getWork() != null && ft.getWork().getPartDesc() != null) {
+        // they have a list of PartitionDesc objects, each with a table desc.
+        // Let's try to fetch the desc for the first partition and use its
+        // deserializer.
+        if (td == null && ft.getWork() != null
+            && ft.getWork().getPartDesc() != null) {
           if (ft.getWork().getPartDesc().size() > 0) {
             td = ft.getWork().getPartDesc().get(0).getTableDesc();
           }
         }
 
         if (td == null) {
-          throw new Exception("No table description found for fetch task: " + ft);
+          throw new Exception("No table description found for fetch task: "
+              + ft);
         }
 
         String tableName = "result";
         List<FieldSchema> lst = MetaStoreUtils.getFieldsFromDeserializer(
             tableName, td.getDeserializer());
         schema = new Schema(lst, null);
-      }
-      else {
+      } else {
         schema = new Schema();
       }
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       throw e;
     }
@@ -182,18 +191,17 @@
   public Schema getThriftSchema() throws Exception {
     Schema schema;
     try {
-      schema = this.getSchema();
+      schema = getSchema();
       if (schema != null) {
-	    List<FieldSchema> lst = schema.getFieldSchemas();
-	    // Go over the schema and convert type to thrift type
-	    if (lst != null) {
-	      for (FieldSchema f : lst) {
-	        f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
+        List<FieldSchema> lst = schema.getFieldSchemas();
+        // Go over the schema and convert type to thrift type
+        if (lst != null) {
+          for (FieldSchema f : lst) {
+            f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
           }
-	    }
+        }
       }
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       throw e;
     }
@@ -216,8 +224,9 @@
   }
 
   public boolean hasReduceTasks(List<Task<? extends Serializable>> tasks) {
-    if (tasks == null)
+    if (tasks == null) {
       return false;
+    }
 
     boolean hasReduce = false;
     for (Task<? extends Serializable> task : tasks) {
@@ -254,9 +263,11 @@
   }
 
   /**
-   * Compile a new query. Any currently-planned query associated with this Driver is discarded.
-   *
-   * @param command The SQL query to compile.
+   * Compile a new query. Any currently-planned query associated with this
+   * Driver is discarded.
+   * 
+   * @param command
+   *          The SQL query to compile.
    */
   public int compile(String command) {
     if (plan != null) {
@@ -267,7 +278,7 @@
     TaskFactory.resetId();
 
     try {
-      ctx = new Context (conf);
+      ctx = new Context(conf);
 
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
@@ -322,25 +333,27 @@
     SQLState = null;
 
     int ret = compile(command);
-    if (ret != 0)
+    if (ret != 0) {
       return new DriverResponse(ret, errorMessage, SQLState);
+    }
 
     ret = execute();
-    if (ret != 0)
+    if (ret != 0) {
       return new DriverResponse(ret, errorMessage, SQLState);
+    }
 
     return new DriverResponse(ret);
   }
 
   /**
    * Encapsulates the basic response info returned by the Driver. Typically
-   * <code>errorMessage</code> and <code>SQLState</code> will only be set if
-   * the <code>responseCode</code> is not 0.
+   * <code>errorMessage</code> and <code>SQLState</code> will only be set if the
+   * <code>responseCode</code> is not 0.
    */
   public class DriverResponse {
-    private int responseCode;
-    private String errorMessage;
-    private String SQLState;
+    private final int responseCode;
+    private final String errorMessage;
+    private final String SQLState;
 
     public DriverResponse(int responseCode) {
       this(responseCode, null, null);
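
For callers, the point of DriverResponse over the older bare int is that the
error text and SQLState travel with the return code. A hedged consumer
sketch; the diff context above elides the enclosing method's signature, so
the name runCommand() here is an assumption:

    Driver driver = new Driver(); // assumes the HiveConf-backed default constructor
    Driver.DriverResponse resp = driver.runCommand(command); // method name assumed
    if (resp.getResponseCode() != 0) {
      System.err.println("[" + resp.getSQLState() + "] " + resp.getErrorMessage());
    }
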
@@ -352,23 +365,33 @@
       this.SQLState = SQLState;
     }
 
-    public int getResponseCode() { return responseCode; }
-    public String getErrorMessage() { return errorMessage; }
-    public String getSQLState() { return SQLState; }
+    public int getResponseCode() {
+      return responseCode;
+    }
+
+    public String getErrorMessage() {
+      return errorMessage;
+    }
+
+    public String getSQLState() {
+      return SQLState;
+    }
   }
 
   private List<PreExecute> getPreExecHooks() throws Exception {
     ArrayList<PreExecute> pehooks = new ArrayList<PreExecute>();
     String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
     pestr = pestr.trim();
-    if (pestr.equals(""))
+    if (pestr.equals("")) {
       return pehooks;
+    }
 
     String[] peClasses = pestr.split(",");
 
-    for(String peClass: peClasses) {
+    for (String peClass : peClasses) {
       try {
-        pehooks.add((PreExecute)Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+        pehooks.add((PreExecute) Class.forName(peClass.trim(), true,
+            JavaUtils.getClassLoader()).newInstance());
       } catch (ClassNotFoundException e) {
         console.printError("Pre Exec Hook Class not found:" + e.getMessage());
         throw e;
@@ -382,14 +405,16 @@
     ArrayList<PostExecute> pehooks = new ArrayList<PostExecute>();
     String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
     pestr = pestr.trim();
-    if (pestr.equals(""))
+    if (pestr.equals("")) {
       return pehooks;
+    }
 
     String[] peClasses = pestr.split(",");
 
-    for(String peClass: peClasses) {
+    for (String peClass : peClasses) {
       try {
-        pehooks.add((PostExecute)Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+        pehooks.add((PostExecute) Class.forName(peClass.trim(), true,
+            JavaUtils.getClassLoader()).newInstance());
       } catch (ClassNotFoundException e) {
         console.printError("Post Exec Hook Class not found:" + e.getMessage());
         throw e;
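
Both loaders above split a comma-separated class list out of the conf and
instantiate each entry reflectively, so a hook only needs a no-arg
constructor and the right interface. A hedged sketch of a pre-execution
hook; the run() signature is inferred from the call site in execute() below,
and the class name is hypothetical:

    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.PreExecute;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.security.UserGroupInformation;

    public class AuditHook implements PreExecute {
      public void run(SessionState sess, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
        SessionState.getConsole().printInfo("query touches " + inputs.size()
            + " input(s) and " + outputs.size() + " output(s)");
      }
    }

It would be enabled by listing the class in PREEXECHOOKS (assumed to resolve
to hive.exec.pre.hooks), e.g. set hive.exec.pre.hooks=AuditHook;
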
@@ -404,7 +429,7 @@
         .getVar(HiveConf.ConfVars.HADOOPJOBNAME));
     int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
 
-    int curJobNo=0;
+    int curJobNo = 0;
 
     String queryId = plan.getQueryId();
     String queryStr = plan.getQueryStr();
@@ -418,7 +443,8 @@
       plan.setStarted();
 
       if (SessionState.get() != null) {
-        SessionState.get().getHiveHistory().startQuery(queryStr, conf.getVar(HiveConf.ConfVars.HIVEQUERYID) );
+        SessionState.get().getHiveHistory().startQuery(queryStr,
+            conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
         SessionState.get().getHiveHistory().logPlanProgress(plan);
       }
       resStream = null;
@@ -426,34 +452,36 @@
       BaseSemanticAnalyzer sem = plan.getPlan();
 
       // Get all the pre execution hooks and execute them.
-      for(PreExecute peh: getPreExecHooks()) {
-        peh.run(SessionState.get(),
-                sem.getInputs(), sem.getOutputs(),
-                UnixUserGroupInformation.readFromConf(conf, UnixUserGroupInformation.UGI_PROPERTY_NAME));
+      for (PreExecute peh : getPreExecHooks()) {
+        peh.run(SessionState.get(), sem.getInputs(), sem.getOutputs(),
+            UnixUserGroupInformation.readFromConf(conf,
+                UnixUserGroupInformation.UGI_PROPERTY_NAME));
       }
 
       int jobs = countJobs(sem.getRootTasks());
       if (jobs > 0) {
         console.printInfo("Total MapReduce jobs = " + jobs);
       }
-      if (SessionState.get() != null){
+      if (SessionState.get() != null) {
         SessionState.get().getHiveHistory().setQueryProperty(queryId,
             Keys.QUERY_NUM_TASKS, String.valueOf(jobs));
-        SessionState.get().getHiveHistory().setIdToTableMap(sem.getIdToTableNameMap());
+        SessionState.get().getHiveHistory().setIdToTableMap(
+            sem.getIdToTableNameMap());
       }
       String jobname = Utilities.abbreviate(queryStr, maxlen - 6);
 
-      // A runtime that launches runnable tasks as separate Threads through TaskRunners
+      // A runtime that launches runnable tasks as separate Threads through
+      // TaskRunners
       // As soon as a task isRunnable, it is put in a queue
       // At any time, at most maxthreads tasks can be running
       // The main thread polls the TaskRunners to check if they have finished.
 
       Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
-      Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner> ();
+      Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
 
-      DriverContext driverCxt = new DriverContext(runnable); 
+      DriverContext driverCxt = new DriverContext(runnable);
 
-      //Add root Tasks to runnable
+      // Add root Tasks to runnable
 
       for (Task<? extends Serializable> tsk : sem.getRootTasks()) {
         driverCxt.addToRunnable(tsk);
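
The comment block above describes the scheduler this loop implements. The
contract it leans on is small: a TaskRunner is a thread that runs its task
and then flips the shared TaskResult to finished. A reduced, hedged sketch
of that contract for a single task (the real loop interleaves many, as the
next hunks show; InterruptedException handling is elided):

    TaskResult res = new TaskResult();
    TaskRunner runner = new TaskRunner(tsk, res);
    runner.start();            // parallel path; runSequential() is the other
    while (res.isRunning()) {
      Thread.sleep(2000);      // mirrors the Driver's sleeptime-based polling
    }
    int exitVal = res.getExitVal();
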
@@ -461,11 +489,12 @@
 
       // Loop while you either have tasks running, or tasks queued up
 
-      while (running.size() != 0 || runnable.peek()!=null) {
+      while (running.size() != 0 || runnable.peek() != null) {
         // Launch up to maxthreads tasks
-        while(runnable.peek() != null && running.size() < maxthreads) {
+        while (runnable.peek() != null && running.size() < maxthreads) {
           Task<? extends Serializable> tsk = runnable.remove();
-          curJobNo = launchTask(tsk, queryId, noName,running, jobname, jobs, curJobNo, driverCxt);
+          curJobNo = launchTask(tsk, queryId, noName, running, jobname, jobs,
+              curJobNo, driverCxt);
         }
 
         // poll the Tasks to see which one completed
@@ -474,13 +503,13 @@
         Task<? extends Serializable> tsk = tskRun.getTask();
 
         int exitVal = tskRes.getExitVal();
-        if(exitVal != 0) {
-          //TODO: This error messaging is not very informative. Fix that.
+        if (exitVal != 0) {
+          // TODO: This error messaging is not very informative. Fix that.
           errorMessage = "FAILED: Execution Error, return code " + exitVal
-                         + " from " + tsk.getClass().getName();
+              + " from " + tsk.getClass().getName();
           SQLState = "08S01";
           console.printError(errorMessage);
-          if(running.size() !=0) {
+          if (running.size() != 0) {
             taskCleanup();
           }
           return 9;
@@ -494,7 +523,7 @@
 
         if (tsk.getChildTasks() != null) {
           for (Task<? extends Serializable> child : tsk.getChildTasks()) {
-            if(DriverContext.isLaunchable(child)) {
+            if (DriverContext.isLaunchable(child)) {
               driverCxt.addToRunnable(child);
             }
           }
@@ -502,22 +531,23 @@
       }
 
       // Get all the post execution hooks and execute them.
-      for(PostExecute peh: getPostExecHooks()) {
-        peh.run(SessionState.get(),
-                sem.getInputs(), sem.getOutputs(),
-                UnixUserGroupInformation.readFromConf(conf, UnixUserGroupInformation.UGI_PROPERTY_NAME));
+      for (PostExecute peh : getPostExecHooks()) {
+        peh.run(SessionState.get(), sem.getInputs(), sem.getOutputs(),
+            UnixUserGroupInformation.readFromConf(conf,
+                UnixUserGroupInformation.UGI_PROPERTY_NAME));
       }
 
-      if (SessionState.get() != null){
+      if (SessionState.get() != null) {
         SessionState.get().getHiveHistory().setQueryProperty(queryId,
             Keys.QUERY_RET_CODE, String.valueOf(0));
         SessionState.get().getHiveHistory().printRowCount(queryId);
       }
     } catch (Exception e) {
-      if (SessionState.get() != null)
+      if (SessionState.get() != null) {
         SessionState.get().getHiveHistory().setQueryProperty(queryId,
             Keys.QUERY_RET_CODE, String.valueOf(12));
-      //TODO: do better with handling types of Exception here
+      }
+      // TODO: do better with handling types of Exception here
       errorMessage = "FAILED: Unknown exception : " + e.getMessage();
       SQLState = "08S01";
       console.printError(errorMessage, "\n"
@@ -546,59 +576,62 @@
   /**
    * Launches a new task
    * 
-   * @param tsk      task being launched
-   * @param queryId  Id of the query containing the task
-   * @param noName   whether the task has a name set
-   * @param running map from taskresults to taskrunners
-   * @param jobname  name of the task, if it is a map-reduce job
-   * @param jobs     number of map-reduce jobs
-   * @param curJobNo the sequential number of the next map-reduce job
-   * @return         the updated number of last the map-reduce job launched
-   */
-
+   * @param tsk
+   *          task being launched
+   * @param queryId
+   *          Id of the query containing the task
+   * @param noName
+   *          whether the task has no name set; if so, a job name is generated
+   * @param running
+   *          map from taskresults to taskrunners
+   * @param jobname
+   *          name of the task, if it is a map-reduce job
+   * @param jobs
+   *          number of map-reduce jobs
+   * @param curJobNo
+   *          the sequential number of the next map-reduce job
+   * @return the updated number of the last map-reduce job launched
+   */
+
+  public int launchTask(Task<? extends Serializable> tsk, String queryId,
+      boolean noName, Map<TaskResult, TaskRunner> running, String jobname,
+      int jobs, int curJobNo, DriverContext cxt) {
 
-
-  public int launchTask(Task<? extends Serializable> tsk, String queryId, 
-    boolean noName, Map<TaskResult,TaskRunner> running, String jobname, 
-    int jobs, int curJobNo, DriverContext cxt) {
-    
     if (SessionState.get() != null) {
       SessionState.get().getHiveHistory().startTask(queryId, tsk,
-        tsk.getClass().getName());
+          tsk.getClass().getName());
     }
     if (tsk.isMapRedTask() && !(tsk instanceof ConditionalTask)) {
       if (noName) {
-        conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "(" 
-          + tsk.getId() + ")");
+        conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "("
+            + tsk.getId() + ")");
       }
       curJobNo++;
-      console.printInfo("Launching Job " + curJobNo + " out of "+jobs);
+      console.printInfo("Launching Job " + curJobNo + " out of " + jobs);
     }
     tsk.initialize(conf, plan, cxt);
     TaskResult tskRes = new TaskResult();
-    TaskRunner tskRun = new TaskRunner(tsk,tskRes);
+    TaskRunner tskRun = new TaskRunner(tsk, tskRes);
 
-    //Launch Task
-    if(HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) && tsk.isMapRedTask()) {
+    // Launch Task
+    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL)
+        && tsk.isMapRedTask()) {
       // Launch it in the parallel mode, as a separate thread only for MR tasks
       tskRun.start();
-    }
-    else
-    {
+    } else {
       tskRun.runSequential();
     }
-    running.put(tskRes,tskRun);        
+    running.put(tskRes, tskRun);
     return curJobNo;
   }
 
-
   /**
    * Cleans up remaining tasks in case of failure
    */
-   
+
   public void taskCleanup() {
-    // The currently existing Shutdown hooks will be automatically called, 
-    // killing the map-reduce processes. 
+    // The currently existing Shutdown hooks will be automatically called,
+    // killing the map-reduce processes.
     // The non MR processes will be killed as well.
     System.exit(9);
   }
@@ -606,16 +639,17 @@
   /**
    * Polls running tasks to see if a task has ended.
    * 
-   * @param results  Set of result objects for running tasks
-   * @return         The result object for any completed/failed task
+   * @param results
+   *          Set of result objects for running tasks
+   * @return The result object for any completed/failed task
    */
 
   public TaskResult pollTasks(Set<TaskResult> results) {
     Iterator<TaskResult> resultIterator = results.iterator();
-    while(true) {
-      while(resultIterator.hasNext()) {
+    while (true) {
+      while (resultIterator.hasNext()) {
         TaskResult tskRes = resultIterator.next();
-        if(tskRes.isRunning() == false) {
+        if (tskRes.isRunning() == false) {
           return tskRes;
         }
       }
@@ -624,9 +658,8 @@
       // Sleep 10 seconds and restart
       try {
         Thread.sleep(sleeptime);
-      }
-      catch (InterruptedException ie) {
-        //Do Nothing
+      } catch (InterruptedException ie) {
+        // Do Nothing
         ;
       }
       resultIterator = results.iterator();
@@ -645,30 +678,34 @@
       return ft.fetch(res);
     }
 
-    if (resStream == null)
+    if (resStream == null) {
       resStream = ctx.getStream();
-    if (resStream == null)
+    }
+    if (resStream == null) {
       return false;
+    }
 
     int numRows = 0;
     String row = null;
 
     while (numRows < maxRows) {
       if (resStream == null) {
-        if (numRows > 0)
+        if (numRows > 0) {
           return true;
-        else
+        } else {
           return false;
+        }
       }
 
       bos.reset();
       Utilities.streamStatus ss;
       try {
         ss = Utilities.readColumn(resStream, bos);
-        if (bos.getCount() > 0)
+        if (bos.getCount() > 0) {
           row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
-        else if (ss == Utilities.streamStatus.TERMINATED)
+        } else if (ss == Utilities.streamStatus.TERMINATED) {
           row = new String();
+        }
 
         if (row != null) {
           numRows++;
@@ -681,8 +718,9 @@
         return false;
       }
 
-      if (ss == Utilities.streamStatus.EOF)
+      if (ss == Utilities.streamStatus.EOF) {
         resStream = ctx.getStream();
+      }
     }
     return true;
   }
@@ -699,7 +737,8 @@
     return (0);
   }
 
-  public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
+  public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
+      throws IOException {
     return plan.getQueryPlan();
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java Thu Jan 21 10:37:58 2010
@@ -25,32 +25,34 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 
 public class DriverContext {
-  
+
   Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
-  
-  public DriverContext( Queue<Task<? extends Serializable>> runnable) {
+
+  public DriverContext(Queue<Task<? extends Serializable>> runnable) {
     this.runnable = runnable;
   }
-  
+
   public Queue<Task<? extends Serializable>> getRunnable() {
-    return this.runnable;
+    return runnable;
   }
-  
+
   /**
    * Checks if a task can be launched
    * 
-   * @param tsk the task to be checked 
-   * @return    true if the task is launchable, false otherwise
+   * @param tsk
+   *          the task to be checked
+   * @return true if the task is launchable, false otherwise
    */
 
   public static boolean isLaunchable(Task<? extends Serializable> tsk) {
-    // A launchable task is one that hasn't been queued, hasn't been initialized, and is runnable.
+    // A launchable task is one that hasn't been queued, hasn't been
+    // initialized, and is runnable.
     return !tsk.getQueued() && !tsk.getInitialized() && tsk.isRunnable();
   }
 
   public void addToRunnable(Task<? extends Serializable> tsk) {
     runnable.add(tsk);
     tsk.setQueued();
- }
+  }
 
 }
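
For completeness, a hedged sketch of how the Driver wires this class up,
mirroring the execute() hunks earlier in this commit: the queue is shared
with the caller, root tasks are seeded unconditionally, and children are
added only once isLaunchable() agrees (rootTasks below is an assumed local):

    Queue<Task<? extends Serializable>> runnable =
        new LinkedList<Task<? extends Serializable>>();
    DriverContext driverCxt = new DriverContext(runnable);
    for (Task<? extends Serializable> tsk : rootTasks) {
      driverCxt.addToRunnable(tsk);
    }
    // later, when a parent finishes:
    // if (DriverContext.isLaunchable(child)) { driverCxt.addToRunnable(child); }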