Posted to commits@hive.apache.org by am...@apache.org on 2013/06/05 12:36:35 UTC
svn commit: r1489800 [1/2] - in /hive/branches/HIVE-4115: ./
beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/src/test/
cli/src/java/org/apache/hadoop/hive/cli/
common/src/java/org/apache/hadoop/hive/conf/ hcatalog/cor...
Author: amareshwari
Date: Wed Jun 5 10:36:34 2013
New Revision: 1489800
URL: http://svn.apache.org/r1489800
Log:
Merging r1486517 through r1489796 into branch HIVE-4115
Added:
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHiveCmdLine.pm
- copied unchanged from r1489796, hive/trunk/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHiveCmdLine.pm
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tests/hive_cmdline.conf
- copied unchanged from r1489796, hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hive_cmdline.conf
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf
- copied unchanged from r1489796, hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf
hive/branches/HIVE-4115/ql/src/test/queries/clientnegative/invalid_columns.q
- copied unchanged from r1489796, hive/trunk/ql/src/test/queries/clientnegative/invalid_columns.q
hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/authorization_8.q
- copied unchanged from r1489796, hive/trunk/ql/src/test/queries/clientpositive/authorization_8.q
hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/auto_join33.q
- copied unchanged from r1489796, hive/trunk/ql/src/test/queries/clientpositive/auto_join33.q
hive/branches/HIVE-4115/ql/src/test/results/clientnegative/invalid_columns.q.out
- copied unchanged from r1489796, hive/trunk/ql/src/test/results/clientnegative/invalid_columns.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/authorization_8.q.out
- copied unchanged from r1489796, hive/trunk/ql/src/test/results/clientpositive/authorization_8.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/auto_join33.q.out
- copied unchanged from r1489796, hive/trunk/ql/src/test/results/clientpositive/auto_join33.q.out
Modified:
hive/branches/HIVE-4115/ (props changed)
hive/branches/HIVE-4115/beeline/src/java/org/apache/hive/beeline/Commands.java
hive/branches/HIVE-4115/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java
hive/branches/HIVE-4115/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/branches/HIVE-4115/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
hive/branches/HIVE-4115/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/build.xml
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/default.res
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/windows.res
hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tools/test/floatpostprocessor.pl
hive/branches/HIVE-4115/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
hive/branches/HIVE-4115/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
hive/branches/HIVE-4115/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hive/branches/HIVE-4115/ql/build.xml
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Context.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DependencyCollectionTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/join_vc.q
hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q
hive/branches/HIVE-4115/ql/src/test/results/clientnegative/recursive_view.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/alter_view_rename.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/join_vc.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/multiMapJoin1.q.out
hive/branches/HIVE-4115/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out
hive/branches/HIVE-4115/ql/src/test/results/compiler/plan/join1.q.xml
hive/branches/HIVE-4115/ql/src/test/results/compiler/plan/join3.q.xml
hive/branches/HIVE-4115/ql/src/test/templates/TestNegativeCliDriver.vm
hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
hive/branches/HIVE-4115/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
Propchange: hive/branches/HIVE-4115/
------------------------------------------------------------------------------
Merged /hive/trunk:r1486517-1489796
Modified: hive/branches/HIVE-4115/beeline/src/java/org/apache/hive/beeline/Commands.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/beeline/src/java/org/apache/hive/beeline/Commands.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/beeline/src/java/org/apache/hive/beeline/Commands.java (original)
+++ hive/branches/HIVE-4115/beeline/src/java/org/apache/hive/beeline/Commands.java Wed Jun 5 10:36:34 2013
@@ -96,8 +96,12 @@ public class Commands {
public boolean metadata(String cmd, String[] args) {
+ if (!(beeLine.assertConnection())) {
+ return false;
+ }
+
try {
- Method[] m = beeLine.getDatabaseConnection().getDatabaseMetaData().getClass().getMethods();
+ Method[] m = beeLine.getDatabaseMetaData().getClass().getMethods();
Set<String> methodNames = new TreeSet<String>();
Set<String> methodNamesUpper = new TreeSet<String>();
for (int i = 0; i < m.length; i++) {
@@ -114,7 +118,7 @@ public class Commands {
return false;
}
- Object res = beeLine.getReflector().invoke(beeLine.getDatabaseConnection().getDatabaseMetaData(),
+ Object res = beeLine.getReflector().invoke(beeLine.getDatabaseMetaData(),
DatabaseMetaData.class, cmd, Arrays.asList(args));
if (res instanceof ResultSet) {
@@ -224,7 +228,7 @@ public class Commands {
if (sql.startsWith("native")) {
sql = sql.substring("native".length() + 1);
}
- String nat = beeLine.getDatabaseConnection().getConnection().nativeSQL(sql);
+ String nat = beeLine.getConnection().nativeSQL(sql);
beeLine.output(nat);
return true;
}
@@ -568,7 +572,7 @@ public class Commands {
for (int i = 0; i < m.length; i++) {
try {
beeLine.output(beeLine.getColorBuffer().pad(m[i], padlen).append(
- "" + beeLine.getReflector().invoke(beeLine.getDatabaseConnection().getDatabaseMetaData(),
+ "" + beeLine.getReflector().invoke(beeLine.getDatabaseMetaData(),
m[i], new Object[0])));
} catch (Exception e) {
beeLine.handleException(e);
@@ -771,9 +775,6 @@ public class Commands {
beeLine.info(beeLine.loc("rows-affected", count)
+ " " + beeLine.locElapsedTime(end - start));
}
- } catch (Exception e) {
- beeLine.error(e);
- throw e;
} finally {
if (stmnt != null) {
stmnt.close();
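For readers outside the BeeLine codebase, the guard added above (check for a live connection, print "No current connection", and return instead of dereferencing null) can be approximated by the following self-contained sketch. The class and field names are illustrative; only assertConnection() and the message mirror the diff.

    import java.sql.Connection;
    import java.sql.DatabaseMetaData;
    import java.sql.SQLException;

    class ConnectionGuardSketch {
      private Connection connection; // stays null until a connect succeeds

      boolean assertConnection() {
        if (connection == null) {
          System.err.println("No current connection");
          return false;
        }
        return true;
      }

      boolean metadata(String cmd) {
        if (!assertConnection()) {
          return false; // fail fast instead of hitting a NullPointerException below
        }
        try {
          DatabaseMetaData md = connection.getMetaData();
          System.out.println(md.getDatabaseProductName() + " handles " + cmd);
          return true;
        } catch (SQLException e) {
          e.printStackTrace();
          return false;
        }
      }
    }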
Modified: hive/branches/HIVE-4115/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java (original)
+++ hive/branches/HIVE-4115/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java Wed Jun 5 10:36:34 2013
@@ -18,23 +18,19 @@
package org.apache.hive.beeline.src.test;
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
import java.io.ByteArrayOutputStream;
+import java.io.File;
import java.io.FileOutputStream;
-
-import junit.framework.TestCase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.Assert;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hive.beeline.BeeLine;
import org.apache.hive.service.server.HiveServer2;
-import org.apache.hive.service.cli.HiveSQLException;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
/**
* TestBeeLineWithArgs - executes tests of the command-line arguments to BeeLine
@@ -216,4 +212,31 @@ public class TestBeeLineWithArgs {
throw e;
}
}
+
+ /**
+ * HIVE-4566
+ * @throws UnsupportedEncodingException
+ */
+ @Test
+ public void testNPE() throws UnsupportedEncodingException {
+ BeeLine beeLine = new BeeLine();
+
+ ByteArrayOutputStream os = new ByteArrayOutputStream();
+ PrintStream beelineOutputStream = new PrintStream(os);
+ beeLine.setOutputStream(beelineOutputStream);
+ beeLine.setErrorStream(beelineOutputStream);
+
+ beeLine.runCommands( new String[] {"!typeinfo"} );
+ String output = os.toString("UTF8");
+ Assert.assertFalse( output.contains("java.lang.NullPointerException") );
+ Assert.assertTrue( output.contains("No current connection") );
+
+ beeLine.runCommands( new String[] {"!nativesql"} );
+ output = os.toString("UTF8");
+ Assert.assertFalse( output.contains("java.lang.NullPointerException") );
+ Assert.assertTrue( output.contains("No current connection") );
+
+ System.out.println(">>> PASSED " + "testNPE" );
+ }
+
}
Modified: hive/branches/HIVE-4115/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/branches/HIVE-4115/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Wed Jun 5 10:36:34 2013
@@ -32,12 +32,12 @@ import java.util.Map;
import java.util.Set;
import jline.ArgumentCompletor;
+import jline.ArgumentCompletor.AbstractArgumentDelimiter;
+import jline.ArgumentCompletor.ArgumentDelimiter;
import jline.Completor;
import jline.ConsoleReader;
import jline.History;
import jline.SimpleCompletor;
-import jline.ArgumentCompletor.AbstractArgumentDelimiter;
-import jline.ArgumentCompletor.ArgumentDelimiter;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
@@ -669,6 +669,30 @@ public class CliDriver {
SessionState.start(ss);
+ // execute cli driver work
+ int ret = 0;
+ try {
+ ret = executeDriver(ss, conf, oproc);
+ } catch (Exception e) {
+ ss.close();
+ throw e;
+ }
+
+ ss.close();
+ return ret;
+ }
+
+ /**
+ * Executes the CLI work
+ * @param ss CliSessionState of the CLI driver
+ * @param conf HiveConf for the driver session
+ * @param oproc Option processor of the CLI invocation
+ * @return status of the CLI command execution
+ * @throws Exception
+ */
+ private static int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc)
+ throws Exception {
+
// connect to Hive Server
if (ss.getHost() != null) {
ss.connect();
@@ -704,12 +728,14 @@ public class CliDriver {
cli.processInitFiles(ss);
if (ss.execString != null) {
- return cli.processLine(ss.execString);
+ int cmdProcessStatus = cli.processLine(ss.execString);
+ return cmdProcessStatus;
}
try {
if (ss.fileName != null) {
- return cli.processFile(ss.fileName);
+ int fileProcessStatus = cli.processFile(ss.fileName);
+ return fileProcessStatus;
}
} catch (FileNotFoundException e) {
System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
@@ -764,9 +790,6 @@ public class CliDriver {
continue;
}
}
-
- ss.close();
-
return ret;
}
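The refactoring above makes run() own the CliSessionState and close it on both the normal and the exception path, with the actual work extracted into executeDriver(). A minimal standalone sketch of that ownership pattern (Session and executeDriver here are stand-ins, not the real Hive types); a try/finally, as shown, is an equivalent and slightly tighter form of the catch-close-rethrow-then-close sequence in the diff:

    class SessionLifetimeSketch {
      static class Session {
        void close() { System.out.println("session closed"); }
      }

      static int run() throws Exception {
        Session ss = new Session();
        try {
          return executeDriver(ss); // all CLI work happens here
        } finally {
          ss.close();               // runs on return and on throw alike
        }
      }

      static int executeDriver(Session ss) throws Exception {
        return 0; // stand-in for the processLine/processFile dispatch
      }

      public static void main(String[] args) throws Exception {
        System.exit(run());
      }
    }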
Modified: hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Jun 5 10:36:34 2013
@@ -905,7 +905,8 @@ public class HiveConf extends Configurat
private static synchronized InputStream getConfVarInputStream() {
if (confVarByteArray == null) {
try {
- Configuration conf = new Configuration();
+ // Create a Hadoop Configuration that does not load default resources.
+ Configuration conf = new Configuration(false);
applyDefaultNonNullConfVars(conf);
@@ -1164,10 +1165,6 @@ public class HiveConf extends Configurat
// Don't override ConfVars with null values
continue;
}
- if (conf.get(var.varname) != null) {
- l4j.debug("Overriding Hadoop conf property " + var.varname + "='" + conf.get(var.varname)
- + "' with Hive default value '" + var.defaultVal +"'");
- }
conf.set(var.varname, var.defaultVal);
}
}
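For context, passing false to the Configuration constructor tells Hadoop not to load its default resources (core-default.xml, core-site.xml, and friends), so the snapshot contains only the values Hive sets explicitly. A small illustrative program; the property name is just an example:

    import org.apache.hadoop.conf.Configuration;

    class ConfVarSnapshotSketch {
      public static void main(String[] args) {
        Configuration withDefaults = new Configuration();  // loads *-default.xml / *-site.xml
        Configuration hiveOnly = new Configuration(false); // starts empty
        hiveOnly.set("hive.exec.parallel", "false");       // example property only
        System.out.println("with defaults: " + withDefaults.size() + " entries");
        System.out.println("hive only:     " + hiveOnly.size() + " entry");
      }
    }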
Modified: hive/branches/HIVE-4115/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java (original)
+++ hive/branches/HIVE-4115/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java Wed Jun 5 10:36:34 2013
@@ -63,6 +63,8 @@ public class TestSemanticAnalysis extend
public void setUpHCatDriver() throws IOException {
if (hcatDriver == null) {
HiveConf hcatConf = new HiveConf(hiveConf);
+ hcatConf.set(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE.varname,
+ "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe");
hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
HCatSemanticAnalyzer.class.getName());
hcatDriver = new Driver(hcatConf);
Modified: hive/branches/HIVE-4115/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml (original)
+++ hive/branches/HIVE-4115/hcatalog/src/docs/src/documentation/content/xdocs/readerwriter.xml Wed Jun 5 10:36:34 2013
@@ -149,7 +149,7 @@ and writes out all the records attached
<!-- ==================================================================== -->
<section>
<title>Complete Example Program</title>
-<p>A complete java program for the reader and writer examples above can be found at: <a href="https://svn.apache.org/repos/asf/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java">https://svn.apache.org/repos/asf/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java</a>.</p>
+<p>A complete java program for the reader and writer examples above can be found at: <a href="https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestReaderWriter.java">https://svn.apache.org/repos/asf/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestReaderWriter.java</a>.</p>
</section>
Modified: hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/build.xml?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/build.xml (original)
+++ hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/build.xml Wed Jun 5 10:36:34 2013
@@ -280,6 +280,8 @@
<arg value="${test.location}/tests/hive.conf"/>
<arg value="${test.location}/tests/hcat.conf"/>
<arg value="${test.location}/tests/hadoop.conf"/>
+ <arg value="${test.location}/tests/hive_nightly.conf"/>
+ <arg value="${test.location}/tests/hive_cmdline.conf"/>
</exec>
</target>
Modified: hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/default.res
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/default.res?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/default.res (original)
+++ hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/default.res Wed Jun 5 10:36:34 2013
@@ -1,2 +1,19 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
$resources = {
};
Modified: hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/windows.res
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/windows.res?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/windows.res (original)
+++ hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/resource/windows.res Wed Jun 5 10:36:34 2013
@@ -1,2 +1,19 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
$resources = {
};
Modified: hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tools/test/floatpostprocessor.pl
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tools/test/floatpostprocessor.pl?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tools/test/floatpostprocessor.pl (original)
+++ hive/branches/HIVE-4115/hcatalog/src/test/e2e/hcatalog/tools/test/floatpostprocessor.pl Wed Jun 5 10:36:34 2013
@@ -42,7 +42,7 @@ sub postprocess($)
for (my $i = 0; $i < @fields; $i++) {
if ($i != 0) { print($delim); }
if ($floats[$i]) {
- printf("%.3f", $fields[$i]);
+ printf("%.2f", $fields[$i]);
} else {
print($fields[$i]);
}
Modified: hive/branches/HIVE-4115/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java Wed Jun 5 10:36:34 2013
@@ -39,6 +39,7 @@ import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
+import java.text.MessageFormat;
import java.util.Calendar;
import java.util.HashMap;
@@ -648,8 +649,32 @@ public class HivePreparedStatement imple
*/
public void setObject(int parameterIndex, Object x) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ if (x instanceof String) {
+ setString(parameterIndex, (String) x);
+ } else if (x instanceof Short) {
+ setShort(parameterIndex, ((Short) x).shortValue());
+ } else if (x instanceof Integer) {
+ setInt(parameterIndex, ((Integer) x).intValue());
+ } else if (x instanceof Long) {
+ setLong(parameterIndex, ((Long) x).longValue());
+ } else if (x instanceof Float) {
+ setFloat(parameterIndex, ((Float) x).floatValue());
+ } else if (x instanceof Double) {
+ setDouble(parameterIndex, ((Double) x).doubleValue());
+ } else if (x instanceof Boolean) {
+ setBoolean(parameterIndex, ((Boolean) x).booleanValue());
+ } else if (x instanceof Byte) {
+ setByte(parameterIndex, ((Byte) x).byteValue());
+ } else if (x instanceof Character) {
+ setString(parameterIndex, ((Character) x).toString());
+ } else {
+ // Can't infer a type.
+ throw new SQLException(
+ MessageFormat
+ .format(
+ "Can''t infer the SQL type to use for an instance of {0}. Use setObject() with an explicit Types value to specify the type to use.",
+ x.getClass().getName()));
+ }
}
/*
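An illustrative caller for the new setObject() dispatch above: boxed primitives and Strings are routed to the matching typed setter, while anything else raises SQLException. The URL, credentials, table, and query are placeholders, and the old org.apache.hadoop.hive.jdbc.HiveDriver is assumed to be on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    class SetObjectSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection con =
            DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
        PreparedStatement ps =
            con.prepareStatement("select * from t where num = ? and name = ?");
        ps.setObject(1, 10);     // routed to setInt
        ps.setObject(2, "test"); // routed to setString
        try {
          ps.setObject(1, new java.util.Date()); // no matching typed setter
        } catch (SQLException expected) {
          System.out.println(expected.getMessage()); // "Can't infer the SQL type ..."
        }
      }
    }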
Modified: hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Wed Jun 5 10:36:34 2013
@@ -229,7 +229,7 @@ public abstract class HiveBaseResultSet
return Date.valueOf((String) obj);
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
- + " to date: " + e.toString());
+ + " to date: " + e.toString(), e);
}
}
@@ -258,7 +258,7 @@ public abstract class HiveBaseResultSet
throw new Exception("Illegal conversion");
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
- + " to double: " + e.toString());
+ + " to double: " + e.toString(), e);
}
}
@@ -287,7 +287,7 @@ public abstract class HiveBaseResultSet
throw new Exception("Illegal conversion");
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
- + " to float: " + e.toString());
+ + " to float: " + e.toString(), e);
}
}
@@ -311,7 +311,9 @@ public abstract class HiveBaseResultSet
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
- throw new SQLException("Cannot convert column " + columnIndex + " to integer" + e.toString());
+ throw new SQLException(
+ "Cannot convert column " + columnIndex + " to integer" + e.toString(),
+ e);
}
}
@@ -331,7 +333,9 @@ public abstract class HiveBaseResultSet
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
- throw new SQLException("Cannot convert column " + columnIndex + " to long: " + e.toString());
+ throw new SQLException(
+ "Cannot convert column " + columnIndex + " to long: " + e.toString(),
+ e);
}
}
@@ -578,7 +582,7 @@ public abstract class HiveBaseResultSet
throw new Exception("Illegal conversion");
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
- + " to short: " + e.toString());
+ + " to short: " + e.toString(), e);
}
}
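The hunks above (and the matching HiveConnection, HiveDatabaseMetaData, and HiveStatement hunks below) all pass the caught exception as the SQLException cause, so callers keep the original stack trace instead of just its toString(). A minimal standalone illustration of the difference:

    import java.sql.SQLException;

    class CauseChainingSketch {
      public static void main(String[] args) {
        try {
          try {
            throw new NumberFormatException("For input string: \"abc\"");
          } catch (Exception e) {
            // Before: the wrapper carried only the message text.
            // After: the cause travels with the wrapper.
            throw new SQLException("Cannot convert column 3 to integer: " + e, e);
          }
        } catch (SQLException e) {
          System.out.println("cause: " + e.getCause()); // the NumberFormatException
        }
      }
    }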
Modified: hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Wed Jun 5 10:36:34 2013
@@ -146,7 +146,7 @@ public class HiveConnection implements j
}
} catch (SaslException e) {
throw new SQLException("Could not establish secure connection to "
- + uri + ": " + e.getMessage(), " 08S01");
+ + uri + ": " + e.getMessage(), "08S01", e);
}
}
@@ -155,9 +155,8 @@ public class HiveConnection implements j
try {
transport.open();
} catch (TTransportException e) {
- e.printStackTrace();
throw new SQLException("Could not establish connection to "
- + uri + ": " + e.getMessage(), " 08S01");
+ + uri + ": " + e.getMessage(), "08S01", e);
}
}
@@ -178,7 +177,7 @@ public class HiveConnection implements j
sessHandle = openResp.getSessionHandle();
} catch (TException e) {
throw new SQLException("Could not establish connection to "
- + uri + ": " + e.getMessage(), " 08S01");
+ + uri + ": " + e.getMessage(), "08S01", e);
}
isClosed = false;
}
Modified: hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java Wed Jun 5 10:36:34 2013
@@ -124,7 +124,7 @@ public class HiveDatabaseMetaData implem
try {
catalogResp = client.GetCatalogs(new TGetCatalogsReq(sessHandle));
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(catalogResp.getStatus());
@@ -197,7 +197,7 @@ public class HiveDatabaseMetaData implem
try {
colResp = client.GetColumns(colReq);
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(colResp.getStatus());
// build the resultset from response
@@ -312,7 +312,7 @@ public class HiveDatabaseMetaData implem
try {
funcResp = client.GetFunctions(getFunctionsReq);
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(funcResp.getStatus());
@@ -553,7 +553,7 @@ public class HiveDatabaseMetaData implem
try {
schemaResp = client.GetSchemas(schemaReq);
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(schemaResp.getStatus());
@@ -597,7 +597,7 @@ public class HiveDatabaseMetaData implem
try {
tableTypeResp = client.GetTableTypes(new TGetTableTypesReq(sessHandle));
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(tableTypeResp.getStatus());
@@ -630,7 +630,7 @@ public class HiveDatabaseMetaData implem
try {
getTableResp = client.GetTables(getTableReq);
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(getTableResp.getStatus());
@@ -687,7 +687,7 @@ public class HiveDatabaseMetaData implem
try {
getTypeInfoResp = client.GetTypeInfo(getTypeInfoReq);
} catch (TException e) {
- throw new SQLException(e.getMessage(), "08S01");
+ throw new SQLException(e.getMessage(), "08S01", e);
}
Utils.verifySuccess(getTypeInfoResp.getStatus());
return new HiveQueryResultSet.Builder()
Modified: hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java Wed Jun 5 10:36:34 2013
@@ -108,7 +108,7 @@ public class HiveStatement implements ja
} catch (SQLException e) {
throw e;
} catch (Exception e) {
- throw new SQLException(e.toString(), "08S01");
+ throw new SQLException(e.toString(), "08S01", e);
}
}
@@ -143,7 +143,7 @@ public class HiveStatement implements ja
} catch (SQLException e) {
throw e;
} catch (Exception e) {
- throw new SQLException(e.toString(), "08S01");
+ throw new SQLException(e.toString(), "08S01", e);
}
stmtHandle = null;
}
@@ -184,7 +184,7 @@ public class HiveStatement implements ja
} catch (SQLException eS) {
throw eS;
} catch (Exception ex) {
- throw new SQLException(ex.toString(), "08S01");
+ throw new SQLException(ex.toString(), "08S01", ex);
}
if (!stmtHandle.isHasResultSet()) {
Modified: hive/branches/HIVE-4115/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/HIVE-4115/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Wed Jun 5 10:36:34 2013
@@ -32,6 +32,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Arrays;
+import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -229,49 +230,18 @@ public class TestJdbcDriver extends Test
///////////////////////////////////////////////
//////////////////// correct testcase
+ //////////////////// executed twice: once with the typed ps setters, once with the generic setObject
//////////////////////////////////////////////
try {
- PreparedStatement ps = con.prepareStatement(sql);
-
- ps.setBoolean(1, true);
- ps.setBoolean(2, true);
-
- ps.setShort(3, Short.valueOf("1"));
- ps.setInt(4, 2);
- ps.setFloat(5, 3f);
- ps.setDouble(6, Double.valueOf(4));
- ps.setString(7, "test'string\"");
- ps.setLong(8, 5L);
- ps.setByte(9, (byte) 1);
- ps.setByte(10, (byte) 1);
-
- ps.setMaxRows(2);
-
- assertTrue(true);
-
+ PreparedStatement ps = createPreparedStatementUsingSetXXX(sql);
ResultSet res = ps.executeQuery();
- assertNotNull(res);
-
- while (res.next()) {
- assertEquals("2011-03-25", res.getString("ddate"));
- assertEquals("10", res.getString("num"));
- assertEquals((byte) 10, res.getByte("num"));
- assertEquals("2011-03-25", res.getDate("ddate").toString());
- assertEquals(Double.valueOf(10).doubleValue(), res.getDouble("num"), 0.1);
- assertEquals(10, res.getInt("num"));
- assertEquals(Short.valueOf("10").shortValue(), res.getShort("num"));
- assertEquals(10L, res.getLong("num"));
- assertEquals(true, res.getBoolean("bv"));
- Object o = res.getObject("ddate");
- assertNotNull(o);
- o = res.getObject("num");
- assertNotNull(o);
- }
- res.close();
- assertTrue(true);
+ assertPreparedStatementResultAsExpected(res);
+ ps.close();
+ ps = createPreparedStatementUsingSetObject(sql);
+ res = ps.executeQuery();
+ assertPreparedStatementResultAsExpected(res);
ps.close();
- assertTrue(true);
} catch (Exception e) {
e.printStackTrace();
@@ -326,6 +296,80 @@ public class TestJdbcDriver extends Test
assertNotNull(
"Execute the invalid setted sql statement should throw exception",
expectedException);
+
+ // setObject with a type that has no typed setter (java.util.Date)
+ expectedException = null;
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+ ps.setObject(1, new Date());
+ ps.executeQuery();
+ } catch (Exception e) {
+ expectedException = e;
+ }
+ assertNotNull(
+ "Setting to an unknown type should throw an exception",
+ expectedException);
+
+ }
+
+ private PreparedStatement createPreparedStatementUsingSetObject(String sql) throws SQLException {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ ps.setObject(1, true); //setBoolean
+ ps.setObject(2, true); //setBoolean
+
+ ps.setObject(3, Short.valueOf("1")); //setShort
+ ps.setObject(4, 2); //setInt
+ ps.setObject(5, 3f); //setFloat
+ ps.setObject(6, Double.valueOf(4)); //setDouble
+ ps.setObject(7, "test'string\""); //setString
+ ps.setObject(8, 5L); //setLong
+ ps.setObject(9, (byte) 1); //setByte
+ ps.setObject(10, (byte) 1); //setByte
+
+ ps.setMaxRows(2);
+ return ps;
+ }
+
+ private PreparedStatement createPreparedStatementUsingSetXXX(String sql) throws SQLException {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ ps.setBoolean(1, true); //setBoolean
+ ps.setBoolean(2, true); //setBoolean
+
+ ps.setShort(3, Short.valueOf("1")); //setShort
+ ps.setInt(4, 2); //setInt
+ ps.setFloat(5, 3f); //setFloat
+ ps.setDouble(6, Double.valueOf(4)); //setDouble
+ ps.setString(7, "test'string\""); //setString
+ ps.setLong(8, 5L); //setLong
+ ps.setByte(9, (byte) 1); //setByte
+ ps.setByte(10, (byte) 1); //setByte
+
+ ps.setMaxRows(2);
+ return ps;
+ }
+
+ private void assertPreparedStatementResultAsExpected(ResultSet res ) throws SQLException {
+ assertNotNull(res);
+
+ while (res.next()) {
+ assertEquals("2011-03-25", res.getString("ddate"));
+ assertEquals("10", res.getString("num"));
+ assertEquals((byte) 10, res.getByte("num"));
+ assertEquals("2011-03-25", res.getDate("ddate").toString());
+ assertEquals(Double.valueOf(10).doubleValue(), res.getDouble("num"), 0.1);
+ assertEquals(10, res.getInt("num"));
+ assertEquals(Short.valueOf("10").shortValue(), res.getShort("num"));
+ assertEquals(10L, res.getLong("num"));
+ assertEquals(true, res.getBoolean("bv"));
+ Object o = res.getObject("ddate");
+ assertNotNull(o);
+ o = res.getObject("num");
+ assertNotNull(o);
+ }
+ res.close();
+ assertTrue(true);
}
public final void testSelectAll() throws Exception {
Modified: hive/branches/HIVE-4115/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/HIVE-4115/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Wed Jun 5 10:36:34 2013
@@ -37,6 +37,7 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -45,13 +46,13 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.serde.serdeConstants;;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -64,7 +65,7 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
-import org.apache.hadoop.util.StringUtils;
+
public class MetaStoreUtils {
@@ -379,7 +380,7 @@ public class MetaStoreUtils {
throw new InvalidOperationException(
"The following columns have types incompatible with the existing " +
"columns in their respective positions :\n" +
- StringUtils.join(",", incompatibleCols)
+ StringUtils.join(incompatibleCols, ',')
);
}
}
@@ -954,12 +955,11 @@ public class MetaStoreUtils {
* @throws MetaException
*/
static void logAndThrowMetaException(Exception e) throws MetaException {
- LOG
- .error("Got exception: " + e.getClass().getName() + " "
- + e.getMessage());
- LOG.error(StringUtils.stringifyException(e));
- throw new MetaException("Got exception: " + e.getClass().getName() + " "
- + e.getMessage());
+ String exInfo = "Got exception: " + e.getClass().getName() + " "
+ + e.getMessage();
+ LOG.error(exInfo, e);
+ LOG.error("Converting exception to MetaException");
+ throw new MetaException(exInfo);
}
/**
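One subtlety in the import swap above: Hadoop's StringUtils.join takes (separator, iterable) while commons-lang's takes (collection, separator), which is why the call site flips its arguments. A small sketch of the two forms:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.lang.StringUtils;

    class JoinArgOrderSketch {
      public static void main(String[] args) {
        List<String> incompatibleCols = Arrays.asList("c1", "c2");
        // commons-lang form, as in the new code:
        System.out.println(StringUtils.join(incompatibleCols, ','));   // prints c1,c2
        // The removed Hadoop form took the separator first:
        // org.apache.hadoop.util.StringUtils.join(",", incompatibleCols);
      }
    }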
Modified: hive/branches/HIVE-4115/ql/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/build.xml?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/build.xml (original)
+++ hive/branches/HIVE-4115/ql/build.xml Wed Jun 5 10:36:34 2013
@@ -239,14 +239,12 @@
<exclude name="META-INF/MANIFEST.MF"/>
</patternset>
</unzip>
-
<unzip src="${build.ivy.lib.dir}/default/javolution-${javolution.version}.jar" dest="${build.dir.hive}/javolution/classes">
<patternset>
<exclude name="META-INF"/>
<exclude name="META-INF/MANIFEST.MF"/>
</patternset>
</unzip>
-
<unzip
src="${build.ivy.lib.dir}/default/protobuf-java-${protobuf.version}.jar"
dest="${build.dir.hive}/protobuf-java/classes">
@@ -255,7 +253,6 @@
<exclude name="META-INF/MANIFEST.MF"/>
</patternset>
</unzip>
-
<unzip
src="${build.ivy.lib.dir}/default/snappy-${snappy.version}.jar"
dest="${build.dir.hive}/snappy/classes">
@@ -264,6 +261,22 @@
<exclude name="META-INF/MANIFEST.MF"/>
</patternset>
</unzip>
+ <unzip
+ src="${build.ivy.lib.dir}/default/jackson-core-asl-${jackson.version}.jar"
+ dest="${build.dir.hive}/jackson-core-asl/classes">
+ <patternset>
+ <exclude name="META-INF"/>
+ <exclude name="META-INF/MANIFEST.MF"/>
+ </patternset>
+ </unzip>
+ <unzip
+ src="${build.ivy.lib.dir}/default/jackson-mapper-asl-${jackson.version}.jar"
+ dest="${build.dir.hive}/jackson-mapper-asl/classes">
+ <patternset>
+ <exclude name="META-INF"/>
+ <exclude name="META-INF/MANIFEST.MF"/>
+ </patternset>
+ </unzip>
<!-- jar jarfile="${build.dir}/hive_${name}.jar" basedir="${build.classes}" / -->
<jar jarfile="${build.dir}/hive-exec-${version}.jar">
@@ -282,6 +295,10 @@
includes="**/*.class"/>
<fileset dir="${build.dir.hive}/snappy/classes"
includes="**/*.class"/>
+ <fileset dir="${build.dir.hive}/jackson-core-asl/classes"
+ includes="**/*.class"/>
+ <fileset dir="${build.dir.hive}/jackson-mapper-asl/classes"
+ includes="**/*.class"/>
<manifest>
<!-- Not putting these in their own manifest section, since that inserts
a new-line, which breaks the reading of the attributes. -->
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Context.java Wed Jun 5 10:36:34 2013
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.TaskRunner;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
@@ -171,10 +172,11 @@ public class Context {
boolean mkdir, String scratchDir) {
String fileSystem = scheme + ":" + authority;
- String dir = fsScratchDirs.get(fileSystem);
+ String dir = fsScratchDirs.get(fileSystem + "-" + TaskRunner.getTaskRunnerID());
if (dir == null) {
- Path dirPath = new Path(scheme, authority, scratchDir);
+ Path dirPath = new Path(scheme, authority,
+ scratchDir + "-" + TaskRunner.getTaskRunnerID());
if (mkdir) {
try {
FileSystem fs = dirPath.getFileSystem(conf);
@@ -191,7 +193,7 @@ public class Context {
}
}
dir = dirPath.toString();
- fsScratchDirs.put(fileSystem, dir);
+ fsScratchDirs.put(fileSystem + "-" + TaskRunner.getTaskRunnerID(), dir);
}
return dir;
@@ -228,9 +230,10 @@ public class Context {
try {
Path dir = FileUtils.makeQualified(nonLocalScratchPath, conf);
URI uri = dir.toUri();
- return getScratchDir(uri.getScheme(), uri.getAuthority(),
+ String newScratchDir = getScratchDir(uri.getScheme(), uri.getAuthority(),
!explain, uri.getPath());
-
+ LOG.info("New scratch dir is " + newScratchDir);
+ return newScratchDir;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (IllegalArgumentException e) {
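A rough standalone sketch of the scratch-directory change above, under the assumption that TaskRunner.getTaskRunnerID() returns a per-runner id: keying both the cache and the directory name by that id gives concurrently running tasks separate scratch directories instead of one shared per filesystem. The class below is illustrative only.

    import java.util.HashMap;
    import java.util.Map;

    class ScratchDirSketch {
      private final Map<String, String> fsScratchDirs = new HashMap<String, String>();

      String getScratchDir(String scheme, String authority, String scratchDir, long runnerId) {
        String key = scheme + ":" + authority + "-" + runnerId; // per-runner cache key
        String dir = fsScratchDirs.get(key);
        if (dir == null) {
          dir = scheme + "://" + authority + scratchDir + "-" + runnerId; // per-runner dir
          fsScratchDirs.put(key, dir);
        }
        return dir;
      }

      public static void main(String[] args) {
        ScratchDirSketch ctx = new ScratchDirSketch();
        System.out.println(ctx.getScratchDir("hdfs", "nn:8020", "/tmp/hive", 1));
        System.out.println(ctx.getScratchDir("hdfs", "nn:8020", "/tmp/hive", 2)); // distinct
      }
    }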
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Jun 5 10:36:34 2013
@@ -113,7 +113,7 @@ public class Driver implements CommandPr
static final protected LogHelper console = new LogHelper(LOG);
private static final Object compileMonitor = new Object();
-
+
private int maxRows = 100;
ByteStream.Output bos = new ByteStream.Output();
@@ -889,10 +889,10 @@ public class Driver implements CommandPr
driverRunHook.preDriverRun(hookContext);
}
} catch (Exception e) {
- errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+ errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e)
+ + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e);
SQLState = ErrorMsg.findSQLState(e.getMessage());
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError(errorMessage);
return new CommandProcessorResponse(12, errorMessage, SQLState);
}
@@ -965,10 +965,10 @@ public class Driver implements CommandPr
driverRunHook.postDriverRun(hookContext);
}
} catch (Exception e) {
- errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+ errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e)
+ + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e);
SQLState = ErrorMsg.findSQLState(e.getMessage());
- console.printError(errorMessage + "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError(errorMessage);
return new CommandProcessorResponse(12, errorMessage, SQLState);
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java Wed Jun 5 10:36:34 2013
@@ -390,17 +390,4 @@ public class ColumnStatsTask extends Tas
public String getName() {
return "COLUMNSTATS TASK";
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- FetchWork fWork = work.getfWork();
- String s = fWork.getTblDir();
- if ((s != null) && ctx.isMRTmpFileURI(s)) {
- fWork.setTblDir(ctx.localizeMRTmpFileURI(s));
- }
- ArrayList<String> ls = fWork.getPartDir();
- if (ls != null) {
- ctx.localizePaths(ls);
- }
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java Wed Jun 5 10:36:34 2013
@@ -205,15 +205,6 @@ public class ConditionalTask extends Tas
}
@Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- if (getListTasks() != null) {
- for (Task<? extends Serializable> t : getListTasks()) {
- t.localizeMRTmpFiles(ctx);
- }
- }
- }
-
- @Override
public List<Task<? extends Serializable>> getDependentTasks() {
return listTasks;
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java Wed Jun 5 10:36:34 2013
@@ -106,12 +106,4 @@ public class CopyTask extends Task<CopyW
public String getName() {
return "COPY";
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // copy task is only used by the load command and
- // does not use any map-reduce tmp files
- // we don't expect to enter this code path at all
- throw new RuntimeException ("Unexpected call");
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Jun 5 10:36:34 2013
@@ -4059,9 +4059,4 @@ public class DDLTask extends Task<DDLWor
public String getName() {
return "DDL";
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // no-op
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DependencyCollectionTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DependencyCollectionTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DependencyCollectionTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/DependencyCollectionTask.java Wed Jun 5 10:36:34 2013
@@ -53,12 +53,4 @@ public class DependencyCollectionTask ex
public String getName() {
return "DEPENDENCY_COLLECTION";
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // copy task doesn't have any execution and so
- // does not use any map-reduce tmp files
- // we don't expect to enter this code path at all
- throw new RuntimeException ("Unexpected call");
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Wed Jun 5 10:36:34 2013
@@ -915,57 +915,6 @@ public class ExecDriver extends Task<Map
}
@Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
-
- // localize any map-reduce input paths
- ctx.localizeKeys((Map<String, Object>) ((Object) work.getPathToAliases()));
- ctx.localizeKeys((Map<String, Object>) ((Object) work.getPathToPartitionInfo()));
-
- // localize any input paths for maplocal work
- MapredLocalWork l = work.getMapLocalWork();
- if (l != null) {
- Map<String, FetchWork> m = l.getAliasToFetchWork();
- if (m != null) {
- for (FetchWork fw : m.values()) {
- String s = fw.getTblDir();
- if ((s != null) && ctx.isMRTmpFileURI(s)) {
- fw.setTblDir(ctx.localizeMRTmpFileURI(s));
- }
- }
- }
- }
-
- // fix up outputs
- Map<String, ArrayList<String>> pa = work.getPathToAliases();
- if (pa != null) {
- for (List<String> ls : pa.values()) {
- for (String a : ls) {
- ArrayList<Operator<? extends OperatorDesc>> opList =
- new ArrayList<Operator<? extends OperatorDesc>>();
- opList.add(work.getAliasToWork().get(a));
-
- while (!opList.isEmpty()) {
- Operator<? extends OperatorDesc> op = opList.remove(0);
-
- if (op instanceof FileSinkOperator) {
- FileSinkDesc fdesc = ((FileSinkOperator) op).getConf();
- String s = fdesc.getDirName();
- if ((s != null) && ctx.isMRTmpFileURI(s)) {
- fdesc.setDirName(ctx.localizeMRTmpFileURI(s));
- }
- ((FileSinkOperator) op).setConf(fdesc);
- }
-
- if (op.getChildOperators() != null) {
- opList.addAll(op.getChildOperators());
- }
- }
- }
- }
- }
- }
-
- @Override
public void updateCounters(Counters ctrs, RunningJob rj) throws IOException {
for (Operator<? extends OperatorDesc> op : work.getAliasToWork().values()) {
op.updateCounters(ctrs);
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Jun 5 10:36:34 2013
@@ -717,13 +717,6 @@ public class ExplainTask extends Task<Ex
}
@Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // explain task has nothing to localize
- // we don't expect to enter this code path at all
- throw new RuntimeException("Unexpected call");
- }
-
- @Override
public List<FieldSchema> getResultSchema() {
FieldSchema tmpFieldSchema = new FieldSchema();
List<FieldSchema> colList = new ArrayList<FieldSchema>();
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java Wed Jun 5 10:36:34 2013
@@ -163,19 +163,6 @@ public class FetchTask extends Task<Fetc
return "FETCH";
}
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- String s = work.getTblDir();
- if ((s != null) && ctx.isMRTmpFileURI(s)) {
- work.setTblDir(ctx.localizeMRTmpFileURI(s));
- }
-
- ArrayList<String> ls = work.getPartDir();
- if (ls != null) {
- ctx.localizePaths(ls);
- }
- }
-
/**
* Clear the Fetch Operator.
*
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java Wed Jun 5 10:36:34 2013
@@ -114,9 +114,4 @@ public class FunctionTask extends Task<F
public String getName() {
return "FUNCTION";
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- throw new RuntimeException ("Unexpected call");
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java Wed Jun 5 10:36:34 2013
@@ -448,11 +448,6 @@ public class MapredLocalTask extends Tas
}
@Override
- public void localizeMRTmpFilesImpl(Context ctx) {
-
- }
-
- @Override
public boolean isMapRedLocalTask() {
return true;
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Wed Jun 5 10:36:34 2013
@@ -537,10 +537,4 @@ public class MoveTask extends Task<MoveW
public String getName() {
return "MOVE";
}
-
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // no-op
- }
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java Wed Jun 5 10:36:34 2013
@@ -263,11 +263,6 @@ public class StatsTask extends Task<Stat
return "STATS";
}
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // Nothing to do for StatsTask here.
- }
-
private int aggregateStats() {
StatsAggregator statsAggregator = null;
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Wed Jun 5 10:36:34 2013
@@ -384,37 +384,6 @@ public abstract class Task<T extends Ser
public abstract StageType getType();
/**
- * If this task uses any map-reduce intermediate data (either for reading or for writing),
- * localize them (using the supplied Context). Map-Reduce intermediate directories are allocated
- * using Context.getMRTmpFileURI() and can be localized using localizeMRTmpFileURI().
- *
- * This method is declared abstract to force any task code to explicitly deal with this aspect of
- * execution.
- *
- * @param ctx
- * context object with which to localize
- */
- abstract protected void localizeMRTmpFilesImpl(Context ctx);
-
- /**
- * Localize a task tree
- *
- * @param ctx
- * context object with which to localize
- */
- public final void localizeMRTmpFiles(Context ctx) {
- localizeMRTmpFilesImpl(ctx);
-
- if (childTasks == null) {
- return;
- }
-
- for (Task<? extends Serializable> t : childTasks) {
- t.localizeMRTmpFiles(ctx);
- }
- }
-
- /**
* Subscribe the feed of publisher. To prevent cycles, a task can only subscribe to its ancestor.
* Feed is a generic form of execution-time feedback (type, value) pair from one task to another
* task. Examples include dynamic partitions (which are only available at execution time). The
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java Wed Jun 5 10:36:34 2013
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.exec;
import java.io.Serializable;
+import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -30,6 +31,13 @@ public class TaskRunner extends Thread {
protected Task<? extends Serializable> tsk;
protected TaskResult result;
protected SessionState ss;
+ private static AtomicLong taskCounter = new AtomicLong(0);
+ private static ThreadLocal<Long> taskRunnerID = new ThreadLocal<Long>() {
+ @Override
+ protected Long initialValue() {
+ return taskCounter.incrementAndGet();
+ }
+ };
public TaskRunner(Task<? extends Serializable> tsk, TaskResult result) {
this.tsk = tsk;
@@ -61,4 +69,7 @@ public class TaskRunner extends Thread {
result.setExitVal(exitVal);
}
+ public static long getTaskRunnerID () {
+ return taskRunnerID.get();
+ }
}
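
The TaskRunner hunk above gives each runner thread a stable, process-unique ID by pairing a static AtomicLong with a ThreadLocal whose initialValue() draws the next counter value; the ID is allocated lazily on the thread's first call and then cached for that thread. A minimal standalone sketch of the same pattern (class and method names here are illustrative, not part of the patch):

import java.util.concurrent.atomic.AtomicLong;

public class ThreadIdDemo {
  private static final AtomicLong counter = new AtomicLong(0);
  // initialValue() runs once per thread, on first get(), so each thread
  // receives a distinct value and keeps it for its lifetime.
  private static final ThreadLocal<Long> threadId = new ThreadLocal<Long>() {
    @Override
    protected Long initialValue() {
      return counter.incrementAndGet();
    }
  };

  public static long getThreadId() {
    return threadId.get();
  }

  public static void main(String[] args) throws InterruptedException {
    Runnable r = new Runnable() {
      public void run() {
        // repeated calls from the same thread return the same ID
        System.out.println(Thread.currentThread().getName() + " -> " + getThreadId());
      }
    };
    Thread t1 = new Thread(r, "runner-1");
    Thread t2 = new Thread(r, "runner-2");
    t1.start();
    t2.start();
    t1.join();
    t2.join();
  }
}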
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Jun 5 10:36:34 2013
@@ -1313,6 +1313,18 @@ public final class Utilities {
}
/**
+ * Returns null if the path does not exist.
+ */
+ public static FileStatus[] listStatusIfExists(Path path, FileSystem fs) throws IOException {
+ try {
+ return fs.listStatus(path);
+ } catch (FileNotFoundException e) {
+ // FS in hadoop 2.0 throws FNF instead of returning null
+ return null;
+ }
+ }
+
+ /**
* Get all file status from a root path and recursively go deep into certain levels.
*
* @param path
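
The new listStatusIfExists helper above normalizes a behavioral difference between Hadoop versions: listStatus() on a missing path returned null on older FileSystem implementations but throws FileNotFoundException in Hadoop 2.0. A hedged usage sketch (the tablePath and fs variables are illustrative):

// Callers can treat "path missing" and "path empty" uniformly:
FileStatus[] statuses = Utilities.listStatusIfExists(tablePath, fs);
if (statuses == null || statuses.length == 0) {
  // nothing to read for this input; skip it
} else {
  for (FileStatus status : statuses) {
    // process status.getPath() ...
  }
}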
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java Wed Jun 5 10:36:34 2013
@@ -98,9 +98,4 @@ public class IndexMetadataChangeTask ext
public StageType getType() {
return StageType.DDL;
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- }
-
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java Wed Jun 5 10:36:34 2013
@@ -378,10 +378,4 @@ public class BlockMergeTask extends Task
public void updateCounters(Counters ctrs, RunningJob rj) throws IOException {
// no op
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // no op
- }
-
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java Wed Jun 5 10:36:34 2013
@@ -379,11 +379,4 @@ public class PartialScanTask extends Tas
public void updateCounters(Counters ctrs, RunningJob rj) throws IOException {
// no op
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // no op
- }
-
-
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java Wed Jun 5 10:36:34 2013
@@ -254,10 +254,4 @@ public class ColumnTruncateTask extends
public void updateCounters(Counters ctrs, RunningJob rj) throws IOException {
// no op
}
-
- @Override
- protected void localizeMRTmpFilesImpl(Context ctx) {
- // no op
- }
-
}
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Wed Jun 5 10:36:34 2013
@@ -196,6 +196,10 @@ public class Table implements Serializab
List<String> colNames = new ArrayList<String>();
while (iterCols.hasNext()) {
String colName = iterCols.next().getName();
+ if (!MetaStoreUtils.validateName(colName)) {
+ throw new HiveException("Invalid column name '" + colName
+ + "' in the table definition");
+ }
Iterator<String> iter = colNames.iterator();
while (iter.hasNext()) {
String oldColName = iter.next();
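
The Table.java hunk above rejects invalid column names during semantic analysis by running each name through MetaStoreUtils.validateName before the existing duplicate-name check. A rough sketch of the effect; the exact pattern validateName accepts is an assumption here (alphanumerics plus underscore), not something this patch specifies:

import java.util.regex.Pattern;

// Illustrative only: approximates the check validateName performs.
final class ColumnNameCheck {
  private static final Pattern VALID_NAME = Pattern.compile("[\\w_]+");

  static void check(String colName) throws Exception {
    if (!VALID_NAME.matcher(colName).matches()) {
      // mirrors the HiveException thrown in the patch
      throw new Exception("Invalid column name '" + colName
          + "' in the table definition");
    }
  }
}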
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Wed Jun 5 10:36:34 2013
@@ -61,6 +61,7 @@ import org.apache.hadoop.hive.ql.parse.S
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
@@ -695,12 +696,14 @@ public final class ColumnPrunerProcFacto
ReduceSinkOperator reduce, ColumnPrunerProcCtx cppCtx) throws SemanticException {
ReduceSinkDesc reduceConf = reduce.getConf();
Map<String, ExprNodeDesc> oldMap = reduce.getColumnExprMap();
+ LOG.info("RS " + reduce.getIdentifier() + " oldColExprMap: " + oldMap);
RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRowResolver();
ArrayList<ColumnInfo> signature = oldRR.getRowSchema().getSignature();
List<String> valueColNames = reduceConf.getOutputValueColumnNames();
ArrayList<String> newValueColNames = new ArrayList<String>();
+ List<ExprNodeDesc> keyExprs = reduceConf.getKeyCols();
List<ExprNodeDesc> valueExprs = reduceConf.getValueCols();
ArrayList<ExprNodeDesc> newValueExprs = new ArrayList<ExprNodeDesc>();
@@ -713,10 +716,16 @@ public final class ColumnPrunerProcFacto
outputCol = Utilities.ReduceField.VALUE.toString() + "." + outputCol;
nm = oldRR.reverseLookup(outputCol);
}
- ColumnInfo colInfo = oldRR.getFieldMap(nm[0]).remove(nm[1]);
- oldRR.getInvRslvMap().remove(colInfo.getInternalName());
- oldMap.remove(outputCol);
- signature.remove(colInfo);
+
+ // Only remove a column's information if it is not a key,
+ // i.e. the column does not appear in the keyExprs of the RS
+ if (ExprNodeDescUtils.indexOf(outputColExpr, keyExprs) == -1) {
+ ColumnInfo colInfo = oldRR.getFieldMap(nm[0]).remove(nm[1]);
+ oldRR.getInvRslvMap().remove(colInfo.getInternalName());
+ oldMap.remove(outputCol);
+ signature.remove(colInfo);
+ }
+
} else {
newValueColNames.add(outputCol);
newValueExprs.add(outputColExpr);
@@ -729,6 +738,7 @@ public final class ColumnPrunerProcFacto
.getFieldSchemasFromColumnList(reduceConf.getValueCols(),
newValueColNames, 0, ""));
reduceConf.setValueSerializeInfo(newValueTable);
+ LOG.info("RS " + reduce.getIdentifier() + " newColExprMap: " + oldMap);
}
/**
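
The ColumnPrunerProcFactory change above stops the pruner from dropping a ReduceSinkOperator value column that also appears among the RS keys; removing such a column from the row resolver and column-expression map would strip schema information the key still needs. The check relies on ExprNodeDescUtils.indexOf, whose implementation is not part of this diff; presumably it is a linear scan using semantic equality rather than object identity, roughly:

// Assumed shape of ExprNodeDescUtils.indexOf (not shown in this patch):
public static int indexOf(ExprNodeDesc desc, List<ExprNodeDesc> descs) {
  for (int i = 0; i < descs.size(); i++) {
    // isSame() compares expressions structurally, not by reference
    if (descs.get(i).isSame(desc)) {
      return i;
    }
  }
  return -1;
}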
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java Wed Jun 5 10:36:34 2013
@@ -73,6 +73,8 @@ import static org.apache.hadoop.hive.con
* If two reducer sink operators share the same partition/sort columns and order,
* they can be merged. This should happen after map join optimization because map
* join optimization will remove reduce sink operators.
+ *
+ * This optimizer removes/replaces the child RS (not the parent), which is the safer way for DefaultGraphWalker.
*/
public class ReduceSinkDeDuplication implements Transform{
@@ -89,9 +91,12 @@ public class ReduceSinkDeDuplication imp
// generate pruned column list for all relevant operators
ReduceSinkDeduplicateProcCtx cppCtx = new ReduceSinkDeduplicateProcCtx(pGraphContext);
+ // for auto-converted map-joins, it is not safe to dedup here (todo)
boolean mergeJoins = !pctx.getConf().getBoolVar(HIVECONVERTJOIN) &&
!pctx.getConf().getBoolVar(HIVECONVERTJOINNOCONDITIONALTASK);
+ // If multiple rules match with the same cost, the last rule is chosen as the processor;
+ // see DefaultRuleDispatcher#dispatch()
Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
opRules.put(new RuleRegExp("R1", RS + "%.*%" + RS + "%"),
ReduceSinkDeduplicateProcFactory.getReducerReducerProc());
@@ -119,8 +124,14 @@ public class ReduceSinkDeDuplication imp
class ReduceSinkDeduplicateProcCtx implements NodeProcessorCtx {
ParseContext pctx;
+
+ // For queries using a script, the optimization cannot be applied without the user's confirmation.
+ // If the script preserves the aliases and values of key-related columns, the user can set this to true.
boolean trustScript;
- // min reducer num for merged RS (to avoid query contains "order by" executed by one reducer)
+
+ // This is the minimum number of reducers for the deduped RS, to keep a query from running
+ // on too few reducers. For example, a GroupBy+OrderBy query could be executed by
+ // only one reducer if this configuration did not prevent it.
int minReducer;
Set<Operator<?>> removedOps;
@@ -178,7 +189,7 @@ public class ReduceSinkDeDuplication imp
}
}
- public abstract static class AbsctractReducerReducerProc implements NodeProcessor {
+ public abstract static class AbstractReducerReducerProc implements NodeProcessor {
ReduceSinkDeduplicateProcCtx dedupCtx;
@@ -323,6 +334,8 @@ public class ReduceSinkDeDuplication imp
return result;
}
+ // for left outer joins, the left alias is sorted but the right alias might not be
+ // (nulls, etc.), and vice versa for right outer joins.
private boolean isSortedTag(JoinOperator joinOp, int tag) {
for (JoinCondDesc cond : joinOp.getConf().getConds()) {
switch (cond.getType()) {
@@ -356,6 +369,10 @@ public class ReduceSinkDeDuplication imp
return -1;
}
+ /**
+ * The current RS dedup removes/replaces the child RS, so it always copies the
+ * more specific parts of the child RS's configuration to the parent RS.
+ */
protected boolean merge(ReduceSinkOperator cRS, ReduceSinkOperator pRS, int minReducer)
throws SemanticException {
int[] result = checkStatus(cRS, pRS, minReducer);
@@ -379,7 +396,15 @@ public class ReduceSinkDeDuplication imp
return true;
}
- // -1 for p to c, 1 for c to p
+ /**
+ * Returns the merge directions between two RSs for each criterion (ordering, number of
+ * reducers, reducer keys, partition keys). Returns null if any category is not mergeable.
+ *
+ * The value at each index can be -1, 0, or 1:
+ * 1. 0 means the two configurations in the category are the same
+ * 2. -1 means the parent RS's configuration is more specific than the child's
+ * 3. 1 means the child RS's configuration is more specific than the parent's
+ */
private int[] checkStatus(ReduceSinkOperator cRS, ReduceSinkOperator pRS, int minReducer)
throws SemanticException {
ReduceSinkDesc cConf = cRS.getConf();
@@ -408,6 +433,11 @@ public class ReduceSinkDeDuplication imp
return new int[] {moveKeyColTo, movePartitionColTo, moveRSOrderTo, moveReducerNumTo};
}
+ /**
+ * The overlapping part of the keys should be the same between parent and child.
+ * If the child has more keys than the parent, the non-overlapping part of the keys
+ * should be backtrackable to the parent.
+ */
private Integer checkExprs(List<ExprNodeDesc> ckeys, List<ExprNodeDesc> pkeys,
ReduceSinkOperator cRS, ReduceSinkOperator pRS) throws SemanticException {
Integer moveKeyColTo = 0;
@@ -419,6 +449,7 @@ public class ReduceSinkDeDuplication imp
if (pkeys == null || pkeys.isEmpty()) {
for (ExprNodeDesc ckey : ckeys) {
if (ExprNodeDescUtils.backtrack(ckey, cRS, pRS) == null) {
+ // cKey is not present in parent
return null;
}
}
@@ -430,6 +461,7 @@ public class ReduceSinkDeDuplication imp
return moveKeyColTo;
}
+ // backtrack the child's key exprs to the parent and compare them with the parent's
protected Integer sameKeys(List<ExprNodeDesc> cexprs, List<ExprNodeDesc> pexprs,
Operator<?> child, Operator<?> parent) throws SemanticException {
int common = Math.min(cexprs.size(), pexprs.size());
@@ -438,13 +470,14 @@ public class ReduceSinkDeDuplication imp
for (; i < common; i++) {
ExprNodeDesc pexpr = pexprs.get(i);
ExprNodeDesc cexpr = ExprNodeDescUtils.backtrack(cexprs.get(i), child, parent);
- if (!pexpr.isSame(cexpr)) {
+ if (cexpr == null || !pexpr.isSame(cexpr)) {
return null;
}
}
for (;i < limit; i++) {
if (cexprs.size() > pexprs.size()) {
if (ExprNodeDescUtils.backtrack(cexprs.get(i), child, parent) == null) {
+ // cKey is not present in parent
return null;
}
}
@@ -452,6 +485,7 @@ public class ReduceSinkDeDuplication imp
return Integer.valueOf(cexprs.size()).compareTo(pexprs.size());
}
+ // the sort order of the overlapping keys should be exactly the same
protected Integer checkOrder(String corder, String porder) {
if (corder == null || corder.trim().equals("")) {
if (porder == null || porder.trim().equals("")) {
@@ -471,6 +505,11 @@ public class ReduceSinkDeDuplication imp
return Integer.valueOf(corder.length()).compareTo(porder.length());
}
+ /**
+ * If the number of reducers for an RS is -1, the RS can have any number of reducers.
+ * This is generally true except for order-by and forced-bucketing cases.
+ * If both reducer counts are not -1, they should be the same.
+ */
protected Integer checkNumReducer(int creduce, int preduce) {
if (creduce < 0) {
if (preduce < 0) {
@@ -549,6 +588,8 @@ public class ReduceSinkDeDuplication imp
return select;
}
+ // replace the cRS with a SEL operator
+ // if the child of the cRS is an EXT, the EXT should also be removed
private SelectOperator replaceOperatorWithSelect(Operator<?> operator, ParseContext context)
throws SemanticException {
RowResolver inputRR = context.getOpParseCtx().get(operator).getRowResolver();
@@ -585,6 +626,8 @@ public class ReduceSinkDeDuplication imp
Operator<?> parent = getSingleParent(cRS);
if (parent instanceof GroupByOperator) {
+ // pRS-cGBYm-cRS-cGBYr (map aggregation) --> pRS-cGBYr(COMPLETE)
+ // copies the desc of cGBYm to cGBYr and removes cGBYm and cRS
GroupByOperator cGBYm = (GroupByOperator) parent;
cGBYr.getConf().setKeys(cGBYm.getConf().getKeys());
@@ -597,6 +640,8 @@ public class ReduceSinkDeDuplication imp
RowResolver resolver = context.getOpParseCtx().get(cGBYm).getRowResolver();
context.getOpParseCtx().get(cGBYr).setRowResolver(resolver);
} else {
+ // pRS-cRS-cGBYr (no map aggregation) --> pRS-cGBYr(COMPLETE)
+ // reverts the expressions of cGBYr to those of cRS
cGBYr.getConf().setKeys(ExprNodeDescUtils.backtrack(cGBYr.getConf().getKeys(), cGBYr, cRS));
for (AggregationDesc aggr : cGBYr.getConf().getAggregators()) {
aggr.setParameters(ExprNodeDescUtils.backtrack(aggr.getParameters(), cGBYr, cRS));
@@ -655,7 +700,7 @@ public class ReduceSinkDeDuplication imp
}
}
- static class GroupbyReducerProc extends AbsctractReducerReducerProc {
+ static class GroupbyReducerProc extends AbstractReducerReducerProc {
// pRS-pGBY-cRS
public Object process(ReduceSinkOperator cRS, ParseContext context)
@@ -689,7 +734,7 @@ public class ReduceSinkDeDuplication imp
}
}
- static class JoinReducerProc extends AbsctractReducerReducerProc {
+ static class JoinReducerProc extends AbstractReducerReducerProc {
// pRS-pJOIN-cRS
public Object process(ReduceSinkOperator cRS, ParseContext context)
@@ -717,7 +762,7 @@ public class ReduceSinkDeDuplication imp
}
}
- static class ReducerReducerProc extends AbsctractReducerReducerProc {
+ static class ReducerReducerProc extends AbstractReducerReducerProc {
// pRS-cRS
public Object process(ReduceSinkOperator cRS, ParseContext context)
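
The -1/0/1 merge-direction convention documented above is easiest to see in the reducer-count check. The method body is not part of this diff, so the following is an illustration consistent with the new javadoc rather than the actual implementation:

// 0: equal; -1: parent more specific; 1: child more specific; null: not mergeable
protected Integer checkNumReducer(int creduce, int preduce) {
  if (creduce < 0) {
    // child accepts any reducer count; parent may pin one
    return preduce < 0 ? 0 : -1;
  }
  if (preduce < 0) {
    // child pins a reducer count; parent accepts any
    return 1;
  }
  // two fixed counts must agree, otherwise the RSs cannot be merged
  return creduce == preduce ? Integer.valueOf(0) : null;
}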
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java Wed Jun 5 10:36:34 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.exec.Fi
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.MapRedTask;
import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -364,6 +365,17 @@ public class CommonJoinTaskDispatcher ex
return;
}
+ // remove the unnecessary TableScan
+ if (childAliasOp instanceof TableScanOperator) {
+ TableScanOperator tso = (TableScanOperator)childAliasOp;
+ if (tso.getNumChild() != 1) {
+ // shouldn't happen
+ return;
+ }
+ childAliasOp = tso.getChildOperators().get(0);
+ childAliasOp.getParentOperators().remove(tso);
+ }
+
// Merge the 2 trees - remove the FileSinkOperator from the first tree pass it to the
// top of the second
Operator<? extends Serializable> parentFOp = mapJoinTaskFileSinkOperator
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java Wed Jun 5 10:36:34 2013
@@ -118,17 +118,10 @@ public class SortMergeJoinTaskDispatcher
PartitionDesc partitionInfo = currWork.getAliasToPartnInfo().get(alias);
if (fetchWork.getTblDir() != null) {
- ArrayList<String> aliases = new ArrayList<String>();
- aliases.add(alias);
- currWork.getPathToAliases().put(fetchWork.getTblDir(), aliases);
- currWork.getPathToPartitionInfo().put(fetchWork.getTblDir(), partitionInfo);
- }
- else {
+ currWork.mergeAliasedInput(alias, fetchWork.getTblDir(), partitionInfo);
+ } else {
for (String pathDir : fetchWork.getPartDir()) {
- ArrayList<String> aliases = new ArrayList<String>();
- aliases.add(alias);
- currWork.getPathToAliases().put(pathDir, aliases);
- currWork.getPathToPartitionInfo().put(pathDir, partitionInfo);
+ currWork.mergeAliasedInput(alias, pathDir, partitionInfo);
}
}
}
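
The SortMergeJoinTaskDispatcher hunk above replaces two hand-rolled blocks, each of which overwrote pathToAliases with a fresh single-element list, with a call to mergeAliasedInput. The helper's body is not part of this diff; presumably it appends the alias when the path is already mapped instead of clobbering the earlier entry, along these lines:

// Assumed shape of MapredWork.mergeAliasedInput (not shown in this patch):
public void mergeAliasedInput(String alias, String pathDir, PartitionDesc partitionInfo) {
  ArrayList<String> aliases = pathToAliases.get(pathDir);
  if (aliases == null) {
    // first alias for this path: register both the alias and its partition info
    aliases = new ArrayList<String>();
    aliases.add(alias);
    pathToAliases.put(pathDir, aliases);
    pathToPartitionInfo.put(pathDir, partitionInfo);
  } else {
    // path already mapped: merge instead of overwriting the existing alias list
    aliases.add(alias);
  }
}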
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1489800&r1=1489799&r2=1489800&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Jun 5 10:36:34 2013
@@ -9637,15 +9637,6 @@ public class SemanticAnalyzer extends Ba
ctx.setOriginalTracker(ShimLoader.getHadoopShims().getJobLauncherRpcAddress(conf));
ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf, "local");
console.printInfo("Automatically selecting local only mode for query");
-
- // If all the tasks can be run locally, we can use local disk for
- // storing intermediate data.
-
- /**
- * This code is commented out pending further testing/development
- * for (Task<? extends OperatorDesc> t: rootTasks)
- * t.localizeMRTmpFiles(ctx);
- */
}
}