You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jv...@apache.org on 2010/08/24 17:47:26 UTC
svn commit: r988603 [2/2] - in /hadoop/hive/trunk: ./
cli/src/java/org/apache/hadoop/hive/cli/
common/src/java/org/apache/hadoop/hive/conf/ conf/ data/conf/
hbase-handler/lib/ hbase-handler/src/test/templates/
hwi/src/test/org/apache/hadoop/hive/hwi/ j...
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Tue Aug 24 15:47:24 2010
@@ -38,7 +38,10 @@ public class DDLWork implements Serializ
private DropTableDesc dropTblDesc;
private AlterTableDesc alterTblDesc;
private ShowTablesDesc showTblsDesc;
+ private LockTableDesc lockTblDesc;
+ private UnlockTableDesc unlockTblDesc;
private ShowFunctionsDesc showFuncsDesc;
+ private ShowLocksDesc showLocksDesc;
private DescFunctionDesc descFunctionDesc;
private ShowPartitionsDesc showPartsDesc;
private DescTableDesc descTblDesc;
@@ -143,6 +146,26 @@ public class DDLWork implements Serializ
}
/**
+ * @param lockTblDesc
+ */
+ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+ LockTableDesc lockTblDesc) {
+ this(inputs, outputs);
+
+ this.lockTblDesc = lockTblDesc;
+ }
+
+ /**
+ * @param unlockTblDesc
+ */
+ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+ UnlockTableDesc unlockTblDesc) {
+ this(inputs, outputs);
+
+ this.unlockTblDesc = unlockTblDesc;
+ }
+
+ /**
* @param showFuncsDesc
*/
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
@@ -153,6 +176,16 @@ public class DDLWork implements Serializ
}
/**
+ * @param showLocksDesc
+ */
+ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+ ShowLocksDesc showLocksDesc) {
+ this(inputs, outputs);
+
+ this.showLocksDesc = showLocksDesc;
+ }
+
+ /**
* @param descFuncDesc
*/
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
@@ -331,6 +364,30 @@ public class DDLWork implements Serializ
}
/**
+ * @return the showLocksDesc
+ */
+ @Explain(displayName = "Show Lock Operator")
+ public ShowLocksDesc getShowLocksDesc() {
+ return showLocksDesc;
+ }
+
+ /**
+ * @return the lockTblDesc
+ */
+ @Explain(displayName = "Lock Table Operator")
+ public LockTableDesc getLockTblDesc() {
+ return lockTblDesc;
+ }
+
+ /**
+ * @return the unlockTblDesc
+ */
+ @Explain(displayName = "Unlock Table Operator")
+ public UnlockTableDesc getUnlockTblDesc() {
+ return unlockTblDesc;
+ }
+
+ /**
* @return the descFuncDesc
*/
@Explain(displayName = "Show Function Operator")
@@ -347,6 +404,30 @@ public class DDLWork implements Serializ
}
/**
+ * @param showLocksDesc
+ * the showLocksDesc to set
+ */
+ public void setShowLocksDesc(ShowLocksDesc showLocksDesc) {
+ this.showLocksDesc = showLocksDesc;
+ }
+
+ /**
+ * @param lockTblDesc
+ * the lockTblDesc to set
+ */
+ public void setLockTblDesc(LockTableDesc lockTblDesc) {
+ this.lockTblDesc = lockTblDesc;
+ }
+
+ /**
+ * @param unlockTblDesc
+ * the unlockTblDesc to set
+ */
+ public void setUnlockTblDesc(UnlockTableDesc unlockTblDesc) {
+ this.unlockTblDesc = unlockTblDesc;
+ }
+
+ /**
* @param descFuncDesc
* the showFuncsDesc to set
*/
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java Tue Aug 24 15:47:24 2010
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+
+/**
+ * LockTableDesc.
+ *
+ */
+@Explain(displayName = "Lock Table")
+public class LockTableDesc extends DDLDesc implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String tableName;
+ private String mode;
+ private Map<String, String> partSpec;
+
+ public LockTableDesc() {
+ }
+
+ public LockTableDesc(String tableName, String mode, Map<String, String> partSpec) {
+ this.tableName = tableName;
+ this.mode = mode;
+ this.partSpec = partSpec;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public void setTableName(String tableName) {
+ this.tableName = tableName;
+ }
+
+ public void setMode(String mode) {
+ this.mode = mode;
+ }
+
+ public String getMode() {
+ return mode;
+ }
+
+ public Map<String, String> getPartSpec() {
+ return partSpec;
+ }
+
+ public void setPartSpec(Map<String, String> partSpec) {
+ this.partSpec = partSpec;
+ }
+}
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java Tue Aug 24 15:47:24 2010
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.fs.Path;
+
+/**
+ * ShowLocksDesc.
+ *
+ */
+@Explain(displayName = "Show Locks")
+public class ShowLocksDesc extends DDLDesc implements Serializable {
+ private static final long serialVersionUID = 1L;
+ String resFile;
+
+ /**
+ * table name for the result of show locks.
+ */
+ private static final String table = "showlocks";
+ /**
+ * thrift ddl for the result of show locks.
+ */
+ private static final String schema = "tab_name,mode#string:string";
+
+ public String getTable() {
+ return table;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
+ public ShowLocksDesc() {
+ }
+
+ /**
+ * @param resFile
+ */
+ public ShowLocksDesc(Path resFile) {
+ this.resFile = resFile.toString();
+ }
+
+ /**
+ * @return the resFile
+ */
+ @Explain(displayName = "result file", normalExplain = false)
+ public String getResFile() {
+ return resFile;
+ }
+
+ /**
+ * @param resFile
+ * the resFile to set
+ */
+ public void setResFile(String resFile) {
+ this.resFile = resFile;
+ }
+}
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java Tue Aug 24 15:47:24 2010
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+
+/**
+ * UnlockTableDesc.
+ *
+ */
+@Explain(displayName = "Unlock Table")
+public class UnlockTableDesc extends DDLDesc implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String tableName;
+ private Map<String, String> partSpec;
+
+ public UnlockTableDesc() {
+ }
+
+ public UnlockTableDesc(String tableName, Map<String, String> partSpec) {
+ this.tableName = tableName;
+ this.partSpec = partSpec;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public void setTableName(String tableName) {
+ this.tableName = tableName;
+ }
+
+ public Map<String, String> getPartSpec() {
+ return partSpec;
+ }
+
+ public void setPartSpec(Map<String, String> partSpec) {
+ this.partSpec = partSpec;
+ }
+}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Tue Aug 24 15:47:24 2010
@@ -19,9 +19,12 @@
package org.apache.hadoop.hive.ql.processors;
import static org.apache.commons.lang.StringUtils.isBlank;
+import java.util.Map;
+import java.util.HashMap;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
/**
* CommandProcessorFactory.
@@ -33,7 +36,12 @@ public final class CommandProcessorFacto
// prevent instantiation
}
+ static Map<HiveConf, Driver> mapDrivers = new HashMap<HiveConf, Driver>();
public static CommandProcessor get(String cmd) {
+ return get(cmd, null);
+ }
+
+ public static CommandProcessor get(String cmd, HiveConf conf) {
String cmdl = cmd.toLowerCase();
if ("set".equals(cmdl)) {
@@ -46,9 +54,28 @@ public final class CommandProcessorFacto
} else if ("delete".equals(cmdl)) {
return new DeleteResourceProcessor();
} else if (!isBlank(cmd)) {
- return new Driver();
+ if (conf == null) {
+ return new Driver();
+ }
+
+ Driver drv = mapDrivers.get(conf);
+ if (drv == null) {
+ drv = new Driver();
+ mapDrivers.put(conf, drv);
+ }
+ drv.init();
+ return drv;
}
+
return null;
}
+ public static void clean(HiveConf conf) {
+ Driver drv = mapDrivers.get(conf);
+ if (drv != null) {
+ drv.destroy();
+ }
+
+ mapDrivers.remove(conf);
+ }
}
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Tue Aug 24 15:47:24 2010
@@ -38,6 +38,7 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import junit.framework.Test;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -69,6 +70,9 @@ import org.apache.hadoop.mapred.Sequence
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.hadoop.hbase.MiniZooKeeperCluster;
+import org.apache.zookeeper.ZooKeeper;
+import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
/**
* QTestUtil.
@@ -103,6 +107,7 @@ public class QTestUtil {
private HadoopShims.MiniDFSShim dfs = null;
private boolean miniMr = false;
private String hadoopVer = null;
+ private QTestSetup setup = null;
public boolean deleteDirectory(File path) {
if (path.exists()) {
@@ -198,10 +203,10 @@ public class QTestUtil {
.concat("/build/ql/test/data/warehouse/"));
conf.set("mapred.job.tracker", "localhost:" + mr.getJobTrackerPort());
}
-
}
- public QTestUtil(String outDir, String logDir, boolean miniMr, String hadoopVer) throws Exception {
+ public QTestUtil(String outDir, String logDir, boolean miniMr, String hadoopVer)
+ throws Exception {
this.outDir = outDir;
this.logDir = logDir;
conf = new HiveConf(Driver.class);
@@ -227,6 +232,8 @@ public class QTestUtil {
overWrite = true;
}
+ setup = new QTestSetup();
+ setup.preTest(conf);
init();
}
@@ -301,6 +308,13 @@ public class QTestUtil {
/**
* Clear out any side effects of running tests
*/
+ public void clearPostTestEffects () throws Exception {
+ setup.postTest(conf);
+ }
+
+ /**
+ * Clear out any side effects of running tests
+ */
public void clearTestSideEffects () throws Exception {
// delete any tables other than the source tables
for (String s: db.getAllTables()) {
@@ -312,9 +326,9 @@ public class QTestUtil {
// modify conf by using 'set' commands
conf = new HiveConf (Driver.class);
initConf();
+ setup.preTest(conf);
}
-
public void cleanUp() throws Exception {
String warehousePath = ((new URI(testWarehouse)).getPath());
// Drop any tables that remain due to unsuccessful runs
@@ -329,6 +343,7 @@ public class QTestUtil {
}
FunctionRegistry.unregisterTemporaryUDF("test_udaf");
FunctionRegistry.unregisterTemporaryUDF("test_error");
+ setup.tearDown();
}
private void runLoadCmd(String loadCmd) throws Exception {
@@ -916,6 +931,59 @@ public class QTestUtil {
}
/**
+ * QTestSetup defines test fixtures which are reused across testcases,
+ * and are needed before any test can be run
+ */
+ public static class QTestSetup
+ {
+ private MiniZooKeeperCluster zooKeeperCluster = null;
+ private int zkPort;
+ private ZooKeeper zooKeeper;
+
+ public QTestSetup() {
+ }
+
+ public void preTest(HiveConf conf) throws Exception {
+
+ if (zooKeeperCluster == null) {
+ String tmpdir = System.getProperty("user.dir")+"/../build/ql/tmp";
+ zooKeeperCluster = new MiniZooKeeperCluster();
+ zkPort = zooKeeperCluster.startup(new File(tmpdir, "zookeeper"));
+ }
+
+ if (zooKeeper != null) {
+ zooKeeper.close();
+ }
+
+ int sessionTimeout = conf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
+ zooKeeper = new ZooKeeper("localhost:" + zkPort, sessionTimeout, null);
+
+ String zkServer = "localhost";
+ conf.set("hive.zookeeper.quorum", zkServer);
+ conf.set("hive.zookeeper.client.port", "" + zkPort);
+ }
+
+ public void postTest(HiveConf conf) throws Exception {
+ if (zooKeeperCluster == null) {
+ return;
+ }
+
+ if (zooKeeper != null) {
+ zooKeeper.close();
+ }
+
+ ZooKeeperHiveLockManager.releaseAllLocks(conf);
+ }
+
+ public void tearDown() throws Exception {
+ if (zooKeeperCluster != null) {
+ zooKeeperCluster.shutdown();
+ zooKeeperCluster = null;
+ }
+ }
+ }
+
+ /**
 * QTRunner: Runnable class for running a single query file.
*
**/
@@ -962,17 +1030,18 @@ public class QTestUtil {
* (in terms of destination tables)
*/
public static boolean queryListRunner(File[] qfiles, String[] resDirs,
- String[] logDirs, boolean mt) {
+ String[] logDirs, boolean mt, Test test) {
assert (qfiles.length == resDirs.length);
assert (qfiles.length == logDirs.length);
boolean failed = false;
-
try {
QTestUtil[] qt = new QTestUtil[qfiles.length];
+ QTestSetup[] qsetup = new QTestSetup[qfiles.length];
for (int i = 0; i < qfiles.length; i++) {
- qt[i] = new QTestUtil(resDirs[i], logDirs[i]);
+ qt[i] = new QTestUtil(resDirs[i], logDirs[i], false, "0.20");
qt[i].addFile(qfiles[i]);
+ qt[i].clearTestSideEffects();
}
if (mt) {
@@ -980,6 +1049,7 @@ public class QTestUtil {
qt[0].cleanUp();
qt[0].createSources();
+ qt[0].clearTestSideEffects();
QTRunner[] qtRunners = new QTestUtil.QTRunner[qfiles.length];
Thread[] qtThread = new Thread[qfiles.length];
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java Tue Aug 24 15:47:24 2010
@@ -46,7 +46,7 @@ public class TestMTQueries extends TestC
qfiles[i] = new File(inpDir, testNames[i]);
}
- boolean success = QTestUtil.queryListRunner(qfiles, resDirs, logDirs, true);
+ boolean success = QTestUtil.queryListRunner(qfiles, resDirs, logDirs, true, this);
if (!success) {
fail("One or more queries failed");
}
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Tue Aug 24 15:47:24 2010
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.tools.LineageInfo;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.ql.QTestUtil.QTestSetup;
/**
* TestHiveHistory.
@@ -53,6 +54,7 @@ public class TestHiveHistory extends Tes
private static Path tmppath = new Path(tmpdir);
private static Hive db;
private static FileSystem fs;
+ private QTestSetup setup;
/*
 * initialize the tables
@@ -75,6 +77,9 @@ public class TestHiveHistory extends Tes
}
}
+ setup = new QTestSetup();
+ setup.preTest(conf);
+
// copy the test files into hadoop if required.
int i = 0;
Path[] hadoopDataFile = new Path[2];
@@ -109,6 +114,19 @@ public class TestHiveHistory extends Tes
}
}
+ @Override
+ protected void tearDown() {
+ try {
+ setup.tearDown();
+ }
+ catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in tearDown");
+ }
+ }
+
/**
* Check history file output for this query.
*/
@@ -133,7 +151,7 @@ public class TestHiveHistory extends Tes
SessionState.start(ss);
String cmd = "select a.key from src a";
- Driver d = new Driver();
+ Driver d = new Driver(conf);
int ret = d.run(cmd).getResponseCode();
if (ret != 0) {
fail("Failed");
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg1.q?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg1.q Tue Aug 24 15:47:24 2010
@@ -0,0 +1,7 @@
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+LOCK TABLE tstsrc SHARED;
+LOCK TABLE tstsrc SHARED;
+LOCK TABLE tstsrc EXCLUSIVE;
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg2.q?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg2.q Tue Aug 24 15:47:24 2010
@@ -0,0 +1,5 @@
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+UNLOCK TABLE tstsrc;
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg3.q?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg3.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/lockneg3.q Tue Aug 24 15:47:24 2010
@@ -0,0 +1,7 @@
+drop table tstsrcpart;
+create table tstsrcpart like srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11';
+
+UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11');
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock1.q?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock1.q Tue Aug 24 15:47:24 2010
@@ -0,0 +1,18 @@
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+SHOW LOCKS;
+
+LOCK TABLE tstsrc shared;
+SHOW LOCKS;
+UNLOCK TABLE tstsrc;
+SHOW LOCKS;
+lock TABLE tstsrc SHARED;
+SHOW LOCKS;
+LOCK TABLE tstsrc SHARED;
+SHOW LOCKS;
+UNLOCK TABLE tstsrc;
+SHOW LOCKS;
+
+drop table tstsrc;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock2.q?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/lock2.q Tue Aug 24 15:47:24 2010
@@ -0,0 +1,24 @@
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+drop table tstsrcpart;
+create table tstsrcpart like srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11';
+
+LOCK TABLE tstsrc SHARED;
+LOCK TABLE tstsrcpart SHARED;
+LOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') EXCLUSIVE;
+SHOW LOCKS;
+UNLOCK TABLE tstsrc;
+SHOW LOCKS;
+UNLOCK TABLE tstsrcpart;
+SHOW LOCKS;
+UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11');
+SHOW LOCKS;
+
+
+drop table tstsrc;
+drop table tstsrcpart;
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg1.q.out?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg1.q.out Tue Aug 24 15:47:24 2010
@@ -0,0 +1,35 @@
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrc SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrc SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrc SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrc SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrc EXCLUSIVE
+PREHOOK: type: LOCKTABLE
+conflicting lock present
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg2.q.out?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg2.q.out Tue Aug 24 15:47:24 2010
@@ -0,0 +1,23 @@
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: UNLOCK TABLE tstsrc
+PREHOOK: type: UNLOCKTABLE
+FAILED: Error in metadata: Table tstsrc is not locked
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out Tue Aug 24 15:47:24 2010
@@ -0,0 +1,25 @@
+PREHOOK: query: drop table tstsrcpart
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrcpart
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrcpart
+PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11')
+PREHOOK: type: UNLOCKTABLE
+FAILED: Error in metadata: Table tstsrcpart is not locked
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/lock1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/lock1.q.out?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/lock1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/lock1.q.out Tue Aug 24 15:47:24 2010
@@ -0,0 +1,99 @@
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrc shared
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrc shared
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrc SHARED
+PREHOOK: query: UNLOCK TABLE tstsrc
+PREHOOK: type: UNLOCKTABLE
+POSTHOOK: query: UNLOCK TABLE tstsrc
+POSTHOOK: type: UNLOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: lock TABLE tstsrc SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: lock TABLE tstsrc SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrc SHARED
+PREHOOK: query: LOCK TABLE tstsrc SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrc SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrc SHARED
+default@tstsrc SHARED
+PREHOOK: query: UNLOCK TABLE tstsrc
+PREHOOK: type: UNLOCKTABLE
+POSTHOOK: query: UNLOCK TABLE tstsrc
+POSTHOOK: type: UNLOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tstsrc
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tstsrc
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/lock2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/lock2.q.out?rev=988603&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/lock2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/lock2.q.out Tue Aug 24 15:47:24 2010
@@ -0,0 +1,156 @@
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: drop table tstsrcpart
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrcpart
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: create table tstsrcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrcpart
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrc SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrc SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrcpart SHARED
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrcpart SHARED
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: LOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') EXCLUSIVE
+PREHOOK: type: LOCKTABLE
+POSTHOOK: query: LOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') EXCLUSIVE
+POSTHOOK: type: LOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrc SHARED
+default@tstsrcpart SHARED
+default@tstsrcpart@ds=2008-04-08/hr=11 EXCLUSIVE
+PREHOOK: query: UNLOCK TABLE tstsrc
+PREHOOK: type: UNLOCKTABLE
+POSTHOOK: query: UNLOCK TABLE tstsrc
+POSTHOOK: type: UNLOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrcpart SHARED
+default@tstsrcpart@ds=2008-04-08/hr=11 EXCLUSIVE
+PREHOOK: query: UNLOCK TABLE tstsrcpart
+PREHOOK: type: UNLOCKTABLE
+POSTHOOK: query: UNLOCK TABLE tstsrcpart
+POSTHOOK: type: UNLOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+default@tstsrcpart@ds=2008-04-08/hr=11 EXCLUSIVE
+PREHOOK: query: UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11')
+PREHOOK: type: UNLOCKTABLE
+POSTHOOK: query: UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11')
+POSTHOOK: type: UNLOCKTABLE
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SHOW LOCKS
+PREHOOK: type: SHOWLOCKS
+POSTHOOK: query: SHOW LOCKS
+POSTHOOK: type: SHOWLOCKS
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tstsrc
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tstsrc
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: drop table tstsrcpart
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tstsrcpart
+PREHOOK: Output: default@tstsrcpart
+POSTHOOK: query: drop table tstsrcpart
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tstsrcpart
+POSTHOOK: Output: default@tstsrcpart
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
Modified: hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestCliDriver.vm Tue Aug 24 15:47:24 2010
@@ -71,6 +71,7 @@ public class $className extends TestCase
@Override
protected void tearDown() {
try {
+ qt.clearPostTestEffects();
if (getName().equals("testCliDriver_shutdown"))
qt.shutdown();
}
@@ -110,7 +111,7 @@ public class $className extends TestCase
if (qt.shouldBeSkipped("$fname")) {
return;
}
-
+
qt.cliInit("$fname", false);
int ecode = qt.executeClient("$fname");
if (ecode != 0) {
Modified: hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm Tue Aug 24 15:47:24 2010
@@ -16,13 +16,13 @@ import org.antlr.runtime.tree.*;
public class $className extends TestCase {
private static QTestUtil qt;
+
static {
try {
- qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()");
+ qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", false, "0.20");
// do a one time initialization
qt.cleanUp();
qt.createSources();
-
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -47,6 +47,21 @@ public class $className extends TestCase
}
}
+ @Override
+ protected void tearDown() {
+ try {
+ qt.clearPostTestEffects();
+ if (getName().equals("testNegativeCliDriver_shutdown"))
+ qt.shutdown();
+ }
+ catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in tearDown");
+ }
+ }
+
public static Test suite() {
TestSuite suite = new TestSuite();
#foreach ($qf in $qfiles)
@@ -55,9 +70,17 @@ public class $className extends TestCase
#set ($tname = $fname.substring(0, $eidx))
suite.addTest(new $className("testNegativeCliDriver_$tname"));
#end
+ suite.addTest(new $className("testNegativeCliDriver_shutdown"));
return suite;
}
+ /**
+ * Dummy last test. This is only meant to shutdown qt
+ */
+ public void testNegativeCliDriver_shutdown() {
+ System.out.println ("Cleaning up " + "$className");
+ }
+
static String debugHint = "\nSee build/ql/tmp/hive.log, "
+ "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
@@ -75,7 +98,7 @@ public class $className extends TestCase
System.out.println("Test $fname skipped");
return;
}
-
+
qt.cliInit("$fname", false);
int ecode = qt.executeClient("$fname");
if (ecode == 0) {
Modified: hadoop/hive/trunk/ql/src/test/templates/TestParse.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestParse.vm?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestParse.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestParse.vm Tue Aug 24 15:47:24 2010
@@ -48,17 +48,41 @@ public class $className extends TestCase
}
}
+ @Override
+ protected void tearDown() {
+ try {
+ qt.clearPostTestEffects();
+ if (getName().equals("testParse_shutdown"))
+ qt.shutdown();
+ }
+ catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in tearDown");
+ }
+ }
+
public static Test suite() {
TestSuite suite = new TestSuite();
+
#foreach ($qf in $qfiles)
#set ($fname = $qf.getName())
#set ($eidx = $fname.length() - 2)
#set ($tname = $fname.substring(0, $eidx))
suite.addTest(new $className("testParse_$tname"));
#end
+ suite.addTest(new $className("testParse_shutdown"));
return suite;
}
+ /**
+ * Dummy last test. This is only meant to shutdown qt
+ */
+ public void testParse_shutdown() {
+ System.out.println ("Cleaning up " + "$className");
+ }
+
static String debugHint = "\nSee build/ql/tmp/hive.log, "
+ "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
Modified: hadoop/hive/trunk/ql/src/test/templates/TestParseNegative.vm
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/templates/TestParseNegative.vm?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/templates/TestParseNegative.vm (original)
+++ hadoop/hive/trunk/ql/src/test/templates/TestParseNegative.vm Tue Aug 24 15:47:24 2010
@@ -28,7 +28,7 @@ public class $className extends TestCase
fail("Unexpected exception in static initialization");
}
}
-
+
public $className(String name) {
super(name);
qt = null;
@@ -37,7 +37,7 @@ public class $className extends TestCase
@Override
protected void setUp() {
try {
- qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()",
+ qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()",
miniMR, hadoopVer);
}
catch (Exception e) {
@@ -48,14 +48,38 @@ public class $className extends TestCase
}
}
+ @Override
+ protected void tearDown() {
+ try {
+ qt.clearPostTestEffects();
+ if (getName().equals("testParseNegative_shutdown"))
+ qt.shutdown();
+ }
+ catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in tearDown");
+ }
+ }
+
+ /**
+ * Dummy last test. This is only meant to shutdown qt
+ */
+ public void testParseNegative_shutdown() {
+ System.out.println ("Cleaning up " + "$className");
+ }
+
public static Test suite() {
TestSuite suite = new TestSuite();
+
#foreach ($qf in $qfiles)
#set ($fname = $qf.getName())
#set ($eidx = $fname.length() - 2)
#set ($tname = $fname.substring(0, $eidx))
suite.addTest(new $className("testParseNegative_$tname"));
#end
+ suite.addTest(new $className("testParseNegative_shutdown"));
return suite;
}
Modified: hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Tue Aug 24 15:47:24 2010
@@ -110,6 +110,7 @@ public class HiveServer extends ThriftHi
CommandProcessorResponse response = null;
if (proc != null) {
if (proc instanceof Driver) {
+ ((Driver)proc).destroy();
isHiveQuery = true;
response = driver.run(cmd);
} else {
Modified: hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java?rev=988603&r1=988602&r2=988603&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java (original)
+++ hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java Tue Aug 24 15:47:24 2010
@@ -50,6 +50,7 @@ public class TestHiveServer extends Test
@Override
protected void setUp() throws Exception {
super.setUp();
+
if (standAloneServer) {
try {
transport = new TSocket(host, port);
@@ -74,6 +75,7 @@ public class TestHiveServer extends Test
public void testExecute() throws Exception {
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}
@@ -106,6 +108,7 @@ public class TestHiveServer extends Test
public void notestExecute() throws Exception {
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}
@@ -122,6 +125,7 @@ public class TestHiveServer extends Test
public void testNonHiveCommand() throws Exception {
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}
@@ -173,6 +177,7 @@ public class TestHiveServer extends Test
*/
public void testMetastore() throws Exception {
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}
@@ -198,12 +203,13 @@ public class TestHiveServer extends Test
|| clusterStatus.getState() == JobTrackerState.RUNNING);
}
- /**
+ /**
*
*/
public void testFetch() throws Exception {
// create and populate a table with 500 rows.
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}
@@ -239,6 +245,7 @@ public class TestHiveServer extends Test
public void testDynamicSerde() throws Exception {
try {
+ client.execute("set hive.support.concurrency = false");
client.execute("drop table " + tableName);
} catch (Exception ex) {
}