You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2013/11/05 08:01:58 UTC
svn commit: r1538880 [3/46] - in /hive/branches/tez: ./ ant/
ant/src/org/apache/hadoop/hive/ant/ beeline/
beeline/src/java/org/apache/hive/beeline/ beeline/src/main/
beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/
common/src/java/conf/ ...
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java Tue Nov 5 07:01:32 2013
@@ -36,14 +36,13 @@ import org.apache.hadoop.hbase.HTableDes
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hive.conf.HiveConf;
import org.junit.AfterClass;
-import org.junit.BeforeClass;
/**
* Base class for HBase Tests which need a mini cluster instance
*/
public abstract class SkeletonHBaseTest {
- protected static String TEST_DIR = "/tmp/build/test/data/";
+ protected static String TEST_DIR = System.getProperty("test.tmp.dir", "target/tmp/");
protected final static String DEFAULT_CONTEXT_HANDLE = "default";
@@ -56,20 +55,15 @@ public abstract class SkeletonHBaseTest
*/
protected static Configuration testConf = null;
- protected void createTable(String tableName, String[] families) {
- try {
- HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
- HTableDescriptor tableDesc = new HTableDescriptor(tableName);
- for (String family : families) {
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
- tableDesc.addFamily(columnDescriptor);
- }
- admin.createTable(tableDesc);
- } catch (Exception e) {
- e.printStackTrace();
- throw new IllegalStateException(e);
+ protected void createTable(String tableName, String[] families) throws IOException {
+ HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+ HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+ for (String family : families) {
+ HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
+ tableDesc.addFamily(columnDescriptor);
}
-
+ admin.createTable(tableDesc);
+ admin.close();
}
protected String newTableName(String prefix) {
@@ -78,21 +72,20 @@ public abstract class SkeletonHBaseTest
do {
name = prefix + "_" + Math.abs(new Random().nextLong());
} while (tableNames.contains(name) && --tries > 0);
- if (tableNames.contains(name))
+ if (tableNames.contains(name)) {
throw new IllegalStateException("Couldn't find a unique table name, tableNames size: " + tableNames.size());
+ }
tableNames.add(name);
return name;
}
-
/**
* startup an hbase cluster instance before a test suite runs
*/
- @BeforeClass
- public static void setup() {
- if (!contextMap.containsKey(getContextHandle()))
+ public static void setupSkeletonHBaseTest() {
+ if (!contextMap.containsKey(getContextHandle())) {
contextMap.put(getContextHandle(), new Context(getContextHandle()));
-
+ }
contextMap.get(getContextHandle()).start();
}
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java Tue Nov 5 07:01:32 2013
@@ -66,6 +66,7 @@ import org.apache.hcatalog.mapreduce.HCa
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -89,6 +90,11 @@ public class TestHBaseBulkOutputFormat e
private final HiveConf allConf;
private final HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHBaseBulkOutputFormat() {
allConf = getHiveConf();
allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java Tue Nov 5 07:01:32 2013
@@ -63,6 +63,7 @@ import org.apache.hcatalog.hbase.snapsho
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
@@ -83,6 +84,11 @@ public class TestHBaseDirectOutputFormat
private final HiveConf allConf;
private final HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHBaseDirectOutputFormat() {
allConf = getHiveConf();
allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java Tue Nov 5 07:01:32 2013
@@ -41,6 +41,7 @@ import org.apache.hcatalog.cli.SemanticA
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
@@ -49,6 +50,11 @@ public class TestHBaseHCatStorageHandler
private static HCatDriver hcatDriver;
private static Warehouse wh;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java Tue Nov 5 07:01:32 2013
@@ -73,6 +73,7 @@ import org.apache.hcatalog.hbase.snapsho
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.apache.hcatalog.mapreduce.PartInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
@@ -83,6 +84,11 @@ public class TestHCatHBaseInputFormat ex
private final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
private final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHCatHBaseInputFormat() throws Exception {
hcatConf = getHiveConf();
hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java Tue Nov 5 07:01:32 2013
@@ -41,12 +41,18 @@ import org.apache.hcatalog.common.HCatUt
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestSnapshots extends SkeletonHBaseTest {
private static HiveConf hcatConf;
private static HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java Tue Nov 5 07:01:32 2013
@@ -26,10 +26,16 @@ import java.util.HashMap;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.junit.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestIDGenerator extends SkeletonHBaseTest {
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
@Test
public void testIDGeneration() throws Exception {
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java Tue Nov 5 07:01:32 2013
@@ -32,10 +32,16 @@ import org.apache.hcatalog.hbase.snapsho
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestRevisionManager extends SkeletonHBaseTest {
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
@Test
public void testBasicZNodeCreation() throws IOException, KeeperException, InterruptedException {
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java Tue Nov 5 07:01:32 2013
@@ -31,17 +31,20 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.junit.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestRevisionManagerEndpoint extends SkeletonHBaseTest {
- static {
+ @BeforeClass
+ public static void setup() throws Throwable {
// test case specific mini cluster settings
testConf = new Configuration(false);
testConf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
"org.apache.hcatalog.hbase.snapshot.RevisionManagerEndpoint",
"org.apache.hadoop.hbase.coprocessor.GenericEndpoint");
testConf.set(RMConstants.REVISION_MGR_ENDPOINT_IMPL_CLASS, MockRM.class.getName());
+ setupSkeletonHBaseTest();
}
/**
Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java Tue Nov 5 07:01:32 2013
@@ -38,6 +38,7 @@ import org.apache.hcatalog.cli.SemanticA
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
import org.junit.Test;
@@ -46,6 +47,11 @@ public class TestZNodeSetUp extends Skel
private static HiveConf hcatConf;
private static HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
Modified: hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java Tue Nov 5 07:01:32 2013
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.io.RCFi
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
@@ -52,6 +51,7 @@ import static org.junit.Assert.assertEqu
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.fail;
/**
* @deprecated Use/modify {@link org.apache.hive.hcatalog.api.TestHCatClient} instead
@@ -123,9 +123,9 @@ public class TestHCatClient {
assertTrue(testDb.getComment() == null);
assertTrue(testDb.getProperties().size() == 0);
String warehouseDir = System
- .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
- assertTrue(testDb.getLocation().equals(
- "file:" + warehouseDir + "/" + db + ".db"));
+ .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+ String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+ assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -145,6 +145,7 @@ public class TestHCatClient {
// will result in an exception.
try {
client.createTable(tableDesc);
+ fail("Expected exception");
} catch (HCatException e) {
assertTrue(e.getMessage().contains(
"AlreadyExistsException while creating table."));
@@ -159,8 +160,7 @@ public class TestHCatClient {
TextInputFormat.class.getName()));
assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
IgnoreKeyTextOutputFormat.class.getName()));
- assertTrue(table2.getLocation().equalsIgnoreCase(
- "file:" + warehouseDir + "/" + db + ".db/" + tableTwo));
+ assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
client.close();
}
Modified: hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java Tue Nov 5 07:01:32 2013
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.io.RCFi
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
@@ -48,6 +47,7 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -120,13 +120,9 @@ public class TestHCatClient {
assertTrue(testDb.getComment() == null);
assertTrue(testDb.getProperties().size() == 0);
String warehouseDir = System
- .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
- String expectedDir = warehouseDir.replaceAll("\\\\", "/");
- if (!expectedDir.startsWith("/")) {
- expectedDir = "/" + expectedDir;
- }
- assertTrue(testDb.getLocation().equals(
- "file:" + expectedDir + "/" + db + ".db"));
+ .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+ String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+ assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -146,6 +142,7 @@ public class TestHCatClient {
// will result in an exception.
try {
client.createTable(tableDesc);
+ fail("Expected exception");
} catch (HCatException e) {
assertTrue(e.getMessage().contains(
"AlreadyExistsException while creating table."));
@@ -160,8 +157,7 @@ public class TestHCatClient {
TextInputFormat.class.getName()));
assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
IgnoreKeyTextOutputFormat.class.getName()));
- assertTrue(table2.getLocation().equalsIgnoreCase(
- "file:" + expectedDir + "/" + db + ".db/" + tableTwo));
+ assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
client.close();
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Tue Nov 5 07:01:32 2013
@@ -116,6 +116,25 @@
</property>
<property>
+ <name>templeton.hive.home</name>
+ <value>hive-0.13.0-SNAPSHOT-bin.tar.gz/hive-0.13.0-SNAPSHOT-bin</value>
+ <description>
+ The path to the Hive home within the tar. This is needed if Hive is not installed on all
+ nodes in the cluster and needs to be shipped to the target node in the cluster to execute Pig
+ job which uses HCat, Hive query, etc. Has no effect if templeton.hive.archive is not set.
+ </description>
+ </property>
+ <property>
+ <name>templeton.hcat.home</name>
+ <value>hive-0.13.0-SNAPSHOT-bin.tar.gz/hive-0.13.0-SNAPSHOT-bin/hcatalog</value>
+ <description>
+ The path to the HCat home within the tar. This is needed if Hive is not installed on all
+ nodes in the cluster and needs to be shipped to the target node in the cluster to execute Pig
+ job which uses HCat, Hive query, etc. Has no effect if templeton.hive.archive is not set.
+ </description>
+ </property>
+
+ <property>
<name>templeton.hive.properties</name>
<value>hive.metastore.local=false,hive.metastore.uris=thrift://localhost:9933,hive.metastore.sasl.enabled=false</value>
<description>Properties to set when running hive.</description>
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java Tue Nov 5 07:01:32 2013
@@ -91,6 +91,14 @@ public class AppConfig extends Configura
public static final String PYTHON_NAME = "templeton.python";
public static final String HIVE_ARCHIVE_NAME = "templeton.hive.archive";
public static final String HIVE_PATH_NAME = "templeton.hive.path";
+ /**
+ * see webhcat-default.xml
+ */
+ public static final String HIVE_HOME_PATH = "templeton.hive.home";
+ /**
+ * see webhcat-default.xml
+ */
+ public static final String HCAT_HOME_PATH = "templeton.hcat.home";
public static final String HIVE_PROPS_NAME = "templeton.hive.properties";
public static final String LIB_JARS_NAME = "templeton.libjars";
public static final String PIG_ARCHIVE_NAME = "templeton.pig.archive";
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java Tue Nov 5 07:01:32 2013
@@ -19,7 +19,10 @@
package org.apache.hive.hcatalog.templeton;
import java.io.IOException;
+import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobID;
@@ -30,6 +33,7 @@ import org.apache.hive.hcatalog.templeto
* Delete a job
*/
public class DeleteDelegator extends TempletonDelegator {
+ private static final Log LOG = LogFactory.getLog(DeleteDelegator.class);
public DeleteDelegator(AppConfig appConf) {
super(appConf);
}
@@ -47,9 +51,16 @@ public class DeleteDelegator extends Tem
throw new BadParam("Invalid jobid: " + id);
tracker.killJob(jobid);
state = new JobState(id, Main.getAppConfigInstance());
- String childid = state.getChildId();
- if (childid != null)
- tracker.killJob(StatusDelegator.StringToJobID(childid));
+ List<JobState> children = state.getChildren();
+ if (children != null) {
+ for (JobState child : children) {
+ try {
+ tracker.killJob(StatusDelegator.StringToJobID(child.getId()));
+ } catch (IOException e) {
+ LOG.warn("templeton: fail to kill job " + child.getId());
+ }
+ }
+ }
return StatusDelegator.makeStatus(tracker, jobid, state);
} catch (IllegalStateException e) {
throw new BadParam(e.getMessage());
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java Tue Nov 5 07:01:32 2013
@@ -107,12 +107,12 @@ public class HcatDelegator extends Launc
}
LOG.info("Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE)=" +
Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE));
- if(System.getProperty("hive.metastore.warehouse.dir") != null) {
+ if(System.getProperty("test.warehouse.dir") != null) {
/*when running in unit test mode, pass this property to HCat,
which will in turn pass it to Hive to make sure that Hive
tries to write to a directory that exists.*/
args.add("-D");
- args.add("hive.metastore.warehouse.dir=" + System.getProperty("hive.metastore.warehouse.dir"));
+ args.add("hive.metastore.warehouse.dir=" + System.getProperty("test.warehouse.dir"));
}
return args;
}
@@ -645,7 +645,7 @@ public class HcatDelegator extends Launc
} catch (HcatException e) {
if (e.execBean.stderr.contains("SemanticException") &&
e.execBean.stderr.contains("Partition not found")) {
- String emsg = "Partition " + partition + " for table "
+ String emsg = "Partition " + partition + " for table "
+ table + " does not exist" + db + "." + table + " does not exist";
return JsonBuilder.create()
.put("error", emsg)
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Tue Nov 5 07:01:32 2013
@@ -128,7 +128,7 @@ public class HiveDelegator extends Launc
if (appConf.hiveArchive() != null && !appConf.hiveArchive().equals(""))
{
- args.add("-archives");
+ args.add(ARCHIVES);
args.add(appConf.hiveArchive());
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java Tue Nov 5 07:01:32 2013
@@ -43,14 +43,14 @@ public class JarDelegator extends Launch
String libjars, String files,
List<String> jarArgs, List<String> defines,
String statusdir, String callback,
- boolean usehcatalog, String completedUrl,
+ boolean usesHcatalog, String completedUrl,
boolean enablelog, JobType jobType)
throws NotAuthorizedException, BadParam, BusyException, QueueException,
ExecuteException, IOException, InterruptedException {
runAs = user;
List<String> args = makeArgs(jar, mainClass,
libjars, files, jarArgs, defines,
- statusdir, usehcatalog, completedUrl, enablelog, jobType);
+ statusdir, usesHcatalog, completedUrl, enablelog, jobType);
return enqueueController(user, userArgs, callback, args);
}
@@ -58,7 +58,7 @@ public class JarDelegator extends Launch
private List<String> makeArgs(String jar, String mainClass,
String libjars, String files,
List<String> jarArgs, List<String> defines,
- String statusdir, boolean usehcatalog, String completedUrl,
+ String statusdir, boolean usesHcatalog, String completedUrl,
boolean enablelog, JobType jobType)
throws BadParam, IOException, InterruptedException {
ArrayList<String> args = new ArrayList<String>();
@@ -72,7 +72,7 @@ public class JarDelegator extends Launch
TempletonUtils.addCmdForWindows(args);
//check if the rest command specified explicitly to use hcatalog
- if(usehcatalog){
+ if(usesHcatalog){
addHiveMetaStoreTokenArg();
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java Tue Nov 5 07:01:32 2013
@@ -97,6 +97,9 @@ public class LauncherDelegator extends T
private String queueAsUser(UserGroupInformation ugi, final List<String> args)
throws IOException, InterruptedException {
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("Launching job: " + args);
+ }
return ugi.doAs(new PrivilegedExceptionAction<String>() {
public String run() throws Exception {
String[] array = new String[args.size()];
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java Tue Nov 5 07:01:32 2013
@@ -26,7 +26,6 @@ import org.apache.hadoop.hive.shims.Hado
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hive.hcatalog.templeton.tool.JobState;
/**
* List jobs owned by a user.
@@ -50,17 +49,9 @@ public class ListDelegator extends Templ
if (jobs != null) {
for (JobStatus job : jobs) {
- JobState state = null;
- try {
- String id = job.getJobID().toString();
- state = new JobState(id, Main.getAppConfigInstance());
- if (showall || user.equals(state.getUser()))
- ids.add(id);
- } finally {
- if (state != null) {
- state.close();
- }
- }
+ String id = job.getJobID().toString();
+ if (showall || user.equals(job.getUsername()))
+ ids.add(id);
}
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java Tue Nov 5 07:01:32 2013
@@ -206,10 +206,13 @@ public class Main {
public FilterHolder makeAuthFilter() {
FilterHolder authFilter = new FilterHolder(AuthFilter.class);
if (UserGroupInformation.isSecurityEnabled()) {
+ //http://hadoop.apache.org/docs/r1.1.1/api/org/apache/hadoop/security/authentication/server/AuthenticationFilter.html
authFilter.setInitParameter("dfs.web.authentication.signature.secret",
conf.kerberosSecret());
+ //https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2/src/packages/templates/conf/hdfs-site.xml
authFilter.setInitParameter("dfs.web.authentication.kerberos.principal",
conf.kerberosPrincipal());
+ //https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2/src/packages/templates/conf/hdfs-site.xml
authFilter.setInitParameter("dfs.web.authentication.kerberos.keytab",
conf.kerberosKeytab());
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java Tue Nov 5 07:01:32 2013
@@ -29,6 +29,7 @@ import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants;
import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
@@ -47,13 +48,13 @@ public class PigDelegator extends Launch
String execute, String srcFile,
List<String> pigArgs, String otherFiles,
String statusdir, String callback,
- boolean usehcatalog, String completedUrl, boolean enablelog)
+ boolean usesHcatalog, String completedUrl, boolean enablelog)
throws NotAuthorizedException, BadParam, BusyException, QueueException,
ExecuteException, IOException, InterruptedException {
runAs = user;
List<String> args = makeArgs(execute,
srcFile, pigArgs,
- otherFiles, statusdir, usehcatalog, completedUrl, enablelog);
+ otherFiles, statusdir, usesHcatalog, completedUrl, enablelog);
return enqueueController(user, userArgs, callback, args);
}
@@ -64,7 +65,7 @@ public class PigDelegator extends Launch
* @param pigArgs pig command line arguments
* @param otherFiles files to be copied to the map reduce cluster
* @param statusdir status dir location
- * @param usehcatalog whether the command uses hcatalog/needs to connect
+ * @param usesHcatalog whether the command uses hcatalog/needs to connect
* to hive metastore server
* @param completedUrl call back url
* @return list of arguments
@@ -74,10 +75,14 @@ public class PigDelegator extends Launch
*/
private List<String> makeArgs(String execute, String srcFile,
List<String> pigArgs, String otherFiles,
- String statusdir, boolean usehcatalog,
+ String statusdir, boolean usesHcatalog,
String completedUrl, boolean enablelog)
throws BadParam, IOException, InterruptedException {
ArrayList<String> args = new ArrayList<String>();
+ //check if the REST command specified explicitly to use hcatalog
+ // or if it says that implicitly using the pig -useHCatalog arg
+ boolean needsMetastoreAccess = usesHcatalog || hasPigArgUseHcat(pigArgs);
+
try {
ArrayList<String> allFiles = new ArrayList<String>();
if (TempletonUtils.isset(srcFile)) {
@@ -89,12 +94,32 @@ public class PigDelegator extends Launch
}
args.addAll(makeLauncherArgs(appConf, statusdir, completedUrl, allFiles, enablelog, JobType.PIG));
- if (appConf.pigArchive() != null && !appConf.pigArchive().equals(""))
- {
- args.add("-archives");
- args.add(appConf.pigArchive());
+ boolean shipPigTar = appConf.pigArchive() != null && !appConf.pigArchive().equals("");
+ boolean shipHiveTar = needsMetastoreAccess && appConf.hiveArchive() != null
+ && !appConf.hiveArchive().equals("");
+ if(shipPigTar || shipHiveTar) {
+ args.add(ARCHIVES);
+ StringBuilder archives = new StringBuilder();
+ if(shipPigTar) {
+ archives.append(appConf.pigArchive());
+ }
+ if(shipPigTar && shipHiveTar) {
+ archives.append(",");
+ }
+ if(shipHiveTar) {
+ archives.append(appConf.hiveArchive());
+ }
+ args.add(archives.toString());
+ }
+ if(shipHiveTar) {
+ addDef(args, JobSubmissionConstants.PigConstants.HIVE_HOME,
+ appConf.get(AppConfig.HIVE_HOME_PATH));
+ addDef(args, JobSubmissionConstants.PigConstants.HCAT_HOME,
+ appConf.get(AppConfig.HCAT_HOME_PATH));
+ //Pig which uses HCat will pass this to HCat so that it can find the metastore
+ addDef(args, JobSubmissionConstants.PigConstants.PIG_OPTS,
+ appConf.get(AppConfig.HIVE_PROPS_NAME));
}
-
args.add("--");
TempletonUtils.addCmdForWindows(args);
args.add(appConf.pigPath());
@@ -104,9 +129,7 @@ public class PigDelegator extends Launch
for (String pigArg : pigArgs) {
args.add(TempletonUtils.quoteForWindows(pigArg));
}
- //check if the REST command specified explicitly to use hcatalog
- // or if it says that implicitly using the pig -useHCatalog arg
- if(usehcatalog || hasPigArgUseHcat(pigArgs)){
+ if(needsMetastoreAccess) {
addHiveMetaStoreTokenArg();
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Tue Nov 5 07:01:32 2013
@@ -57,12 +57,10 @@ public class QueueStatusBean {
this.profile = profile;
id = profile.getJobID().toString();
- parentId = state.getId();
- if (id.equals(parentId))
- parentId = null;
+ parentId = state.getParent();
percentComplete = state.getPercentComplete();
exitValue = state.getExitValue();
- user = state.getUser();
+ user = profile.getUser();
callback = state.getCallback();
completed = state.getCompleteStatus();
userargs = state.getUserArgs();
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Tue Nov 5 07:01:32 2013
@@ -635,7 +635,7 @@ public class Server {
/**
* Run a MapReduce Jar job.
* Params correspond to the REST api params
- * @param usehcatalog if {@code true}, means the Jar uses HCat and thus needs to access
+ * @param usesHcatalog if {@code true}, means the Jar uses HCat and thus needs to access
* metastore, which requires additional steps for WebHCat to perform in a secure cluster.
* @param callback URL which WebHCat will call when the hive job finishes
* @see org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob
@@ -651,7 +651,7 @@ public class Server {
@FormParam("define") List<String> defines,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
- @FormParam("usehcatalog") boolean usehcatalog,
+ @FormParam("usehcatalog") boolean usesHcatalog,
@FormParam("enablelog") boolean enablelog)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
ExecuteException, IOException, InterruptedException {
@@ -677,14 +677,14 @@ public class Server {
return d.run(getDoAsUser(), userArgs,
jar, mainClass,
libjars, files, args, defines,
- statusdir, callback, usehcatalog, getCompletedUrl(), enablelog, JobType.JAR);
+ statusdir, callback, usesHcatalog, getCompletedUrl(), enablelog, JobType.JAR);
}
/**
* Run a Pig job.
- * Params correspond to the REST api params. If '-useHCatalog' is in the {@code pigArgs, usehcatalog},
+ * Params correspond to the REST api params. If '-useHCatalog' is in {@code pigArgs}, {@code usesHcatalog}
* is interpreted as true.
- * @param usehcatalog if {@code true}, means the Pig script uses HCat and thus needs to access
+ * @param usesHcatalog if {@code true}, means the Pig script uses HCat and thus needs to access
* metastore, which requires additional steps for WebHCat to perform in a secure cluster.
* This does nothing to ensure that Pig is installed on target node in the cluster.
* @param callback URL which WebHCat will call when the hive job finishes
@@ -699,7 +699,7 @@ public class Server {
@FormParam("files") String otherFiles,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
- @FormParam("usehcatalog") boolean usehcatalog,
+ @FormParam("usehcatalog") boolean usesHcatalog,
@FormParam("enablelog") boolean enablelog)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
ExecuteException, IOException, InterruptedException {
@@ -725,7 +725,7 @@ public class Server {
return d.run(getDoAsUser(), userArgs,
execute, srcFile,
pigArgs, otherFiles,
- statusdir, callback, usehcatalog, getCompletedUrl(), enablelog);
+ statusdir, callback, usesHcatalog, getCompletedUrl(), enablelog);
}
/**
@@ -855,15 +855,59 @@ public class Server {
}
/**
- * Return all the known job ids for this user.
+ * Return all the known job ids for this user based on the optional filter conditions.
+ * <p>
+ * Example usages:
+ * 1. curl -s 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan'
+ * Return all the Job IDs submitted by hsubramaniyan
+ * 2. curl -s
+ * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan&showall=true'
+ * Return all the Job IDs that are visible to hsubramaniyan
+ * 3. curl -s
+ * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan&jobid=job_201312091733_0003'
+ * Return all the Job IDs for hsubramaniyan after job_201312091733_0003.
+ * 4. curl -s 'http://localhost:50111/templeton/v1/jobs?
+ * user.name=hsubramaniyan&jobid=job_201312091733_0003&numrecords=5'
+ * Return the first 5(atmost) Job IDs submitted by hsubramaniyan after job_201312091733_0003.
+ * 5. curl -s
+ * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan&numrecords=5'
+ * Return the first 5(atmost) Job IDs submitted by hsubramaniyan after sorting the Job ID list
+ * lexicographically.
+ * </p>
+ * <p>
+ * Supporting pagination using "jobid" and "numrecords" parameters:
+ * Step 1: Get the start "jobid" = job_xxx_000, "numrecords" = n
+ * Step 2: Issue a curl command by specifying the user-defined "numrecords" and "jobid"
+ * Step 3: If list obtained from Step 2 has size equal to "numrecords", retrieve the list's
+ * last record and get the Job Id of the last record as job_yyy_k, else quit.
+ * Step 4: set "jobid"=job_yyy_k and go to step 2.
+ * </p>
+ * @param fields If "fields" set to "*", the request will return full details of the job.
+ * If "fields" is missing, will only return the job ID. Currently the value can only
+ * be "*", other values are not allowed and will throw exception.
+ * @param showall If "showall" is set to "true", the request will return all jobs the user
+ * has permission to view, not only the jobs belonging to the user.
+ * @param jobid If "jobid" is present, the records whose Job Id is lexicographically greater
+ * than "jobid" are only returned. For example, if "jobid" = "job_201312091733_0001",
+ * the jobs whose Job ID is greater than "job_201312091733_0001" are returned. The number of
+ * records returned depends on the value of "numrecords".
+ * @param numrecords If the "jobid" and "numrecords" parameters are present, the top #numrecords
+ * records appearing after "jobid" will be returned after sorting the Job Id list
+ * lexicographically.
+ * If "jobid" parameter is missing and "numrecords" is present, the top #numrecords will
+ * be returned after lexicographically sorting the Job Id list. If "jobid" parameter is present
+ * and "numrecords" is missing, all the records whose Job Id is greater than "jobid" are returned.
+ * @return list of job items based on the filter conditions specified by the user.
*/
@GET
@Path("jobs")
@Produces({MediaType.APPLICATION_JSON})
public List<JobItemBean> showJobList(@QueryParam("fields") String fields,
- @QueryParam("showall") boolean showall)
+ @QueryParam("showall") boolean showall,
+ @QueryParam("jobid") String jobid,
+ @QueryParam("numrecords") String numrecords)
throws NotAuthorizedException, BadParam, IOException, InterruptedException {
-
+
verifyUser();
boolean showDetails = false;
@@ -877,7 +921,46 @@ public class Server {
ListDelegator ld = new ListDelegator(appConf);
List<String> list = ld.run(getDoAsUser(), showall);
List<JobItemBean> detailList = new ArrayList<JobItemBean>();
+ int currRecord = 0;
+ int numRecords;
+
+ // Parse numrecords to an integer
+ try {
+ if (numrecords != null) {
+ numRecords = Integer.parseInt(numrecords);
+ if (numRecords <= 0) {
+ throw new BadParam("numrecords should be an integer > 0");
+ }
+ }
+ else {
+ numRecords = -1;
+ }
+ }
+ catch(Exception e) {
+ throw new BadParam("Invalid numrecords format: numrecords should be an integer > 0");
+ }
+
+ // Sort the list lexicographically
+ Collections.sort(list);
+
for (String job : list) {
+ // If numRecords = -1, fetch all records.
+ // Hence skip all the below checks when numRecords = -1.
+ if (numRecords != -1) {
+ // If currRecord >= numRecords, we have already fetched the top #numRecords
+ if (currRecord >= numRecords) {
+ break;
+ }
+ // If the current record needs to be returned based on the
+ // filter conditions specified by the user, increment the counter
+ else if ((jobid != null && job.compareTo(jobid) > 0) || jobid == null) {
+ currRecord++;
+ }
+ // The current record should not be included in the output detailList.
+ else {
+ continue;
+ }
+ }
JobItemBean jobItem = new JobItemBean();
jobItem.id = job;
if (showDetails) {
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java Tue Nov 5 07:01:32 2013
@@ -31,7 +31,13 @@ import org.apache.hadoop.mapred.JobStatu
import org.apache.hive.hcatalog.templeton.tool.JobState;
/**
- * Fetch the status of a given job id in the queue.
+ * Fetch the status of a given job id in the queue. There are three sources of the info
+ * 1. Query result from JobTracker
+ * 2. JobState saved by TempletonControllerJob when monitoring the TempletonControllerJob
+ * 3. TempletonControllerJob put a JobState for every job it launches, so child job can
+ * retrieve its parent job by its JobState
+ *
+ * Currently there is no permission restriction, any user can query any job
*/
public class StatusDelegator extends TempletonDelegator {
private static final Log LOG = LogFactory.getLog(StatusDelegator.class);
@@ -63,40 +69,19 @@ public class StatusDelegator extends Tem
}
}
- public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
+ static QueueStatusBean makeStatus(WebHCatJTShim tracker,
JobID jobid,
- String childid,
JobState state)
throws BadParam, IOException {
- JobID bestid = jobid;
- if (childid != null)
- bestid = StatusDelegator.StringToJobID(childid);
-
- JobStatus status = tracker.getJobStatus(bestid);
- JobProfile profile = tracker.getJobProfile(bestid);
-
- if (status == null || profile == null) {
- if (bestid != jobid) { // Corrupt childid, retry.
- LOG.error("Corrupt child id " + childid + " for " + jobid);
- bestid = jobid;
- status = tracker.getJobStatus(bestid);
- profile = tracker.getJobProfile(bestid);
- }
- }
+ JobStatus status = tracker.getJobStatus(jobid);
+ JobProfile profile = tracker.getJobProfile(jobid);
if (status == null || profile == null) // No such job.
- throw new BadParam("Could not find job " + bestid);
+ throw new BadParam("Could not find job " + jobid);
return new QueueStatusBean(state, status, profile);
}
- public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
- JobID jobid,
- JobState state)
- throws BadParam, IOException {
- return makeStatus(tracker, jobid, state.getChildId(), state);
- }
-
/**
* A version of JobID.forName with our app specific error handling.
*/
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java Tue Nov 5 07:01:32 2013
@@ -24,6 +24,11 @@ package org.apache.hive.hcatalog.templet
* or hive.
*/
public class TempletonDelegator {
+ /**
+ * http://hadoop.apache.org/docs/r1.0.4/commands_manual.html#Generic+Options
+ */
+ public static final String ARCHIVES = "-archives";
+
protected AppConfig appConf;
public TempletonDelegator(AppConfig appConf) {
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java Tue Nov 5 07:01:32 2013
@@ -126,19 +126,6 @@ public class JobState {
}
/**
- * The child id of TempletonControllerJob
- */
- public String getChildId()
- throws IOException {
- return getField("childid");
- }
-
- public void setChildId(String childid)
- throws IOException {
- setField("childid", childid);
- }
-
- /**
* Add a jobid to the list of children of this job.
*
* @param jobid
@@ -151,6 +138,9 @@ public class JobState {
} catch (Exception e) {
// There are none or they're not readable.
}
+ if (jobids==null) {
+ jobids = "";
+ }
if (!jobids.equals("")) {
jobids += ",";
}
@@ -159,6 +149,18 @@ public class JobState {
}
/**
+ * Set parent job of this job
+ * @param id
+ */
+ public void setParent(String id) throws IOException {
+ setField("parent", id);
+ }
+
+ public String getParent() throws IOException {
+ return getField("parent");
+ }
+
+ /**
* Get a list of jobstates for jobs that are children of this job.
* @throws IOException
*/
@@ -171,31 +173,6 @@ public class JobState {
}
/**
- * Save a comma-separated list of jobids that are children
- * of this job.
- * @param jobids
- * @throws IOException
- */
- public void setChildren(String jobids) throws IOException {
- setField("children", jobids);
- }
-
- /**
- * Set the list of child jobs of this job
- * @param children
- */
- public void setChildren(List<JobState> children) throws IOException {
- String val = "";
- for (JobState jobstate : children) {
- if (!val.equals("")) {
- val += ",";
- }
- val += jobstate.getId();
- }
- setField("children", val);
- }
-
- /**
* The system exit value of the job.
*/
public Long getExitValue()
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java Tue Nov 5 07:01:32 2013
@@ -34,4 +34,12 @@ public interface JobSubmissionConstants
public static final int WATCHER_TIMEOUT_SECS = 10;
public static final int KEEP_ALIVE_MSEC = 60 * 1000;
public static final String TOKEN_FILE_ARG_PLACEHOLDER = "__WEBHCAT_TOKEN_FILE_LOCATION__";
+ /**
+ * constants needed for Pig job submission
+ */
+ public static interface PigConstants {
+ public static final String HIVE_HOME = "HIVE_HOME";
+ public static final String HCAT_HOME = "HCAT_HOME";
+ public static final String PIG_OPTS = "PIG_OPTS";
+ }
}
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java Tue Nov 5 07:01:32 2013
@@ -33,6 +33,7 @@ import org.apache.hive.hcatalog.templeto
import org.apache.hive.hcatalog.templeton.LauncherDelegator;
import java.io.BufferedReader;
+import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -65,8 +66,25 @@ public class LaunchMapper extends Mapper
* it will end up in 'syslog' of this Map task. For example, look for KeepAlive heartbeat msgs.
*/
private static final Log LOG = LogFactory.getLog(LaunchMapper.class);
-
-
+ /**
+ * When a Pig job is submitted and it uses HCat, WebHCat may be configured to ship hive tar
+ * to the target node. Pig on the target node needs some env vars configured.
+ */
+ private static void handlePigEnvVars(Configuration conf, Map<String, String> env) {
+ if(conf.get(PigConstants.HIVE_HOME) != null) {
+ env.put(PigConstants.HIVE_HOME, new File(conf.get(PigConstants.HIVE_HOME)).getAbsolutePath());
+ }
+ if(conf.get(PigConstants.HCAT_HOME) != null) {
+ env.put(PigConstants.HCAT_HOME, new File(conf.get(PigConstants.HCAT_HOME)).getAbsolutePath());
+ }
+ if(conf.get(PigConstants.PIG_OPTS) != null) {
+ StringBuilder pigOpts = new StringBuilder();
+ for(String prop : conf.get(PigConstants.PIG_OPTS).split(",")) {
+ pigOpts.append("-D").append(prop).append(" ");
+ }
+ env.put(PigConstants.PIG_OPTS, pigOpts.toString());
+ }
+ }
protected Process startJob(Context context, String user, String overrideClasspath)
throws IOException, InterruptedException {
Configuration conf = context.getConfiguration();
@@ -79,8 +97,8 @@ public class LaunchMapper extends Mapper
removeEnv.add("hadoop-command");
removeEnv.add("CLASS");
removeEnv.add("mapredcommand");
- Map<String, String> env = TempletonUtils.hadoopUserEnv(user,
- overrideClasspath);
+ Map<String, String> env = TempletonUtils.hadoopUserEnv(user, overrideClasspath);
+ handlePigEnvVars(conf, env);
List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
@@ -261,8 +279,15 @@ public class LaunchMapper extends Mapper
if (percent != null || childid != null) {
state = new JobState(jobid.toString(), conf);
- state.setPercentComplete(percent);
- state.setChildId(childid);
+ if (percent != null) {
+ state.setPercentComplete(percent);
+ }
+ if (childid != null) {
+ JobState childState = new JobState(childid, conf);
+ childState.setParent(jobid.toString());
+ state.addChild(childid);
+ state.close();
+ }
}
} catch (IOException e) {
LOG.error("templeton: state error: ", e);
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java Tue Nov 5 07:01:32 2013
@@ -22,6 +22,7 @@ import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -182,6 +183,9 @@ public class TempletonControllerJob exte
@Override
public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException,
TException {
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("Preparing to submit job: " + Arrays.toString(args));
+ }
Configuration conf = getConf();
conf.set(JAR_ARGS_NAME, TempletonUtils.encodeArray(args));
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java Tue Nov 5 07:01:32 2013
@@ -44,7 +44,7 @@ import java.util.Map;
/**
* A set of tests exercising e2e WebHCat DDL APIs. These tests are somewhat
* between WebHCat e2e (hcatalog/src/tests/e2e/templeton) tests and simple
- *
+ *
* unit tests. This will start a WebHCat server and make REST calls to it.
* It doesn't need Hadoop or (standalone) metastore to be running.
* Running this is much simpler than e2e tests.
@@ -75,7 +75,7 @@ public class TestWebHCatE2e {
LOG.warn("Unable to find free port; using default: " + webhcatPort);
}
templetonBaseUrl = templetonBaseUrl.replace("50111", Integer.toString(webhcatPort));
- templetonServer = new Main(new String[] {"-D" +
+ templetonServer = new Main(new String[] {"-D" +
AppConfig.UNIT_TEST_MODE + "=true", "-D" + AppConfig.PORT + "=" + webhcatPort});
LOG.info("Starting Main; WebHCat using port: " + webhcatPort);
templetonServer.run();
@@ -156,7 +156,7 @@ public class TestWebHCatE2e {
public void createDataBase() throws IOException {
Map<String, Object> props = new HashMap<String, Object>();
props.put("comment", "Hello, there");
- props.put("location", "file://" + System.getProperty("hive.metastore.warehouse.dir"));
+ props.put("location", System.getProperty("test.warehouse.dir"));
Map<String, String> props2 = new HashMap<String, String>();
props2.put("prop", "val");
props.put("properties", props2);
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java Tue Nov 5 07:01:32 2013
@@ -29,7 +29,7 @@ import junit.framework.Assert;
public class TestJobIDParser {
@Test
public void testParsePig() throws IOException {
- String errFileName = "../../src/test/data/status/pig";
+ String errFileName = "src/test/data/status/pig";
PigJobIDParser pigJobIDParser = new PigJobIDParser(errFileName, new Configuration());
List<String> jobs = pigJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -37,7 +37,7 @@ public class TestJobIDParser {
@Test
public void testParseHive() throws IOException {
- String errFileName = "../../src/test/data/status/hive";
+ String errFileName = "src/test/data/status/hive";
HiveJobIDParser hiveJobIDParser = new HiveJobIDParser(errFileName, new Configuration());
List<String> jobs = hiveJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -45,7 +45,7 @@ public class TestJobIDParser {
@Test
public void testParseJar() throws IOException {
- String errFileName = "../../src/test/data/status/jar";
+ String errFileName = "src/test/data/status/jar";
JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
List<String> jobs = jarJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -53,7 +53,7 @@ public class TestJobIDParser {
@Test
public void testParseStreaming() throws IOException {
- String errFileName = "../../src/test/data/status/streaming";
+ String errFileName = "src/test/data/status/streaming";
JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
List<String> jobs = jarJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Tue Nov 5 07:01:32 2013
@@ -35,7 +35,7 @@ public class TestTempletonUtils {
"2011-12-15 18:12:21,758 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - More information at: http://localhost:50030/jobdetails.jsp?jobid=job_201112140012_0047",
"2011-12-15 18:12:46,907 [main] INFO org.apache.pig.tools.pigstats.SimplePigStats - Script Statistics: "
};
- public static final String testDataDir = System.getProperty("test.data.dir");
+ public static final String testDataDir = System.getProperty("test.tmp.dir");
File tmpFile;
File usrFile;
Modified: hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java?rev=1538880&r1=1538724&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java (original)
+++ hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java Tue Nov 5 07:01:32 2013
@@ -24,6 +24,8 @@ import java.io.FileReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
+
/**
* Suite for testing location. e.g. if "alter table alter partition
* location" is run, do the partitions end up in the correct location.
@@ -82,7 +84,7 @@ public class TestLocationQueries extends
return failedCount;
}
- public CheckResults(String outDir, String logDir, boolean miniMr,
+ public CheckResults(String outDir, String logDir, MiniClusterType miniMr,
String hadoopVer, String locationSubdir)
throws Exception
{
@@ -102,8 +104,9 @@ public class TestLocationQueries extends
File[] qfiles = setupQFiles(testNames);
QTestUtil[] qt = new QTestUtil[qfiles.length];
+
for (int i = 0; i < qfiles.length; i++) {
- qt[i] = new CheckResults(resDir, logDir, false, "0.20", "parta");
+ qt[i] = new CheckResults(resDir, logDir, MiniClusterType.none, "0.20", "parta");
qt[i].addFile(qfiles[i]);
qt[i].clearTestSideEffects();
}
Modified: hive/branches/tez/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/qtest/pom.xml?rev=1538880&r1=1538724&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/itests/qtest/pom.xml (original)
+++ hive/branches/tez/itests/qtest/pom.xml Tue Nov 5 07:01:32 2013
@@ -38,6 +38,7 @@
<execute.beeline.tests>false</execute.beeline.tests>
<minimr.query.files>list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q</minimr.query.files>
<minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q</minimr.query.negative.files>
+ <minitez.query.files>tez_join_tests.q,tez_joins_explain.q,mrr.q,tez_dml.q</minitez.query.files>
<beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rena
me.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_o
verwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
</properties>
@@ -119,7 +120,6 @@
<classifier>tests</classifier>
</dependency>
</dependencies>
-
<profiles>
<profile>
<!-- replaces -Dclustermode=minimr which I am not sure was used? -->
@@ -191,6 +191,11 @@
<version>${hadoop-23.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
</dependencies>
</profile>
</profiles>
@@ -207,6 +212,7 @@
<configuration>
<target>
<property name="test.classpath" refid="maven.test.classpath"/>
+ <echo message="${test.classpath}"/>
<taskdef resource="net/sf/antcontrib/antcontrib.properties"
classpathref="maven.plugin.classpath" />
<taskdef name="qtestgen" classname="org.apache.hadoop.hive.ant.QTestGenTask"
@@ -257,7 +263,7 @@
templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
queryFile="${qfile}"
- excludeQueryFile="${minimr.query.files}"
+ excludeQueryFile="${minimr.query.files},${minitez.query.files}"
queryFileRegex="${qfile_regex}"
clusterMode="${clustermode}"
runDisabled="${run_disabled}"
@@ -294,9 +300,32 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestMinimrCliDriver"
logFile="${project.build.directory}/testminimrclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
- hadoopVersion="${hadoopVersion}"
+ hadoopVersion="${active.hadoop.version}"
/>
+ <if>
+ <equals arg1="${active.hadoop.version}" arg2="${hadoop-23.version}"/>
+ <then>
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+ queryFile="${qfile}"
+ includeQueryFile="${minitez.query.files}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="tez"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/"
+ className="TestMiniTezCliDriver"
+ logFile="${project.build.directory}/testminitezclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+ hadoopVersion="${active.hadoop.version}"
+ />
+ </then>
+ <else>
+ </else>
+ </if>
+
<!-- Negative Minimr -->
<qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
Modified: hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java?rev=1538880&r1=1538724&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java (original)
+++ hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java Tue Nov 5 07:01:32 2013
@@ -18,13 +18,14 @@
package org.apache.hadoop.hive.hbase;
import org.apache.hadoop.hive.ql.QTestUtil;
+import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
/**
* HBaseQTestUtil initializes HBase-specific test fixtures.
*/
public class HBaseQTestUtil extends QTestUtil {
public HBaseQTestUtil(
- String outDir, String logDir, boolean miniMr, HBaseTestSetup setup)
+ String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup)
throws Exception {
super(outDir, logDir, miniMr, null);
@@ -32,6 +33,7 @@ public class HBaseQTestUtil extends QTes
super.init();
}
+ @Override
public void init() throws Exception {
// defer
}
Modified: hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1538880&r1=1538724&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Tue Nov 5 07:01:32 2013
@@ -83,6 +83,7 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
import org.apache.hadoop.hive.serde2.thrift.test.Complex;
import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.Hadoop23Shims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -111,6 +112,7 @@ public class QTestUtil {
private final Set<String> qSortSet;
private static final String SORT_SUFFIX = ".sorted";
public static final HashSet<String> srcTables = new HashSet<String>();
+ private static MiniClusterType clusterType = MiniClusterType.none;
private ParseDriver pd;
private Hive db;
protected HiveConf conf;
@@ -204,7 +206,7 @@ public class QTestUtil {
}
public QTestUtil(String outDir, String logDir) throws Exception {
- this(outDir, logDir, false, "0.20");
+ this(outDir, logDir, MiniClusterType.none, "0.20");
}
public String getOutputDirectory() {
@@ -242,9 +244,8 @@ public class QTestUtil {
conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
"org.apache.hadoop.hive.metastore.VerifyingObjectStore");
- if (miniMr) {
+ if (mr != null) {
assert dfs != null;
- assert mr != null;
mr.setupConfiguration(conf);
@@ -297,21 +298,46 @@ public class QTestUtil {
return uriStr;
}
- public QTestUtil(String outDir, String logDir, boolean miniMr, String hadoopVer)
+ public enum MiniClusterType {
+ mr,
+ tez,
+ none;
+
+ public static MiniClusterType valueForString(String type) {
+ if (type.equals("miniMR")) {
+ return mr;
+ } else if (type.equals("tez")) {
+ return tez;
+ } else {
+ return none;
+ }
+ }
+ }
+
+ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer)
throws Exception {
this.outDir = outDir;
this.logDir = logDir;
conf = new HiveConf(Driver.class);
- this.miniMr = miniMr;
+ this.miniMr = (clusterType == MiniClusterType.mr);
this.hadoopVer = getHadoopMainVersion(hadoopVer);
qMap = new TreeMap<String, String>();
qSkipSet = new HashSet<String>();
qSortSet = new HashSet<String>();
+ this.clusterType = clusterType;
- if (miniMr) {
- dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
+ HadoopShims shims = ShimLoader.getHadoopShims();
+ if (clusterType != MiniClusterType.none) {
+ dfs = shims.getMiniDfs(conf, 4, true, null);
FileSystem fs = dfs.getFileSystem();
- mr = ShimLoader.getHadoopShims().getMiniMrCluster(conf, 4, getHdfsUriString(fs.getUri().toString()), 1);
+ if (clusterType == MiniClusterType.tez) {
+ if (!(shims instanceof Hadoop23Shims)) {
+ throw new Exception("Cannot run tez on hadoop-1, Version: "+this.hadoopVer);
+ }
+ mr = ((Hadoop23Shims)shims).getMiniTezCluster(conf, 4, getHdfsUriString(fs.getUri().toString()), 1);
+ } else {
+ mr = shims.getMiniMrCluster(conf, 4, getHdfsUriString(fs.getUri().toString()), 1);
+ }
}
initConf();
@@ -761,6 +787,11 @@ public class QTestUtil {
ss.err = new CachingPrintStream(fo, true, "UTF-8");
ss.setIsSilent(true);
SessionState oldSs = SessionState.get();
+
+ if (oldSs != null && clusterType == MiniClusterType.tez) {
+ oldSs.close();
+ }
+
if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
oldSs.out.close();
}
@@ -1442,7 +1473,7 @@ public class QTestUtil {
{
QTestUtil[] qt = new QTestUtil[qfiles.length];
for (int i = 0; i < qfiles.length; i++) {
- qt[i] = new QTestUtil(resDir, logDir, false, "0.20");
+ qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, "0.20");
qt[i].addFile(qfiles[i]);
qt[i].clearTestSideEffects();
}