Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 06:00:54 UTC
svn commit: r1629563 [3/33] - in /hive/branches/spark: ./ accumulo-handler/
beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ bin/ext/ common/
common/src/java/org/apache/hadoop/hive/conf/
common/src/test/org/apache/hadoop/hive/common/type/ contri...
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Mon Oct 6 04:00:39 2014
@@ -18,19 +18,25 @@
*/
package org.apache.hive.hcatalog.pig;
+import com.google.common.collect.ImmutableSet;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
-
-import junit.framework.Assert;
+import java.util.Map;
+import java.util.Set;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -46,12 +52,20 @@ import org.apache.pig.impl.logicalLayer.
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assume.assumeTrue;
+
+@RunWith(Parameterized.class)
public class TestHCatLoaderComplexSchema {
//private static MiniCluster cluster = MiniCluster.buildCluster();
@@ -59,13 +73,33 @@ public class TestHCatLoaderComplexSchema
//private static Properties props;
private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoaderComplexSchema.class);
- private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
- driver.run("drop table " + tablename);
+ private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+ new HashMap<String, Set<String>>() {{
+ put(IOConstants.AVRO, new HashSet<String>() {{
+ add("testSyntheticComplexSchema");
+ add("testTupleInBagInTupleInBag");
+ add("testMapWithComplexData");
+ }});
+ put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+ add("testSyntheticComplexSchema");
+ add("testTupleInBagInTupleInBag");
+ add("testMapWithComplexData");
+ }});
+ }};
+
+ private String storageFormat;
+
+ @Parameterized.Parameters
+ public static Collection<Object[]> generateParameters() {
+ return StorageFormats.names();
+ }
+
+ public TestHCatLoaderComplexSchema(String storageFormat) {
+ this.storageFormat = storageFormat;
}
- protected String storageFormat() {
- return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
- "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+ private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+ driver.run("drop table " + tablename);
}
private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
@@ -74,7 +108,7 @@ public class TestHCatLoaderComplexSchema
if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
createTable = createTable + "partitioned by (" + partitionedBy + ") ";
}
- createTable = createTable + "stored as " + storageFormat();
+ createTable = createTable + "stored as " + storageFormat;
LOG.info("Creating table:\n {}", createTable);
CommandProcessorResponse result = driver.run(createTable);
int retCode = result.getResponseCode();
@@ -89,7 +123,6 @@ public class TestHCatLoaderComplexSchema
@BeforeClass
public static void setUpBeforeClass() throws Exception {
-
HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class);
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
@@ -98,7 +131,6 @@ public class TestHCatLoaderComplexSchema
SessionState.start(new CliSessionState(hiveConf));
//props = new Properties();
//props.setProperty("fs.default.name", cluster.getProperties().getProperty("fs.default.name"));
-
}
private static final TupleFactory tf = TupleFactory.getInstance();
@@ -118,6 +150,7 @@ public class TestHCatLoaderComplexSchema
*/
@Test
public void testSyntheticComplexSchema() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
String pigSchema =
"a: " +
"(" +
@@ -186,7 +219,6 @@ public class TestHCatLoaderComplexSchema
verifyWriteRead("testSyntheticComplexSchema", pigSchema, tableSchema, data, false);
verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, true);
verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, false);
-
}
private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
@@ -219,7 +251,7 @@ public class TestHCatLoaderComplexSchema
}
Schema dumpedXSchema = server.dumpSchema("X");
- Assert.assertEquals(
+ assertEquals(
"expected " + dumpedASchema + " but was " + dumpedXSchema + " (ignoring field names)",
"",
compareIgnoreFiledNames(dumpedASchema, dumpedXSchema));
@@ -230,14 +262,14 @@ public class TestHCatLoaderComplexSchema
}
private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
- Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+ assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
for (int i = 0; i < t1.size(); i++) {
Object f1 = t1.get(i);
Object f2 = t2.get(i);
- Assert.assertNotNull("left", f1);
- Assert.assertNotNull("right", f2);
+ assertNotNull("left", f1);
+ assertNotNull("right", f2);
String msg = "right: " + f1 + ", left: " + f2;
- Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+ assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
}
}
@@ -278,6 +310,7 @@ public class TestHCatLoaderComplexSchema
*/
@Test
public void testTupleInBagInTupleInBag() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
String pigSchema = "a: { b : ( c: { d: (i : long) } ) }";
String tableSchema = "a array< array< bigint > >";
@@ -297,11 +330,11 @@ public class TestHCatLoaderComplexSchema
verifyWriteRead("TupleInBagInTupleInBag3", pigSchema, tableSchema2, data, true);
verifyWriteRead("TupleInBagInTupleInBag4", pigSchema, tableSchema2, data, false);
-
}
@Test
public void testMapWithComplexData() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
String pigSchema = "a: long, b: map[]";
String tableSchema = "a bigint, b map<string, struct<aa:bigint, ab:string>>";
@@ -320,6 +353,5 @@ public class TestHCatLoaderComplexSchema
}
verifyWriteRead("testMapWithComplexData", pigSchema, tableSchema, data, true);
verifyWriteRead("testMapWithComplexData2", pigSchema, tableSchema, data, false);
-
}
}
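
A note on the pattern introduced above: each converted test now begins with assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS)), so a format/test combination listed in the map is reported by JUnit as skipped (via an assumption failure) rather than failed. TestUtil itself is not part of this hunk; a minimal sketch of what such a helper could look like, assuming it resolves the calling test method by walking the current stack trace (the real helper may differ), is:

public final class TestUtil {
  private TestUtil() {
  }

  /**
   * Returns true if any method on the current call stack is listed as
   * disabled for the given storage format. Sketch only.
   */
  public static boolean shouldSkip(String storageFormat,
      java.util.Map<String, java.util.Set<String>> disabledStorageFormats) {
    java.util.Set<String> disabledMethods = disabledStorageFormats.get(storageFormat);
    if (disabledMethods == null) {
      return false;
    }
    // Look for the @Test method that (indirectly) called us.
    for (StackTraceElement frame : Thread.currentThread().getStackTrace()) {
      if (disabledMethods.contains(frame.getMethodName())) {
        return true;
      }
    }
    return false;
  }
}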
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java Mon Oct 6 04:00:39 2014
@@ -18,18 +18,27 @@
*/
package org.apache.hive.hcatalog.pig;
+import com.google.common.collect.ImmutableSet;
+
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
import java.util.Properties;
+import java.util.Set;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hive.hcatalog.HcatTestUtils;
@@ -47,20 +56,92 @@ import org.apache.pig.impl.util.LogUtils
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.junit.Assert;
+import org.junit.Before;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
+
+@RunWith(Parameterized.class)
public class TestHCatStorer extends HCatBaseTest {
private static final Logger LOG = LoggerFactory.getLogger(TestHCatStorer.class);
private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
+ private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+ new HashMap<String, Set<String>>() {{
+ put(IOConstants.AVRO, new HashSet<String>() {{
+ add("testBagNStruct");
+ add("testDateCharTypes");
+ add("testDynamicPartitioningMultiPartColsInDataNoSpec");
+ add("testDynamicPartitioningMultiPartColsInDataPartialSpec");
+ add("testMultiPartColsInData");
+ add("testPartColsInData");
+ add("testStoreFuncAllSimpleTypes");
+ add("testStoreFuncSimple");
+ add("testStoreInPartiitonedTbl");
+ add("testStoreMultiTables");
+ add("testStoreWithNoCtorArgs");
+ add("testStoreWithNoSchema");
+ add("testWriteChar");
+ add("testWriteDate");
+ add("testWriteDate2");
+ add("testWriteDate3");
+ add("testWriteDecimal");
+ add("testWriteDecimalX");
+ add("testWriteDecimalXY");
+ add("testWriteSmallint");
+ add("testWriteTimestamp");
+ add("testWriteTinyint");
+ add("testWriteVarchar");
+ }});
+ put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+ add("testBagNStruct");
+ add("testDateCharTypes");
+ add("testDynamicPartitioningMultiPartColsInDataNoSpec");
+ add("testDynamicPartitioningMultiPartColsInDataPartialSpec");
+ add("testMultiPartColsInData");
+ add("testPartColsInData");
+ add("testStoreFuncAllSimpleTypes");
+ add("testStoreFuncSimple");
+ add("testStoreInPartiitonedTbl");
+ add("testStoreMultiTables");
+ add("testStoreWithNoCtorArgs");
+ add("testStoreWithNoSchema");
+ add("testWriteChar");
+ add("testWriteDate");
+ add("testWriteDate2");
+ add("testWriteDate3");
+ add("testWriteDecimal");
+ add("testWriteDecimalX");
+ add("testWriteDecimalXY");
+ add("testWriteSmallint");
+ add("testWriteTimestamp");
+ add("testWriteTinyint");
+ add("testWriteVarchar");
+ }});
+ }};
+
+ private String storageFormat;
+
+ @Parameterized.Parameters
+ public static Collection<Object[]> generateParameters() {
+ return StorageFormats.names();
+ }
+
+ public TestHCatStorer(String storageFormat) {
+ this.storageFormat = storageFormat;
+ }
+
//Start: tests that check values from Pig that are out of range for target column
@Test
public void testWriteTinyint() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
pigValueRangeTest("junitTypeTest1", "tinyint", "int", null, Integer.toString(1), Integer.toString(1));
pigValueRangeTestOverflow("junitTypeTest1", "tinyint", "int", null, Integer.toString(300));
pigValueRangeTestOverflow("junitTypeTest2", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -71,6 +152,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteSmallint() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
pigValueRangeTest("junitTypeTest1", "smallint", "int", null, Integer.toString(Short.MIN_VALUE),
Integer.toString(Short.MIN_VALUE));
pigValueRangeTestOverflow("junitTypeTest2", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -81,6 +163,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteChar() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
pigValueRangeTest("junitTypeTest1", "char(5)", "chararray", null, "xxx", "xxx ");
pigValueRangeTestOverflow("junitTypeTest1", "char(5)", "chararray", null, "too_long");
pigValueRangeTestOverflow("junitTypeTest2", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -91,6 +174,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteVarchar() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
pigValueRangeTest("junitTypeTest1", "varchar(5)", "chararray", null, "xxx", "xxx");
pigValueRangeTestOverflow("junitTypeTest1", "varchar(5)", "chararray", null, "too_long");
pigValueRangeTestOverflow("junitTypeTest2", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -101,6 +185,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteDecimalXY() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
pigValueRangeTest("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(1.2).toString(),
BigDecimal.valueOf(1.2).toString());
pigValueRangeTestOverflow("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(12345.12).toString());
@@ -112,6 +197,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteDecimalX() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
//interestingly decimal(2) means decimal(2,0)
pigValueRangeTest("junitTypeTest1", "decimal(2)", "bigdecimal", null, BigDecimal.valueOf(12).toString(),
BigDecimal.valueOf(12).toString());
@@ -123,6 +209,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteDecimal() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
//decimal means decimal(10,0)
pigValueRangeTest("junitTypeTest1", "decimal", "bigdecimal", null, BigDecimal.valueOf(1234567890).toString(),
BigDecimal.valueOf(1234567890).toString());
@@ -137,8 +224,10 @@ public class TestHCatStorer extends HCat
* include time to make sure it's 0
*/
private static final String FORMAT_4_DATE = "yyyy-MM-dd HH:mm:ss";
+
@Test
public void testWriteDate() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
DateTime d = new DateTime(1991,10,11,0,0);
pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(),
d.toString(FORMAT_4_DATE), FORMAT_4_DATE);
@@ -157,6 +246,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteDate3() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
DateTime d = new DateTime(1991,10,11,23,10,DateTimeZone.forOffsetHours(-11));
FrontendException fe = null;
//expect to fail since the time component is not 0
@@ -170,6 +260,7 @@ public class TestHCatStorer extends HCat
@Test
public void testWriteDate2() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
DateTime d = new DateTime(1991,11,12,0,0, DateTimeZone.forID("US/Eastern"));
pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(),
d.toString(FORMAT_4_DATE), FORMAT_4_DATE);
@@ -193,6 +284,7 @@ public class TestHCatStorer extends HCat
*/
@Test
public void testWriteTimestamp() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
DateTime d = new DateTime(1991,10,11,14,23,30, 10);//uses default TZ
pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
d.toDateTime(DateTimeZone.getDefault()).toString());
@@ -229,13 +321,6 @@ public class TestHCatStorer extends HCat
}
/**
- * this should be overridden in subclass to test with different file formats
- */
- String getStorageFormat() {
- return "RCFILE";
- }
-
- /**
* This is used to test how Pig values of various data types that are out of range for the Hive
* target column are handled. Currently the options are to raise an error or write NULL.
* 1. create a data file with 1 column, 1 row
@@ -258,7 +343,7 @@ public class TestHCatStorer extends HCat
throws Exception {
TestHCatLoader.dropTable(tblName, driver);
final String field = "f1";
- TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, getStorageFormat());
+ TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, storageFormat);
HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, new String[] {inputValue});
LOG.debug("File=" + INPUT_FILE_NAME);
dumpFile(INPUT_FILE_NAME);
@@ -287,11 +372,11 @@ public class TestHCatStorer extends HCat
//do nothing, fall through and verify the data
break;
case Throw:
- Assert.assertTrue("Expected a FrontendException", fe != null);
- Assert.assertEquals("Expected a different FrontendException.", fe.getMessage(), "Unable to store alias A");
+ assertTrue("Expected a FrontendException", fe != null);
+ assertEquals("Expected a different FrontendException.", fe.getMessage(), "Unable to store alias A");
return;//this test is done
default:
- Assert.assertFalse("Unexpected goal: " + goal, 1 == 1);
+ assertFalse("Unexpected goal: " + goal, 1 == 1);
}
}
logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
@@ -310,17 +395,17 @@ public class TestHCatStorer extends HCat
Tuple t = itr.next();
if("date".equals(hiveType)) {
DateTime dateTime = (DateTime)t.get(0);
- Assert.assertTrue(format != null);
- Assert.assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, dateTime== null ? null : dateTime.toString(format));
+ assertTrue(format != null);
+ assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, dateTime== null ? null : dateTime.toString(format));
}
else {
- Assert.assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, t.isNull(0) ? null : t.get(0).toString());
+ assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, t.isNull(0) ? null : t.get(0).toString());
}
//see comment at "Dumping rows via SQL..." for why this doesn't work
- //Assert.assertEquals("Comparing Pig to Hive", t.get(0), l.get(0));
+ //assertEquals("Comparing Pig to Hive", t.get(0), l.get(0));
numRowsRead++;
}
- Assert.assertEquals("Expected " + 1 + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME + "; table " +
+ assertEquals("Expected " + 1 + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME + "; table " +
tblName, 1, numRowsRead);
/* Misc notes:
Unfortunately Timestamp.toString() adjusts the value for local TZ and 't' is a String
@@ -334,10 +419,11 @@ public class TestHCatStorer extends HCat
*/
@Test
public void testDateCharTypes() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
final String tblName = "junit_date_char";
TestHCatLoader.dropTable(tblName, driver);
TestHCatLoader.createTable(tblName,
- "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, getStorageFormat());
+ "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
int NUM_ROWS = 5;
String[] rows = new String[NUM_ROWS];
for(int i = 0; i < NUM_ROWS; i++) {
@@ -376,12 +462,12 @@ public class TestHCatStorer extends HCat
rowFromPig.append(t.get(i)).append("\t");
}
rowFromPig.setLength(rowFromPig.length() - 1);
- Assert.assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
+ assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
//see comment at "Dumping rows via SQL..." for why this doesn't work (for all types)
- //Assert.assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
+ //assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
numRowsRead++;
}
- Assert.assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
+ assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
}
static void dumpFile(String fileName) throws Exception {
@@ -397,9 +483,10 @@ public class TestHCatStorer extends HCat
@Test
public void testPartColsInData() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -420,22 +507,23 @@ public class TestHCatStorer extends HCat
while (itr.hasNext()) {
Tuple t = itr.next();
- Assert.assertEquals(2, t.size());
- Assert.assertEquals(t.get(0), i);
- Assert.assertEquals(t.get(1), "1");
+ assertEquals(2, t.size());
+ assertEquals(t.get(0), i);
+ assertEquals(t.get(1), "1");
i++;
}
- Assert.assertFalse(itr.hasNext());
- Assert.assertEquals(LOOP_SIZE, i);
+ assertFalse(itr.hasNext());
+ assertEquals(LOOP_SIZE, i);
}
@Test
public void testMultiPartColsInData() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table employee");
String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
- " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+ " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
@@ -464,20 +552,21 @@ public class TestHCatStorer extends HCat
driver.run("select * from employee");
ArrayList<String> results = new ArrayList<String>();
driver.getResults(results);
- Assert.assertEquals(4, results.size());
+ assertEquals(4, results.size());
Collections.sort(results);
- Assert.assertEquals(inputData[0], results.get(0));
- Assert.assertEquals(inputData[1], results.get(1));
- Assert.assertEquals(inputData[2], results.get(2));
- Assert.assertEquals(inputData[3], results.get(3));
+ assertEquals(inputData[0], results.get(0));
+ assertEquals(inputData[1], results.get(1));
+ assertEquals(inputData[2], results.get(2));
+ assertEquals(inputData[3], results.get(3));
driver.run("drop table employee");
}
@Test
public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -498,20 +587,21 @@ public class TestHCatStorer extends HCat
while (itr.hasNext()) {
Tuple t = itr.next();
- Assert.assertEquals(2, t.size());
- Assert.assertEquals(t.get(0), i);
- Assert.assertEquals(t.get(1), "1");
+ assertEquals(2, t.size());
+ assertEquals(t.get(0), i);
+ assertEquals(t.get(1), "1");
i++;
}
- Assert.assertFalse(itr.hasNext());
- Assert.assertEquals(11, i);
+ assertFalse(itr.hasNext());
+ assertEquals(11, i);
}
@Test
public void testNoAlias() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_parted");
- String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + getStorageFormat();
+ String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -526,12 +616,12 @@ public class TestHCatStorer extends HCat
server.executeBatch();
} catch (PigException fe) {
PigException pe = LogUtils.getPigException(fe);
- Assert.assertTrue(pe instanceof FrontendException);
- Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
- Assert.assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
+ assertTrue(pe instanceof FrontendException);
+ assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+ assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
errCaught = true;
}
- Assert.assertTrue(errCaught);
+ assertTrue(errCaught);
errCaught = false;
try {
server.setBatchOn();
@@ -541,20 +631,21 @@ public class TestHCatStorer extends HCat
server.executeBatch();
} catch (PigException fe) {
PigException pe = LogUtils.getPigException(fe);
- Assert.assertTrue(pe instanceof FrontendException);
- Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
- Assert.assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
+ assertTrue(pe instanceof FrontendException);
+ assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+ assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
errCaught = true;
}
driver.run("drop table junit_parted");
- Assert.assertTrue(errCaught);
+ assertTrue(errCaught);
}
@Test
public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -598,18 +689,19 @@ public class TestHCatStorer extends HCat
Iterator<String> itr = res.iterator();
for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
- Assert.assertEquals(input[i], itr.next());
+ assertEquals(input[i], itr.next());
}
- Assert.assertFalse(itr.hasNext());
+ assertFalse(itr.hasNext());
}
@Test
public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -637,18 +729,19 @@ public class TestHCatStorer extends HCat
driver.run("drop table junit_unparted");
Iterator<String> itr = res.iterator();
for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
- Assert.assertEquals(input[i], itr.next());
+ assertEquals(input[i], itr.next());
}
- Assert.assertFalse(itr.hasNext());
+ assertFalse(itr.hasNext());
}
@Test
public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -676,18 +769,19 @@ public class TestHCatStorer extends HCat
driver.run("drop table junit_unparted");
Iterator<String> itr = res.iterator();
for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
- Assert.assertEquals(input[i], itr.next());
+ assertEquals(input[i], itr.next());
}
- Assert.assertFalse(itr.hasNext());
+ assertFalse(itr.hasNext());
}
@Test
public void testEmptyStore() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -715,15 +809,16 @@ public class TestHCatStorer extends HCat
driver.getResults(res);
driver.run("drop table junit_unparted");
Iterator<String> itr = res.iterator();
- Assert.assertFalse(itr.hasNext());
+ assertFalse(itr.hasNext());
}
@Test
public void testBagNStruct() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
String createTable = "create table junit_unparted(b string,a struct<a1:int>, arr_of_struct array<string>, " +
- "arr_of_struct2 array<struct<s1:string,s2:string>>, arr_of_struct3 array<struct<s3:string>>) stored as " + getStorageFormat();
+ "arr_of_struct2 array<struct<s1:string,s2:string>>, arr_of_struct3 array<struct<s3:string>>) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -746,17 +841,18 @@ public class TestHCatStorer extends HCat
driver.getResults(res);
driver.run("drop table junit_unparted");
Iterator<String> itr = res.iterator();
- Assert.assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
- Assert.assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
- Assert.assertFalse(itr.hasNext());
+ assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
+ assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
+ assertFalse(itr.hasNext());
}
@Test
public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -783,10 +879,10 @@ public class TestHCatStorer extends HCat
Iterator<String> itr = res.iterator();
String next = itr.next();
- Assert.assertEquals("0\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL", next );
- Assert.assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\ttrue\tbinary-data\tNULL", itr.next());
- Assert.assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tfalse\tbinary-data\tNULL", itr.next());
- Assert.assertFalse(itr.hasNext());
+ assertEquals("0\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL", next );
+ assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\ttrue\tbinary-data\tNULL", itr.next());
+ assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tfalse\tbinary-data\tNULL", itr.next());
+ assertFalse(itr.hasNext());
server.registerQuery("B = load 'junit_unparted' using " + HCatLoader.class.getName() + ";");
Iterator<Tuple> iter = server.openIterator("B");
@@ -797,21 +893,22 @@ public class TestHCatStorer extends HCat
if (t.get(6) == null) {
num5nulls++;
} else {
- Assert.assertTrue(t.get(6) instanceof DataByteArray);
+ assertTrue(t.get(6) instanceof DataByteArray);
}
- Assert.assertNull(t.get(7));
+ assertNull(t.get(7));
count++;
}
- Assert.assertEquals(3, count);
- Assert.assertEquals(1, num5nulls);
+ assertEquals(3, count);
+ assertEquals(1, num5nulls);
driver.run("drop table junit_unparted");
}
@Test
public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table junit_unparted");
- String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+ String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -841,19 +938,20 @@ public class TestHCatStorer extends HCat
for (int i = 1; i <= LOOP_SIZE; i++) {
String si = i + "";
for (int j = 1; j <= LOOP_SIZE; j++) {
- Assert.assertEquals(si + "\t" + j, itr.next());
+ assertEquals(si + "\t" + j, itr.next());
}
}
- Assert.assertFalse(itr.hasNext());
+ assertFalse(itr.hasNext());
}
@Test
public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
- " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+ " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
@@ -876,21 +974,22 @@ public class TestHCatStorer extends HCat
driver.run("select * from employee");
ArrayList<String> results = new ArrayList<String>();
driver.getResults(results);
- Assert.assertEquals(4, results.size());
+ assertEquals(4, results.size());
Collections.sort(results);
- Assert.assertEquals(inputData[0], results.get(0));
- Assert.assertEquals(inputData[1], results.get(1));
- Assert.assertEquals(inputData[2], results.get(2));
- Assert.assertEquals(inputData[3], results.get(3));
+ assertEquals(inputData[0], results.get(0));
+ assertEquals(inputData[1], results.get(1));
+ assertEquals(inputData[2], results.get(2));
+ assertEquals(inputData[3], results.get(3));
driver.run("drop table employee");
}
@Test
public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
- " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+ " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
@@ -913,21 +1012,22 @@ public class TestHCatStorer extends HCat
driver.run("select * from employee");
ArrayList<String> results = new ArrayList<String>();
driver.getResults(results);
- Assert.assertEquals(4, results.size());
+ assertEquals(4, results.size());
Collections.sort(results);
- Assert.assertEquals(inputData[0], results.get(0));
- Assert.assertEquals(inputData[1], results.get(1));
- Assert.assertEquals(inputData[2], results.get(2));
- Assert.assertEquals(inputData[3], results.get(3));
+ assertEquals(inputData[0], results.get(0));
+ assertEquals(inputData[1], results.get(1));
+ assertEquals(inputData[2], results.get(2));
+ assertEquals(inputData[3], results.get(3));
driver.run("drop table employee");
}
@Test
public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table if exists employee");
String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
- " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+ " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
@@ -947,15 +1047,16 @@ public class TestHCatStorer extends HCat
driver.run("select * from employee");
ArrayList<String> results = new ArrayList<String>();
driver.getResults(results);
- Assert.assertEquals(0, results.size());
+ assertEquals(0, results.size());
driver.run("drop table employee");
}
+
@Test
- public void testPartitionPublish()
- throws IOException, CommandNeedRetryException {
+ public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
driver.run("drop table ptn_fail");
- String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + getStorageFormat();
+ String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table.");
@@ -987,11 +1088,11 @@ public class TestHCatStorer extends HCat
ArrayList<String> res = new ArrayList<String>();
driver.getResults(res);
- Assert.assertEquals(0, res.size());
+ assertEquals(0, res.size());
// Make sure the partitions directory is not in hdfs.
- Assert.assertTrue((new File(TEST_WAREHOUSE_DIR + "/ptn_fail")).exists());
- Assert.assertFalse((new File(TEST_WAREHOUSE_DIR + "/ptn_fail/b=math"))
+ assertTrue((new File(TEST_WAREHOUSE_DIR + "/ptn_fail")).exists());
+ assertFalse((new File(TEST_WAREHOUSE_DIR + "/ptn_fail/b=math"))
.exists());
}
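
The conversion in this file (and in TestHCatLoaderComplexSchema above) follows the stock JUnit 4 Parameterized pattern: the runner invokes the @Parameterized.Parameters method once, instantiates the test class once per Object[] returned, passing the array's elements to the constructor, and runs every @Test against each instance. A self-contained illustration, with a hard-coded list standing in for StorageFormats.names():

import java.util.Arrays;
import java.util.Collection;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.junit.Assert.assertNotNull;

@RunWith(Parameterized.class)
public class StorageFormatParameterizedExample {
  /** One Object[] per run; each array holds that run's constructor arguments. */
  @Parameterized.Parameters
  public static Collection<Object[]> generateParameters() {
    return Arrays.asList(new Object[][] {{"RCFILE"}, {"ORC"}, {"PARQUETFILE"}});
  }

  private final String storageFormat;

  public StorageFormatParameterizedExample(String storageFormat) {
    this.storageFormat = storageFormat;
  }

  /** Executed three times, once per format above. */
  @Test
  public void testFormatIsSet() {
    assertNotNull(storageFormat);
  }
}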
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java Mon Oct 6 04:00:39 2014
@@ -18,18 +18,25 @@
*/
package org.apache.hive.hcatalog.pig;
+import com.google.common.collect.ImmutableSet;
+
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Map;
+import java.util.Set;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.hcatalog.common.HCatUtil;
@@ -41,13 +48,17 @@ import org.apache.pig.PigServer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assume.assumeTrue;
+@RunWith(Parameterized.class)
public class TestHCatStorerMulti {
public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(
- System.getProperty("user.dir") + "/build/test/data/" +
- TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
+ System.getProperty("user.dir") + "/build/test/data/" +
+ TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
@@ -57,9 +68,29 @@ public class TestHCatStorerMulti {
private static Map<Integer, Pair<Integer, String>> basicInputData;
- protected String storageFormat() {
- return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
- "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+ private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+ new HashMap<String, Set<String>>() {{
+ put(IOConstants.AVRO, new HashSet<String>() {{
+ add("testStoreBasicTable");
+ add("testStorePartitionedTable");
+ add("testStoreTableMulti");
+ }});
+ put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+ add("testStoreBasicTable");
+ add("testStorePartitionedTable");
+ add("testStoreTableMulti");
+ }});
+ }};
+
+ private String storageFormat;
+
+ @Parameterized.Parameters
+ public static Collection<Object[]> generateParameters() {
+ return StorageFormats.names();
+ }
+
+ public TestHCatStorerMulti(String storageFormat) {
+ this.storageFormat = storageFormat;
}
private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
@@ -72,7 +103,7 @@ public class TestHCatStorerMulti {
if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
createTable = createTable + "partitioned by (" + partitionedBy + ") ";
}
- createTable = createTable + "stored as " + storageFormat();
+ createTable = createTable + "stored as " + storageFormat;
int retCode = driver.run(createTable).getResponseCode();
if (retCode != 0) {
throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
@@ -85,6 +116,8 @@ public class TestHCatStorerMulti {
@Before
public void setUp() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
+
if (driver == null) {
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -105,6 +138,7 @@ public class TestHCatStorerMulti {
@Test
public void testStoreBasicTable() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
createTable(BASIC_TABLE, "a int, b string");
populateBasicFile();
@@ -124,6 +158,7 @@ public class TestHCatStorerMulti {
@Test
public void testStorePartitionedTable() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
populateBasicFile();
@@ -147,6 +182,7 @@ public class TestHCatStorerMulti {
@Test
public void testStoreTableMulti() throws Exception {
+ assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
createTable(BASIC_TABLE, "a int, b string");
createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
Modified: hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java (original)
+++ hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java Mon Oct 6 04:00:39 2014
@@ -214,7 +214,7 @@ public class NotificationListener extend
HiveConf conf = handler.getHiveConf();
Table newTbl;
try {
- newTbl = handler.get_table(tbl.getDbName(), tbl.getTableName())
+ newTbl = handler.get_table_core(tbl.getDbName(), tbl.getTableName())
.deepCopy();
newTbl.getParameters().put(
HCatConstants.HCAT_MSGBUS_TOPIC_NAME,
Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml Mon Oct 6 04:00:39 2014
@@ -77,6 +77,11 @@
shipped to the target node in the cluster to execute a Pig job which uses
HCat, a Hive query, etc.</description>
</property>
+ <property>
+ <name>templeton.sqoop.path</name>
+ <value>${env.SQOOP_HOME}/bin/sqoop</value>
+ <description>The path to the Sqoop executable.</description>
+ </property>
<property>
<name>templeton.controller.mr.child.opts</name>
Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh Mon Oct 6 04:00:39 2014
@@ -30,7 +30,7 @@ export PIG_VERSION=0.12.2-SNAPSHOT
export PROJ_HOME=/Users/${USER}/dev/hive
export HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
-#export SQOOP_HOME=/
+export SQOOP_HOME=/Users/${USER}/dev/sqoop-1.4.4.bin__hadoop-2.0.4-alpha
#Make sure Pig is built for the Hadoop version you are running
export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build
Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/doas.conf
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/doas.conf?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/doas.conf (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/doas.conf Mon Oct 6 04:00:39 2014
@@ -109,7 +109,7 @@ $cfg =
'method' => 'GET',
'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/default/table/:UNAME:_doastab2/partition?user.name=:UNAME:&doAs=:DOAS:',
'status_code' => 500,
- 'json_field_substr_match' => {'error' => 'FAILED: AuthorizationException java\.security\.AccessControlException: action READ not permitted on path .* for user :DOAS:'},
+ 'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: Permission denied: user=:DOAS:, access=READ'},
},
{
@@ -118,7 +118,7 @@ $cfg =
'method' => 'DELETE',
'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/default/table/:UNAME:_doastab2?user.name=:UNAME:&doAs=:DOAS:',
'status_code' => 500,
- 'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: action WRITE not permitted on path .* for user :DOAS:'},
+ 'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: Permission denied: user=:DOAS:, access=READ'},
},
{
#describe the table....
Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf Mon Oct 6 04:00:39 2014
@@ -375,6 +375,13 @@ $cfg =
{
'method' => 'DELETE',
'format_header' => 'Content-Type: application/json',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/hcatperms_:TNUM:/table/permstable_:TNUM:',
+ 'user_name' => ':UNAME_GROUP:',
+ 'status_code' => 200,
+ },
+ {
+ 'method' => 'DELETE',
+ 'format_header' => 'Content-Type: application/json',
'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/hcatperms_:TNUM:?ifExists=true&option=cascade',
'user_name' => ':UNAME:',
'status_code' => 200,
@@ -677,9 +684,7 @@ $cfg =
'format_header' => 'Content-Type: application/json',
'user_name' => ':UNAME_OTHER:',
'status_code' => 500,
- 'json_field_substr_match' => {'error' => 'FAILED: AuthorizationException .*\.security\.AccessControlException: action READ not permitted on path .* for user :UNAME_OTHER:'},
-
-
+ 'json_field_substr_match' => {'error' => 'AccessControlException: Permission denied: user=:UNAME_OTHER:, access=READ'},
},
Modified: hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java (original)
+++ hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java Mon Oct 6 04:00:39 2014
@@ -168,7 +168,7 @@ public class HCatTable {
newTable.setTableType(TableType.MANAGED_TABLE.toString());
}
- if (this.comment != null) {
+ if (StringUtils.isNotBlank(this.comment)) {
newTable.putToParameters("comment", comment);
}
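
The one-line change above tightens the guard from a null check to StringUtils.isNotBlank, so an empty or whitespace-only comment is no longer copied into the table parameters. Assuming the commons-lang StringUtils used elsewhere in this class, the behavior difference is:

import org.apache.commons.lang.StringUtils;

public class IsNotBlankDemo {
  public static void main(String[] args) {
    // Values the old (comment != null) guard let through:
    System.out.println(StringUtils.isNotBlank(""));           // false
    System.out.println(StringUtils.isNotBlank("   "));        // false
    // Unchanged cases:
    System.out.println(StringUtils.isNotBlank(null));         // false
    System.out.println(StringUtils.isNotBlank("a comment"));  // true
  }
}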
Modified: hive/branches/spark/hcatalog/webhcat/svr/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/pom.xml?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/pom.xml Mon Oct 6 04:00:39 2014
@@ -49,6 +49,11 @@
<!-- inter-project -->
<dependency>
<groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<version>${jersey.version}</version>
</dependency>
Modified: hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Mon Oct 6 04:00:39 2014
@@ -32,7 +32,7 @@ public class QueueStatusBean {
public JobStatus status;
public JobProfile profile;
- public String id;
+ public final String id;
public String parentId;
public String percentComplete;
public Long exitValue;
@@ -40,8 +40,11 @@ public class QueueStatusBean {
public String callback;
public String completed;
public Map<String, Object> userargs;
+ public String msg;
- public QueueStatusBean() {
+ public QueueStatusBean(String jobId, String errMsg) {
+ this.id = jobId;
+ this.msg = errMsg;
}
/**
Modified: hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Mon Oct 6 04:00:39 2014
@@ -1008,8 +1008,15 @@ public class Server {
jobItem.id = job;
if (showDetails) {
StatusDelegator sd = new StatusDelegator(appConf);
- QueueStatusBean statusBean = sd.run(getDoAsUser(), job);
- jobItem.detail = statusBean;
+ try {
+ jobItem.detail = sd.run(getDoAsUser(), job);
+ }
+ catch(Exception ex) {
+        /* If we could not get the status for some reason, log it and send an empty status
+         * back with just the ID, so that the caller knows to look in the log file. */
+ LOG.info("Failed to get status detail for jobId='" + job + "'", ex);
+ jobItem.detail = new QueueStatusBean(job, "Failed to retrieve status; see WebHCat logs");
+ }
}
detailList.add(jobItem);
}
Modified: hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Mon Oct 6 04:00:39 2014
@@ -320,6 +320,6 @@ public class TestTempletonUtils {
result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
Assert.assertNotNull(result);
result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
- Assert.assertNull(result);
+ Assert.assertNull("unexpectedly found jar for HadoopShimsSecure class: " + result, result);
}
}
Modified: hive/branches/spark/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java (original)
+++ hive/branches/spark/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java Mon Oct 6 04:00:39 2014
@@ -37,35 +37,51 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests information retrieved from hooks, in Kerberos mode.
*/
public class TestHs2HooksWithMiniKdc {
+ private static final Logger LOG = LoggerFactory.getLogger(TestHs2HooksWithMiniKdc.class);
+
public static class PostExecHook implements ExecuteWithHookContext {
- public static String userName = null;
- public static String ipAddress = null;
+ private static String userName;
+ private static String ipAddress;
+ private static String operation;
+ private static Throwable error;
public void run(HookContext hookContext) {
- if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
- Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
- ipAddress = hookContext.getIpAddress();
- Assert.assertNotNull(hookContext.getUserName(), "Username is null");
- userName = hookContext.getUserName();
+ try {
+ if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
+ ipAddress = hookContext.getIpAddress();
+ userName = hookContext.getUserName();
+ operation = hookContext.getOperationName();
+ }
+ } catch (Throwable t) {
+ LOG.error("Error in PostExecHook: " + t, t);
+ error = t;
}
}
}
public static class PreExecHook implements ExecuteWithHookContext {
- public static String userName = null;
- public static String ipAddress = null;
+ private static String userName;
+ private static String ipAddress;
+ private static String operation;
+ private static Throwable error;
public void run(HookContext hookContext) {
- if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
- Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
- ipAddress = hookContext.getIpAddress();
- Assert.assertNotNull(hookContext.getUserName(), "Username is null");
- userName = hookContext.getUserName();
+ try {
+ if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
+ ipAddress = hookContext.getIpAddress();
+ userName = hookContext.getUserName();
+ operation = hookContext.getOperationName();
+ }
+ } catch (Throwable t) {
+ LOG.error("Error in PreExecHook: " + t, t);
+ error = t;
}
}
}
@@ -108,22 +124,36 @@ public class TestHs2HooksWithMiniKdc {
/**
* Test get IpAddress and username from hook.
- * @throws Exception
*/
@Test
- public void testIpUserName() throws Exception {
+ public void testIpUserName() throws Throwable {
miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
Statement stmt = hs2Conn.createStatement();
+ stmt.executeQuery("show databases");
stmt.executeQuery("show tables");
+ Throwable error = PostExecHook.error;
+ if (error != null) {
+ throw error;
+ }
+ error = PreExecHook.error;
+ if (error != null) {
+ throw error;
+ }
+ Assert.assertNotNull("ipAddress is null", PostExecHook.ipAddress);
+ Assert.assertNotNull("userName is null", PostExecHook.userName);
+ Assert.assertNotNull("operation is null", PostExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
- Assert.assertNotNull(PostExecHook.ipAddress);
- Assert.assertTrue(PostExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertTrue("unexpected ipAddress: " + PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
+ Assert.assertNotNull("ipAddress is null", PreExecHook.ipAddress);
+ Assert.assertNotNull("userName is null", PreExecHook.userName);
+ Assert.assertNotNull("operation is null", PreExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
- Assert.assertNotNull(PreExecHook.ipAddress);
- Assert.assertTrue(PreExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertTrue("unexpected ipAddress: " + PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
}
}
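Both hooks tests (this file and TestHs2Hooks below) follow the same pattern: the hooks execute inside the server, so an assertion failure thrown from run() never reaches the JUnit thread. The rewrite therefore records the observed values plus any Throwable in static fields and rethrows from the test. A minimal sketch of that capture-and-rethrow pattern, assuming a hook interface with a single run() method; CapturingHook and lookupUser are illustrative names, not Hive APIs:

  import java.util.concurrent.atomic.AtomicReference;

  public class CapturingHook {
    // Written on the server thread, read on the test thread.
    static final AtomicReference<Throwable> error = new AtomicReference<>();
    static volatile String userName;

    public void run(Object hookContext) {
      try {
        userName = lookupUser(hookContext);
      } catch (Throwable t) {
        error.set(t);  // stash it; throwing here would be silently swallowed
      }
    }

    static String lookupUser(Object hookContext) {
      return String.valueOf(hookContext);  // stand-in for getUserName()
    }

    /** Call from the test thread after the query has run. */
    static void rethrowIfFailed() throws Throwable {
      Throwable t = error.get();
      if (t != null) {
        throw t;
      }
    }
  }

The diff uses plain static fields; volatile/AtomicReference, as sketched, additionally avoids visibility issues when the hook fires on a different thread than the test.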
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java Mon Oct 6 04:00:39 2014
@@ -20,6 +20,7 @@
package org.apache.hadoop.hive.hooks;
import java.util.Properties;
+import java.sql.Statement;
import junit.framework.Assert;
@@ -32,38 +33,52 @@ import org.apache.hive.service.server.Hi
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests information retrieved from hooks.
*/
public class TestHs2Hooks {
-
+ private static final Logger LOG = LoggerFactory.getLogger(TestHs2Hooks.class);
private static HiveServer2 hiveServer2;
- public static class PreExecHook implements ExecuteWithHookContext {
- public static String userName = null;
- public static String ipAddress = null;
+ public static class PostExecHook implements ExecuteWithHookContext {
+ private static String userName;
+ private static String ipAddress;
+ private static String operation;
+ private static Throwable error;
public void run(HookContext hookContext) {
- if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
- Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
- ipAddress = hookContext.getIpAddress();
- Assert.assertNotNull(hookContext.getUserName(), "Username is null");
- userName = hookContext.getUserName();
+ try {
+ if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
+ ipAddress = hookContext.getIpAddress();
+ userName = hookContext.getUserName();
+ operation = hookContext.getOperationName();
+ }
+ } catch (Throwable t) {
+ LOG.error("Error in PostExecHook: " + t, t);
+ error = t;
}
}
}
- public static class PostExecHook implements ExecuteWithHookContext {
- public static String userName = null;
- public static String ipAddress = null;
+ public static class PreExecHook implements ExecuteWithHookContext {
+ private static String userName;
+ private static String ipAddress;
+ private static String operation;
+ private static Throwable error;
public void run(HookContext hookContext) {
- if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
- Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
- ipAddress = hookContext.getIpAddress();
- Assert.assertNotNull(hookContext.getUserName(), "Username is null");
- userName = hookContext.getUserName();
+ try {
+ if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
+ ipAddress = hookContext.getIpAddress();
+ userName = hookContext.getUserName();
+ operation = hookContext.getOperationName();
+ }
+ } catch (Throwable t) {
+ LOG.error("Error in PreExecHook: " + t, t);
+ error = t;
}
}
}
@@ -94,26 +109,39 @@ public class TestHs2Hooks {
/**
* Test get IpAddress and username from hook.
- * @throws Exception
*/
@Test
- public void testIpUserName() throws Exception {
+ public void testIpUserName() throws Throwable {
Properties connProp = new Properties();
connProp.setProperty("user", System.getProperty("user.name"));
connProp.setProperty("password", "");
HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
- connection.createStatement().execute("show tables");
+ Statement stmt = connection.createStatement();
+ stmt.executeQuery("show databases");
+ stmt.executeQuery("show tables");
+ Throwable error = PostExecHook.error;
+ if (error != null) {
+ throw error;
+ }
+ error = PreExecHook.error;
+ if (error != null) {
+ throw error;
+ }
Assert.assertEquals(System.getProperty("user.name"), PostExecHook.userName);
- Assert.assertNotNull(PostExecHook.ipAddress);
- Assert.assertTrue(PostExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertNotNull("ipAddress is null", PostExecHook.ipAddress);
+ Assert.assertNotNull("userName is null", PostExecHook.userName);
+ Assert.assertNotNull("operation is null", PostExecHook.operation);
+ Assert.assertTrue("unexpected ipAddress: " + PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
Assert.assertEquals(System.getProperty("user.name"), PreExecHook.userName);
- Assert.assertNotNull(PreExecHook.ipAddress);
- Assert.assertTrue(PreExecHook.ipAddress.contains("127.0.0.1"));
-
- connection.close();
+ Assert.assertNotNull("ipAddress is null", PreExecHook.ipAddress);
+ Assert.assertNotNull("userName is null", PreExecHook.userName);
+ Assert.assertNotNull("operation is null", PreExecHook.operation);
+ Assert.assertTrue("unexpected ipAddress: " + PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
}
}
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Mon Oct 6 04:00:39 2014
@@ -1115,6 +1115,12 @@ public class TestJdbcDriver extends Test
}
+ public void testInvalidUrl() throws SQLException {
+ HiveDriver driver = new HiveDriver();
+
+ assertNull(driver.connect("jdbc:hive2://localhost:1000", null));
+ }
+
private static void assertDpi(DriverPropertyInfo dpi, String name,
String value) {
assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java Mon Oct 6 04:00:39 2014
@@ -76,6 +76,21 @@ public class TestHiveMetaStoreTxns {
}
@Test
+ public void testOpenTxnNotExcluded() throws Exception {
+ List<Long> tids = client.openTxns("me", 3).getTxn_ids();
+ Assert.assertEquals(1L, (long) tids.get(0));
+ Assert.assertEquals(2L, (long) tids.get(1));
+ Assert.assertEquals(3L, (long) tids.get(2));
+ client.rollbackTxn(1);
+ client.commitTxn(2);
+ ValidTxnList validTxns = client.getValidTxns(3);
+ Assert.assertFalse(validTxns.isTxnCommitted(1));
+ Assert.assertTrue(validTxns.isTxnCommitted(2));
+ Assert.assertTrue(validTxns.isTxnCommitted(3));
+ Assert.assertFalse(validTxns.isTxnCommitted(4));
+ }
+
+ @Test
public void testTxnRange() throws Exception {
ValidTxnList validTxns = client.getValidTxns();
Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
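The new test pins down the ValidTxnList semantics the metastore hands out: ids above the high-water mark are not committed, aborted transactions are not committed, and the caller's own transaction (the id passed to getValidTxns) is treated as valid even though it is still open, hence the test name. A toy model of that rule under those assumptions; this is not the Hive implementation:

  import java.util.Collections;
  import java.util.Set;

  public class ValidTxnToyModel {
    /** Toy version of ValidTxnList.isTxnCommitted for the scenario above. */
    static boolean isTxnCommitted(long txn, long callerTxn, long highWaterMark,
        Set<Long> abortedOrOpen) {
      if (txn > highWaterMark) {
        return false;                      // txn 4: not yet allocated
      }
      if (txn == callerTxn) {
        return true;                       // txn 3: the caller's own transaction
      }
      return !abortedOrOpen.contains(txn); // txn 1 aborted -> false; txn 2 -> true
    }

    public static void main(String[] args) {
      Set<Long> excluded = Collections.singleton(1L);  // txn 1 was rolled back
      System.out.println(isTxnCommitted(1, 3, 3, excluded)); // false
      System.out.println(isTxnCommitted(2, 3, 3, excluded)); // true
      System.out.println(isTxnCommitted(3, 3, 3, excluded)); // true
      System.out.println(isTxnCommitted(4, 3, 3, excluded)); // false
    }
  }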
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java Mon Oct 6 04:00:39 2014
@@ -193,43 +193,39 @@ public class TestMetaStoreEventListener
driver.run("create database " + dbName);
listSize++;
+ PreCreateDatabaseEvent preDbEvent = (PreCreateDatabaseEvent)(preNotifyList.get(preNotifyList.size() - 1));
Database db = msc.getDatabase(dbName);
assertEquals(listSize, notifyList.size());
- assertEquals(listSize, preNotifyList.size());
+ assertEquals(listSize + 1, preNotifyList.size());
+ validateCreateDb(db, preDbEvent.getDatabase());
CreateDatabaseEvent dbEvent = (CreateDatabaseEvent)(notifyList.get(listSize - 1));
assert dbEvent.getStatus();
validateCreateDb(db, dbEvent.getDatabase());
- PreCreateDatabaseEvent preDbEvent = (PreCreateDatabaseEvent)(preNotifyList.get(listSize - 1));
- validateCreateDb(db, preDbEvent.getDatabase());
driver.run("use " + dbName);
driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
+ PreCreateTableEvent preTblEvent = (PreCreateTableEvent)(preNotifyList.get(preNotifyList.size() - 1));
listSize++;
Table tbl = msc.getTable(dbName, tblName);
+ validateCreateTable(tbl, preTblEvent.getTable());
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
CreateTableEvent tblEvent = (CreateTableEvent)(notifyList.get(listSize - 1));
assert tblEvent.getStatus();
validateCreateTable(tbl, tblEvent.getTable());
- PreCreateTableEvent preTblEvent = (PreCreateTableEvent)(preNotifyList.get(listSize - 1));
- validateCreateTable(tbl, preTblEvent.getTable());
-
driver.run("alter table tmptbl add partition (b='2011')");
listSize++;
- Partition part = msc.getPartition("hive2038", "tmptbl", "b=2011");
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreAddPartitionEvent prePartEvent = (PreAddPartitionEvent)(preNotifyList.get(preNotifyList.size() - 1));
AddPartitionEvent partEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
assert partEvent.getStatus();
+ Partition part = msc.getPartition("hive2038", "tmptbl", "b=2011");
validateAddPartition(part, partEvent.getPartitions().get(0));
validateTableInAddPartition(tbl, partEvent.getTable());
-
- PreAddPartitionEvent prePartEvent = (PreAddPartitionEvent)(preNotifyList.get(listSize-1));
validateAddPartition(part, prePartEvent.getPartitions().get(0));
// Test adding multiple partitions in a single partition-set, atomically.
@@ -254,7 +250,8 @@ public class TestMetaStoreEventListener
driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'"));
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreAlterPartitionEvent preAlterPartEvent =
+ (PreAlterPartitionEvent)preNotifyList.get(preNotifyList.size() - 1);
//the partition did not change,
// so the new partition should be similar to the original partition
@@ -266,40 +263,39 @@ public class TestMetaStoreEventListener
alterPartEvent.getOldPartition().getTableName(),
alterPartEvent.getOldPartition().getValues(), alterPartEvent.getNewPartition());
- PreAlterPartitionEvent preAlterPartEvent =
- (PreAlterPartitionEvent)preNotifyList.get(listSize - 1);
+
validateAlterPartition(origP, origP, preAlterPartEvent.getDbName(),
preAlterPartEvent.getTableName(), preAlterPartEvent.getNewPartition().getValues(),
preAlterPartEvent.getNewPartition());
List<String> part_vals = new ArrayList<String>();
part_vals.add("c=2012");
+ // pre-events no longer track listSize, so compute the expected size directly
+ int preEventListSize = preNotifyList.size() + 1;
Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ assertEquals(preNotifyList.size(), preEventListSize);
AddPartitionEvent appendPartEvent =
(AddPartitionEvent)(notifyList.get(listSize-1));
validateAddPartition(newPart, appendPartEvent.getPartitions().get(0));
PreAddPartitionEvent preAppendPartEvent =
- (PreAddPartitionEvent)(preNotifyList.get(listSize-1));
+ (PreAddPartitionEvent)(preNotifyList.get(preNotifyList.size() - 1));
validateAddPartition(newPart, preAppendPartEvent.getPartitions().get(0));
driver.run(String.format("alter table %s rename to %s", tblName, renamed));
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreAlterTableEvent preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
Table renamedTable = msc.getTable(dbName, renamed);
AlterTableEvent alterTableE = (AlterTableEvent) notifyList.get(listSize-1);
assert alterTableE.getStatus();
validateAlterTable(tbl, renamedTable, alterTableE.getOldTable(), alterTableE.getNewTable());
-
- PreAlterTableEvent preAlterTableE = (PreAlterTableEvent) preNotifyList.get(listSize-1);
validateAlterTable(tbl, renamedTable, preAlterTableE.getOldTable(),
preAlterTableE.getNewTable());
@@ -307,20 +303,17 @@ public class TestMetaStoreEventListener
driver.run(String.format("alter table %s rename to %s", renamed, tblName));
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
driver.run(String.format("alter table %s ADD COLUMNS (c int)", tblName));
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
Table altTable = msc.getTable(dbName, tblName);
alterTableE = (AlterTableEvent) notifyList.get(listSize-1);
assert alterTableE.getStatus();
validateAlterTableColumns(tbl, altTable, alterTableE.getOldTable(), alterTableE.getNewTable());
-
- preAlterTableE = (PreAlterTableEvent) preNotifyList.get(listSize-1);
validateAlterTableColumns(tbl, altTable, preAlterTableE.getOldTable(),
preAlterTableE.getNewTable());
@@ -329,7 +322,6 @@ public class TestMetaStoreEventListener
msc.markPartitionForEvent("hive2038", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
LoadPartitionDoneEvent partMarkEvent = (LoadPartitionDoneEvent)notifyList.get(listSize - 1);
assert partMarkEvent.getStatus();
@@ -337,46 +329,42 @@ public class TestMetaStoreEventListener
partMarkEvent.getPartitionName());
PreLoadPartitionDoneEvent prePartMarkEvent =
- (PreLoadPartitionDoneEvent)preNotifyList.get(listSize - 1);
+ (PreLoadPartitionDoneEvent)preNotifyList.get(preNotifyList.size() - 1);
validateLoadPartitionDone("tmptbl", kvs, prePartMarkEvent.getTableName(),
prePartMarkEvent.getPartitionName());
driver.run(String.format("alter table %s drop partition (b='2011')", tblName));
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreDropPartitionEvent preDropPart =
+ (PreDropPartitionEvent) preNotifyList.get(preNotifyList.size() - 1);
DropPartitionEvent dropPart = (DropPartitionEvent)notifyList.get(listSize - 1);
assert dropPart.getStatus();
validateDropPartition(part, dropPart.getPartition());
validateTableInDropPartition(tbl, dropPart.getTable());
- PreDropPartitionEvent preDropPart = (PreDropPartitionEvent)preNotifyList.get(listSize - 1);
validateDropPartition(part, preDropPart.getPartition());
validateTableInDropPartition(tbl, preDropPart.getTable());
driver.run("drop table " + tblName);
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreDropTableEvent preDropTbl = (PreDropTableEvent)preNotifyList.get(preNotifyList.size() - 1);
DropTableEvent dropTbl = (DropTableEvent)notifyList.get(listSize-1);
assert dropTbl.getStatus();
validateDropTable(tbl, dropTbl.getTable());
-
- PreDropTableEvent preDropTbl = (PreDropTableEvent)preNotifyList.get(listSize-1);
validateDropTable(tbl, preDropTbl.getTable());
driver.run("drop database " + dbName);
listSize++;
assertEquals(notifyList.size(), listSize);
- assertEquals(preNotifyList.size(), listSize);
+ PreDropDatabaseEvent preDropDB = (PreDropDatabaseEvent)preNotifyList.get(preNotifyList.size() - 1);
DropDatabaseEvent dropDB = (DropDatabaseEvent)notifyList.get(listSize-1);
assert dropDB.getStatus();
validateDropDb(db, dropDB.getDatabase());
-
- PreDropDatabaseEvent preDropDB = (PreDropDatabaseEvent)preNotifyList.get(listSize-1);
validateDropDb(db, preDropDB.getDatabase());
SetProcessor.setVariable("metaconf:hive.metastore.try.direct.sql", "false");
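The common thread through these hunks: pre-events no longer arrive in lockstep with post-events, because read-only metastore calls such as msc.getDatabase() and msc.getTable() also fire pre-events. The test therefore stops asserting preNotifyList.size() == listSize and instead grabs the most recent pre-event immediately after the operation that produced it. That access pattern could be factored into a small helper, sketched here; lastEvent is hypothetical, not part of the test:

  import java.util.List;

  public final class EventLists {
    private EventLists() {}

    /** Returns the most recently fired event, cast to the expected type. */
    static <T> T lastEvent(List<?> events, Class<T> expectedType) {
      return expectedType.cast(events.get(events.size() - 1));
    }
  }

  // Usage, right after the DDL that fires the event:
  //   PreCreateTableEvent preTblEvent =
  //       EventLists.lastEvent(preNotifyList, PreCreateTableEvent.class);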
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java Mon Oct 6 04:00:39 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore
import junit.framework.TestCase;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.util.StringUtils;
@@ -49,6 +50,8 @@ public class TestRemoteHiveMetaStoreIpAd
int port = MetaStoreUtils.findFreePort();
System.out.println("Starting MetaStore Server on port " + port);
+ System.setProperty(ConfVars.METASTORE_EVENT_LISTENERS.varname,
+ IpAddressListener.class.getName());
MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
isServerStarted = true;
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Mon Oct 6 04:00:39 2014
@@ -139,7 +139,7 @@ public class TestHiveHistory extends Tes
SessionState.start(ss);
- String cmd = "select a.key from src a";
+ String cmd = "select a.key+1 from src a";
Driver d = new Driver(conf);
int ret = d.run(cmd).getResponseCode();
if (ret != 0) {