Posted to commits@hive.apache.org by ha...@apache.org on 2013/07/31 00:22:46 UTC
svn commit: r1508669 [2/39] - in /hive/branches/vectorization: ./
common/src/java/org/apache/hadoop/hive/conf/ conf/
contrib/src/test/results/clientpositive/ data/files/ eclipse-templates/
hcatalog/build-support/ant/ hcatalog/core/src/main/java/org/apa...
Modified: hive/branches/vectorization/hcatalog/build-support/ant/checkstyle.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build-support/ant/checkstyle.xml?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build-support/ant/checkstyle.xml (original)
+++ hive/branches/vectorization/hcatalog/build-support/ant/checkstyle.xml Tue Jul 30 22:22:35 2013
@@ -61,7 +61,11 @@
<exclude name="src/test/e2e/hcatalog/testdist/**"/> <!-- Test run results -->
<exclude name="src/test/e2e/hcatalog/tar/**"/> <!-- Test build area -->
<exclude name="src/test/e2e/hcatalog/udfs/java/*.jar"/> <!-- Test build area -->
- <exclude name="src/test/e2e/hcatalog/hcattests.tar"/> <!-- Test build artifact -->
+ <exclude name="src/test/e2e/hcatalog/hcattests.tar"/> <!-- Test build artifact -->
+ <exclude name="**/*.iml"/><!--intelliJ files-->
+ <exclude name="src/test/e2e/templeton/testdist/**"/> <!-- Test run results -->
+ <exclude name="src/test/e2e/templeton/tar/**"/> <!-- Test build area -->
+ <exclude name="src/test/e2e/templeton/hcattests.tar"/> <!-- Test build artifact -->
</fileset>
<formatter type="plain"/>
<formatter type="xml" toFile="${build.dir}/checkstyle/checkstyle_result.xml"/>
Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java Tue Jul 30 22:22:35 2013
@@ -266,28 +266,29 @@ public class HCatRecordSerDe implements
private static Object serializePrimitiveField(Object field,
ObjectInspector fieldObjectInspector) {
- if (field != null && HCatContext.INSTANCE.getConf().isPresent()) {
+ Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
+ if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
Configuration conf = HCatContext.INSTANCE.getConf().get();
- if (field instanceof Boolean &&
+ if (f instanceof Boolean &&
conf.getBoolean(
HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
- return ((Boolean) field) ? 1 : 0;
- } else if (field instanceof Short &&
+ return ((Boolean) f) ? 1 : 0;
+ } else if (f instanceof Short &&
conf.getBoolean(
HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
- return new Integer((Short) field);
- } else if (field instanceof Byte &&
+ return new Integer((Short) f);
+ } else if (f instanceof Byte &&
conf.getBoolean(
HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
- return new Integer((Byte) field);
+ return new Integer((Byte) f);
}
}
- return ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
+ return f;
}
/**
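
For illustration: the hunk above unwraps the field to its Java primitive object first and only then considers type promotion (presumably because with non-Java object inspectors the raw field is a Writable, so the instanceof checks never matched); together with the HCatSchemaUtils change below, the conf-driven promotions now happen only at serialization time. A minimal standalone sketch of the promotion rules, assuming the two boolean conf flags have already been read (method name is hypothetical):

    // Sketch only: mirrors the promotion rules in serializePrimitiveField.
    static Object promote(Object f, boolean boolToInt, boolean promoteTinySmallInt) {
      if (f instanceof Boolean && boolToInt) {
        return ((Boolean) f) ? 1 : 0;           // boolean -> 0/1 int
      } else if (f instanceof Short && promoteTinySmallInt) {
        return Integer.valueOf((Short) f);      // smallint -> int
      } else if (f instanceof Byte && promoteTinySmallInt) {
        return Integer.valueOf((Byte) f);       // tinyint -> int
      }
      return f;                                 // everything else passes through
    }
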
Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java Tue Jul 30 22:22:35 2013
@@ -31,8 +31,6 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hcatalog.common.HCatConstants;
-import org.apache.hcatalog.common.HCatContext;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.schema.HCatFieldSchema.Type;
@@ -140,17 +138,9 @@ public class HCatSchemaUtils {
private static Type getPrimitiveHType(TypeInfo basePrimitiveTypeInfo) {
switch (((PrimitiveTypeInfo) basePrimitiveTypeInfo).getPrimitiveCategory()) {
case BOOLEAN:
- return (HCatContext.INSTANCE.getConf().isPresent() &&
- HCatContext.INSTANCE.getConf().get().getBoolean(
- HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
- HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) ?
- Type.INT : Type.BOOLEAN;
+ return Type.BOOLEAN;
case BYTE:
- return (HCatContext.INSTANCE.getConf().isPresent() &&
- HCatContext.INSTANCE.getConf().get().getBoolean(
- HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
- HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) ?
- Type.INT : Type.TINYINT;
+ return Type.TINYINT;
case DOUBLE:
return Type.DOUBLE;
case FLOAT:
@@ -160,11 +150,7 @@ public class HCatSchemaUtils {
case LONG:
return Type.BIGINT;
case SHORT:
- return (HCatContext.INSTANCE.getConf().isPresent() &&
- HCatContext.INSTANCE.getConf().get().getBoolean(
- HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
- HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) ?
- Type.INT : Type.SMALLINT;
+ return Type.SMALLINT;
case STRING:
return Type.STRING;
case BINARY:
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java Tue Jul 30 22:22:35 2013
@@ -104,6 +104,10 @@ class PigHCatUtil {
}
}
+ static public boolean pigHasBooleanSupport(){
+ return pigHasBooleanSupport;
+ }
+
static public Pair<String, String> getDBTableNames(String location) throws IOException {
// the location string will be of the form:
// <database name>.<table name> - parse it and
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java Tue Jul 30 22:22:35 2013
@@ -65,7 +65,7 @@ public class TestHCatLoader extends Test
private static int guardTestCount = 6; // ugh, instantiate using introspection in guardedSetupBeforeClass
private static boolean setupHasRun = false;
-
+
private static Map<Integer, Pair<Integer, String>> basicInputData;
protected String storageFormat() {
@@ -413,7 +413,7 @@ public class TestHCatLoader extends Test
File inputDataDir = new File(inputFileName).getParentFile();
inputDataDir.mkdir();
- String[] lines = new String[]{"llama\t1", "alpaca\t0"};
+ String[] lines = new String[]{"llama\ttrue", "alpaca\tfalse"};
HcatTestUtils.createTestDataFile(inputFileName, lines);
assertEquals(0, driver.run("drop table if exists " + tbl).getResponseCode());
@@ -433,13 +433,15 @@ public class TestHCatLoader extends Test
assertEquals("a", schema.getField(0).alias);
assertEquals(DataType.CHARARRAY, schema.getField(0).type);
assertEquals("b", schema.getField(1).alias);
- assertEquals(DataType.INTEGER, schema.getField(1).type);
+ if (PigHCatUtil.pigHasBooleanSupport()){
+ assertEquals(DataType.BOOLEAN, schema.getField(1).type);
+ } else {
+ assertEquals(DataType.INTEGER, schema.getField(1).type);
+ }
Iterator<Tuple> iterator = server.openIterator("data");
Tuple t = iterator.next();
assertEquals("llama", t.get(0));
- // TODO: Figure out how to load a text file into Hive with boolean columns. This next assert
- // passes because data was loaded as integers, not because it was converted.
assertEquals(1, t.get(1));
t = iterator.next();
assertEquals("alpaca", t.get(0));
Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java Tue Jul 30 22:22:35 2013
@@ -60,14 +60,18 @@ public class TestHCatLoaderComplexSchema
driver.run("drop table " + tablename);
}
+ protected String storageFormat() {
+ return "RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
+ "'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver')";
+ }
+
private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
String createTable;
createTable = "create table " + tablename + "(" + schema + ") ";
if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
createTable = createTable + "partitioned by (" + partitionedBy + ") ";
}
- createTable = createTable + "stored as RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
- "'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver') ";
+ createTable = createTable + "stored as " + storageFormat();
LOG.info("Creating table:\n {}", createTable);
CommandProcessorResponse result = driver.run(createTable);
int retCode = result.getResponseCode();
Modified: hive/branches/vectorization/hcatalog/src/test/e2e/templeton/README.txt
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/templeton/README.txt?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/templeton/README.txt Tue Jul 30 22:22:35 2013
@@ -150,3 +150,14 @@ Also useful to add to conf/hadoop-env.sh
export HADOOP_OPTS="-Djava.security.krb5.realm=OX.AC.UK -Djava.security.krb5.kdc=kdc0.ox.ac.uk:kdc1.ox.ac.uk"
to prevent warning about SCDynamicStore which may throw some tests off
(http://stackoverflow.com/questions/7134723/hadoop-on-osx-unable-to-load-realm-info-from-scdynamicstore)
+
+
+Performance
+-----------
+It's a good idea to set fork.factor.conf.file={number of .conf files} and fork.factor.group to something > 1
+(see build.xml) to make these tests run faster. If doing this, make sure the Hadoop cluster has
+enough map slots (10?) (mapred.tasktracker.map.tasks.maximum); otherwise test parallelism won't help.
+
+Adding Tests
+------------
+ToDo: add some guidelines
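
For illustration, a hypothetical invocation with both fork factors raised (the property names come from the README text above; the values and the exact ant target are made up):

    ant test -Dfork.factor.conf.file=4 -Dfork.factor.group=2
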
Modified: hive/branches/vectorization/hcatalog/src/test/e2e/templeton/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/templeton/build.xml?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/templeton/build.xml (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/templeton/build.xml Tue Jul 30 22:22:35 2013
@@ -107,6 +107,7 @@
<arg value="${basedir}/tests/serverstatus.conf"/>
<arg value="${basedir}/tests/ddl.conf"/>
<arg value="${basedir}/tests/jobsubmission.conf"/>
+ <arg value="${basedir}/tests/jobsubmission2.conf"/>
</exec>
</target>
Modified: hive/branches/vectorization/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Tue Jul 30 22:22:35 2013
@@ -681,7 +681,12 @@ sub compare
#try to get the call back url request until timeout
if ($result == 1 && defined $testCmd->{'check_call_back'}) {
my $d = $testCmd->{'http_daemon'};
- $d->timeout(300); #wait for 5 mins
+ if (defined $testCmd->{'timeout_seconds'}) {
+ $d->timeout($testCmd->{'timeout_seconds'})
+ }
+ else {
+ $d->timeout(300); #wait for 5 mins by default
+ }
my $url_requested;
$testCmd->{'callback_url'} =~ s/\$jobId/$json_hash->{'id'}/g;
print $log "Expanded callback url : <" . $testCmd->{'callback_url'} . ">\n";
@@ -732,6 +737,10 @@ sub compare
my $jobComplete;
my $NUM_RETRIES = 60;
my $SLEEP_BETWEEN_RETRIES = 5;
+ if (defined $testCmd->{'timeout_seconds'} && $testCmd->{'timeout_seconds'} > 0) {
+ $SLEEP_BETWEEN_RETRIES = ($testCmd->{'timeout_seconds'} / $NUM_RETRIES);
+ print $log "found timeout_seconds & set SLEEP_BETWEEN_RETRIES=$SLEEP_BETWEEN_RETRIES";
+ }
#first wait for job completion
while ($NUM_RETRIES-- > 0) {
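
For illustration, a hypothetical test entry that opts into the new knob (only 'timeout_seconds' and 'check_call_back' come from the driver code above; everything else is made up). With NUM_RETRIES fixed at 60, a 600-second timeout yields a 10-second poll interval:

    {
     'num' => 1,
     'check_call_back' => 1,
     'timeout_seconds' => 600,  # 600 / 60 retries = 10s between polls
    },
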
Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java Tue Jul 30 22:22:35 2013
@@ -75,7 +75,7 @@ public class RevisionManagerFactory {
* Internally used by endpoint implementation to instantiate from different configuration setting.
* @param className
* @param conf
- * @return
+ * @return the opened revision manager
* @throws IOException
*/
static RevisionManager getOpenedRevisionManager(String className, Configuration conf) throws IOException {
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java Tue Jul 30 22:22:35 2013
@@ -44,7 +44,6 @@ import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
@@ -68,9 +67,6 @@ import org.apache.hadoop.util.ToolRunner
* in hdfs files.
*/
public class TempletonControllerJob extends Configured implements Tool {
- static enum ControllerCounters {SIMPLE_COUNTER}
-
- ;
public static final String COPY_NAME = "templeton.copy";
public static final String STATUSDIR_NAME = "templeton.statusdir";
public static final String JAR_ARGS_NAME = "templeton.args";
@@ -157,14 +153,13 @@ public class TempletonControllerJob exte
conf.get(OVERRIDE_CLASSPATH));
String statusdir = conf.get(STATUSDIR_NAME);
- Counter cnt = context.getCounter(ControllerCounters.SIMPLE_COUNTER);
ExecutorService pool = Executors.newCachedThreadPool();
executeWatcher(pool, conf, context.getJobID(),
proc.getInputStream(), statusdir, STDOUT_FNAME);
executeWatcher(pool, conf, context.getJobID(),
proc.getErrorStream(), statusdir, STDERR_FNAME);
- KeepAlive keepAlive = startCounterKeepAlive(pool, cnt);
+ KeepAlive keepAlive = startCounterKeepAlive(pool, context);
proc.waitFor();
keepAlive.sendReport = false;
@@ -193,7 +188,7 @@ public class TempletonControllerJob exte
pool.execute(w);
}
- private KeepAlive startCounterKeepAlive(ExecutorService pool, Counter cnt)
+ private KeepAlive startCounterKeepAlive(ExecutorService pool, Context cnt)
throws IOException {
KeepAlive k = new KeepAlive(cnt);
pool.execute(k);
@@ -215,7 +210,7 @@ public class TempletonControllerJob exte
}
}
- public static class Watcher implements Runnable {
+ private static class Watcher implements Runnable {
private InputStream in;
private OutputStream out;
private JobID jobid;
@@ -279,11 +274,11 @@ public class TempletonControllerJob exte
}
}
- public static class KeepAlive implements Runnable {
- private Counter cnt;
- public boolean sendReport;
+ private static class KeepAlive implements Runnable {
+ private final Mapper.Context cnt;
+ private volatile boolean sendReport;
- public KeepAlive(Counter cnt) {
+ public KeepAlive(Mapper.Context cnt) {
this.cnt = cnt;
this.sendReport = true;
}
@@ -292,7 +287,7 @@ public class TempletonControllerJob exte
public void run() {
try {
while (sendReport) {
- cnt.increment(1);
+ cnt.progress();
Thread.sleep(KEEP_ALIVE_MSEC);
}
} catch (InterruptedException e) {
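
The keep-alive no longer needs a dedicated counter: reporting progress on the task context is enough to stop the framework from killing the task while the child process runs. A minimal sketch of the pattern, decoupled from the controller job (class name and interval are hypothetical; Mapper.Context satisfies Progressable):

    import org.apache.hadoop.util.Progressable;

    // Heartbeat while a long-running child process executes, so the
    // MapReduce framework does not consider the task hung.
    class KeepAliveSketch implements Runnable {
      private final Progressable target;        // e.g. the Mapper.Context
      private volatile boolean sendReport = true;

      KeepAliveSketch(Progressable target) { this.target = target; }

      void stop() { sendReport = false; }

      @Override
      public void run() {
        try {
          while (sendReport) {
            target.progress();                  // report liveness
            Thread.sleep(60 * 1000L);           // hypothetical interval
          }
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();   // preserve interrupt status
        }
      }
    }

Making sendReport volatile (also done in the hunk above) matters because one thread flips the flag while the pool thread reads it.
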
Modified: hive/branches/vectorization/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ivy/libraries.properties?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/ivy/libraries.properties (original)
+++ hive/branches/vectorization/ivy/libraries.properties Tue Jul 30 22:22:35 2013
@@ -22,12 +22,10 @@ ant-contrib.version=1.0b3
ant-task.version=2.0.10
antlr.version=3.4
antlr-runtime.version=3.4
-asm.version=3.1
avro.version=1.7.1
-datanucleus-connectionpool.version=2.0.3
-datanucleus-core.version=2.0.3
-datanucleus-enhancer.version=2.0.3
-datanucleus-rdbms.version=2.0.3
+datanucleus-api-jdo.version=3.2.1
+datanucleus-core.version=3.2.2
+datanucleus-rdbms.version=3.2.1
checkstyle.version=5.0
findbugs.version=1.3.9
BoneCP.version=0.7.1.RELEASE
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java Tue Jul 30 22:22:35 2013
@@ -219,12 +219,20 @@ public abstract class HiveBaseResultSet
return null;
}
+ if (obj instanceof Date) {
+ return (Date) obj;
+ }
+
try {
- return Date.valueOf((String) obj);
+ if (obj instanceof String) {
+ return Date.valueOf((String)obj);
+ }
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
+ " to date: " + e.toString());
}
+
+ throw new SQLException("Illegal conversion");
}
public Date getDate(String columnName) throws SQLException {
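
After this change getDate() passes a java.sql.Date through unchanged, still parses String values via Date.valueOf(), and raises an explicit "Illegal conversion" SQLException for any other column type. A hedged usage sketch (connection, table, and column names are hypothetical):

    // Sketch: reading a DATE column through the JDBC driver.
    static java.sql.Date readFirstDate(java.sql.Connection con) throws java.sql.SQLException {
      java.sql.Statement stmt = con.createStatement();
      java.sql.ResultSet rs = stmt.executeQuery("select d1 from t limit 1");
      return rs.next() ? rs.getDate(1) : null;  // Date passed through; String parsed
    }
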
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java Tue Jul 30 22:22:35 2013
@@ -496,8 +496,7 @@ public class HivePreparedStatement imple
*/
public void setDate(int parameterIndex, Date x) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ this.parameters.put(parameterIndex, x.toString());
}
/*
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Tue Jul 30 22:22:35 2013
@@ -110,6 +110,8 @@ public class HiveResultSetMetaData imple
return serdeConstants.INT_TYPE_NAME;
} else if ("bigint".equalsIgnoreCase(type)) {
return serdeConstants.BIGINT_TYPE_NAME;
+ } else if ("date".equalsIgnoreCase(type)) {
+ return serdeConstants.DATE_TYPE_NAME;
} else if ("timestamp".equalsIgnoreCase(type)) {
return serdeConstants.TIMESTAMP_TYPE_NAME;
} else if ("decimal".equalsIgnoreCase(type)) {
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java Tue Jul 30 22:22:35 2013
@@ -74,6 +74,8 @@ public class JdbcColumn {
case Types.INTEGER:
case Types.BIGINT:
return columnPrecision(columnType) + 1; // allow +/-
+ case Types.DATE:
+ return 10;
case Types.TIMESTAMP:
return columnPrecision(columnType);
// see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
@@ -108,6 +110,8 @@ public class JdbcColumn {
return 7;
case Types.DOUBLE:
return 15;
+ case Types.DATE:
+ return 10;
case Types.TIMESTAMP:
return 29;
case Types.DECIMAL:
@@ -126,6 +130,7 @@ public class JdbcColumn {
case Types.SMALLINT:
case Types.INTEGER:
case Types.BIGINT:
+ case Types.DATE:
return 0;
case Types.FLOAT:
return 7;
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java Tue Jul 30 22:22:35 2013
@@ -46,6 +46,8 @@ public class Utils {
return Types.INTEGER;
} else if ("bigint".equalsIgnoreCase(type)) {
return Types.BIGINT;
+ } else if ("date".equalsIgnoreCase(type)) {
+ return Types.DATE;
} else if ("timestamp".equalsIgnoreCase(type)) {
return Types.TIMESTAMP;
} else if ("decimal".equalsIgnoreCase(type)) {
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Tue Jul 30 22:22:35 2013
@@ -224,13 +224,20 @@ public abstract class HiveBaseResultSet
if (obj == null) {
return null;
}
-
+ if (obj instanceof Date) {
+ return (Date) obj;
+ }
try {
- return Date.valueOf((String) obj);
+ if (obj instanceof String) {
+ return Date.valueOf((String)obj);
+ }
} catch (Exception e) {
throw new SQLException("Cannot convert column " + columnIndex
+ " to date: " + e.toString(), e);
}
+ // If we fell through to here this is not a valid type conversion
+ throw new SQLException("Cannot convert column " + columnIndex
+ + " to date: Illegal conversion");
}
public Date getDate(String columnName) throws SQLException {
@@ -434,6 +441,15 @@ public abstract class HiveBaseResultSet
return null;
}
+ private Date getDateValue(TStringValue tStringValue) {
+ if (tStringValue.isSetValue()) {
+ wasNull = false;
+ return Date.valueOf(tStringValue.getValue());
+ }
+ wasNull = true;
+ return null;
+ }
+
private Timestamp getTimestampValue(TStringValue tStringValue) {
if (tStringValue.isSetValue()) {
wasNull = false;
@@ -495,6 +511,8 @@ public abstract class HiveBaseResultSet
return getStringValue(tColumnValue.getStringVal());
case BINARY_TYPE:
return getBinaryValue(tColumnValue.getStringVal());
+ case DATE_TYPE:
+ return getDateValue(tColumnValue.getStringVal());
case TIMESTAMP_TYPE:
return getTimestampValue(tColumnValue.getStringVal());
case DECIMAL_TYPE:
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java Tue Jul 30 22:22:35 2013
@@ -43,10 +43,10 @@ import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
+import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TExecuteStatementReq;
import org.apache.hive.service.cli.thrift.TExecuteStatementResp;
import org.apache.hive.service.cli.thrift.TOperationHandle;
-import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TSessionHandle;
/**
@@ -510,8 +510,7 @@ public class HivePreparedStatement imple
*/
public void setDate(int parameterIndex, Date x) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ this.parameters.put(parameterIndex, x.toString());
}
/*
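
setDate() in both drivers now binds the parameter's toString() form ("2012-01-01") instead of throwing "Method not supported". A hedged sketch (table and column names are hypothetical; note the updated TestJdbcDriver tests bind the equivalent date predicate via setString):

    // Sketch: binding a java.sql.Date parameter.
    static void bindDate(java.sql.Connection con) throws java.sql.SQLException {
      java.sql.PreparedStatement ps =
          con.prepareStatement("select * from t where d1 = ?");
      ps.setDate(1, java.sql.Date.valueOf("2012-01-01"));
      ps.executeQuery();
    }
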
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java Tue Jul 30 22:22:35 2013
@@ -105,6 +105,8 @@ public class HiveResultSetMetaData imple
return serdeConstants.BIGINT_TYPE_NAME;
} else if ("timestamp".equalsIgnoreCase(type)) {
return serdeConstants.TIMESTAMP_TYPE_NAME;
+ } else if ("date".equalsIgnoreCase(type)) {
+ return serdeConstants.DATE_TYPE_NAME;
} else if ("decimal".equalsIgnoreCase(type)) {
return serdeConstants.DECIMAL_TYPE_NAME;
} else if ("binary".equalsIgnoreCase(type)) {
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java Tue Jul 30 22:22:35 2013
@@ -75,6 +75,8 @@ public class JdbcColumn {
case Types.INTEGER:
case Types.BIGINT:
return columnPrecision(columnType) + 1; // allow +/-
+ case Types.DATE:
+ return 10;
case Types.TIMESTAMP:
return columnPrecision(columnType);
@@ -110,6 +112,8 @@ public class JdbcColumn {
return 7;
case Types.DOUBLE:
return 15;
+ case Types.DATE:
+ return 10;
case Types.TIMESTAMP:
return 29;
case Types.DECIMAL:
@@ -128,6 +132,7 @@ public class JdbcColumn {
case Types.SMALLINT:
case Types.INTEGER:
case Types.BIGINT:
+ case Types.DATE:
return 0;
case Types.FLOAT:
return 7;
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java Tue Jul 30 22:22:35 2013
@@ -128,6 +128,8 @@ public class Utils {
return Types.INTEGER;
} else if ("bigint".equalsIgnoreCase(type)) {
return Types.BIGINT;
+ } else if ("date".equalsIgnoreCase(type)) {
+ return Types.DATE;
} else if ("timestamp".equalsIgnoreCase(type)) {
return Types.TIMESTAMP;
} else if ("decimal".equalsIgnoreCase(type)) {
Modified: hive/branches/vectorization/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/vectorization/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Jul 30 22:22:35 2013
@@ -150,7 +150,9 @@ public class TestJdbcDriver extends Test
+ " c15 struct<r:int,s:struct<a:int,b:string>>,"
+ " c16 array<struct<m:map<string,string>,n:int>>,"
+ " c17 timestamp, "
- + " c18 decimal) comment'" + dataTypeTableComment
+ + " c18 decimal,"
+ + " c19 binary,"
+ + " c20 date) comment'" + dataTypeTableComment
+"' partitioned by (dt STRING)");
assertFalse(res.next());
@@ -226,6 +228,7 @@ public class TestJdbcDriver extends Test
+ tableName
+ " where 'not?param?not?param' <> 'not_param??not_param' and ?=? "
+ " and 1=? and 2=? and 3.0=? and 4.0=? and 'test\\'string\"'=? and 5=? and ?=? "
+ + " and date '2012-01-01' = date ?"
+ " ) t select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
///////////////////////////////////////////////
@@ -296,7 +299,7 @@ public class TestJdbcDriver extends Test
assertNotNull(
"Execute the invalid setted sql statement should throw exception",
expectedException);
-
+
// setObject to the yet unknown type java.util.Date
expectedException = null;
try {
@@ -326,6 +329,7 @@ public class TestJdbcDriver extends Test
ps.setObject(8, 5L); //setLong
ps.setObject(9, (byte) 1); //setByte
ps.setObject(10, (byte) 1); //setByte
+ ps.setString(11, "2012-01-01"); //setString
ps.setMaxRows(2);
return ps;
@@ -345,6 +349,7 @@ public class TestJdbcDriver extends Test
ps.setLong(8, 5L); //setLong
ps.setByte(9, (byte) 1); //setByte
ps.setByte(10, (byte) 1); //setByte
+ ps.setString(11, "2012-01-01"); //setString
ps.setMaxRows(2);
return ps;
@@ -438,6 +443,8 @@ public class TestJdbcDriver extends Test
assertEquals(null, res.getString(17));
assertEquals(null, res.getTimestamp(17));
assertEquals(null, res.getBigDecimal(18));
+ assertEquals(null, res.getString(20));
+ assertEquals(null, res.getDate(20));
// row 3
assertTrue(res.next());
@@ -460,6 +467,8 @@ public class TestJdbcDriver extends Test
assertEquals("2012-04-22 09:00:00.123456789", res.getString(17));
assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
+ assertEquals("2013-01-01", res.getString(20));
+ assertEquals("2013-01-01", res.getDate(20).toString());
// test getBoolean rules on non-boolean columns
assertEquals(true, res.getBoolean(1));
@@ -850,13 +859,14 @@ public class TestJdbcDriver extends Test
ResultSet res = stmt.executeQuery(
"select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
- "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1");
+ "c1*2, sentences(null, null, null) as b, c17, c18, c20 from " + dataTypeTableName +
+ " limit 1");
ResultSetMetaData meta = res.getMetaData();
ResultSet colRS = con.getMetaData().getColumns(null, null,
dataTypeTableName.toLowerCase(), null);
- assertEquals(16, meta.getColumnCount());
+ assertEquals(17, meta.getColumnCount());
assertTrue(colRS.next());
@@ -1066,6 +1076,13 @@ public class TestJdbcDriver extends Test
assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
assertEquals(Integer.MAX_VALUE, meta.getScale(16));
+ assertEquals("c20", meta.getColumnName(17));
+ assertEquals(Types.DATE, meta.getColumnType(17));
+ assertEquals("date", meta.getColumnTypeName(17));
+ assertEquals(10, meta.getColumnDisplaySize(17));
+ assertEquals(10, meta.getPrecision(17));
+ assertEquals(0, meta.getScale(17));
+
for (int i = 1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
Modified: hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Tue Jul 30 22:22:35 2013
@@ -150,7 +150,8 @@ public class TestJdbcDriver2 extends Tes
+ " c16 array<struct<m:map<string,string>,n:int>>,"
+ " c17 timestamp, "
+ " c18 decimal, "
- + " c19 binary) comment'" + dataTypeTableComment
+ + " c19 binary, "
+ + " c20 date) comment'" + dataTypeTableComment
+"' partitioned by (dt STRING)");
stmt.execute("load data local inpath '"
@@ -278,6 +279,7 @@ public class TestJdbcDriver2 extends Tes
+ tableName
+ " where 'not?param?not?param' <> 'not_param??not_param' and ?=? "
+ " and 1=? and 2=? and 3.0=? and 4.0=? and 'test\\'string\"'=? and 5=? and ?=? "
+ + " and date '2012-01-01' = date ?"
+ " ) t select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
///////////////////////////////////////////////
@@ -297,6 +299,7 @@ public class TestJdbcDriver2 extends Tes
ps.setLong(8, 5L);
ps.setByte(9, (byte) 1);
ps.setByte(10, (byte) 1);
+ ps.setString(11, "2012-01-01");
ps.setMaxRows(2);
@@ -445,6 +448,8 @@ public class TestJdbcDriver2 extends Tes
assertEquals(null, res.getString(17));
assertEquals(null, res.getString(18));
assertEquals(null, res.getString(19));
+ assertEquals(null, res.getString(20));
+ assertEquals(null, res.getDate(20));
// row 2
assertTrue(res.next());
@@ -468,6 +473,8 @@ public class TestJdbcDriver2 extends Tes
assertEquals(null, res.getTimestamp(17));
assertEquals(null, res.getBigDecimal(18));
assertEquals(null, res.getString(19));
+ assertEquals(null, res.getString(20));
+ assertEquals(null, res.getDate(20));
// row 3
assertTrue(res.next());
@@ -491,6 +498,8 @@ public class TestJdbcDriver2 extends Tes
assertEquals("2012-04-22 09:00:00.123456789", res.getTimestamp(17).toString());
assertEquals("123456789.0123456", res.getBigDecimal(18).toString());
assertEquals("abcd", res.getString(19));
+ assertEquals("2013-01-01", res.getString(20));
+ assertEquals("2013-01-01", res.getDate(20).toString());
// test getBoolean rules on non-boolean columns
assertEquals(true, res.getBoolean(1));
@@ -899,13 +908,14 @@ public class TestJdbcDriver2 extends Tes
ResultSet res = stmt.executeQuery(
"select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
- "c1*2, sentences(null, null, null) as b, c17, c18 from " + dataTypeTableName + " limit 1");
+ "c1*2, sentences(null, null, null) as b, c17, c18, c20 from " + dataTypeTableName +
+ " limit 1");
ResultSetMetaData meta = res.getMetaData();
ResultSet colRS = con.getMetaData().getColumns(null, null,
dataTypeTableName.toLowerCase(), null);
- assertEquals(16, meta.getColumnCount());
+ assertEquals(17, meta.getColumnCount());
assertTrue(colRS.next());
@@ -1106,6 +1116,13 @@ public class TestJdbcDriver2 extends Tes
assertEquals(Integer.MAX_VALUE, meta.getPrecision(16));
assertEquals(Integer.MAX_VALUE, meta.getScale(16));
+ assertEquals("c20", meta.getColumnName(17));
+ assertEquals(Types.DATE, meta.getColumnType(17));
+ assertEquals("date", meta.getColumnTypeName(17));
+ assertEquals(10, meta.getColumnDisplaySize(17));
+ assertEquals(10, meta.getPrecision(17));
+ assertEquals(0, meta.getScale(17));
+
for (int i = 1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
Modified: hive/branches/vectorization/metastore/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/build.xml?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/build.xml (original)
+++ hive/branches/vectorization/metastore/build.xml Tue Jul 30 22:22:35 2013
@@ -81,7 +81,7 @@
<target name="model-enhance" depends="model-compile" unless="enhanceModel.notRequired" >
<echo message="Project: ${ant.project.name}"/>
<taskdef name="datanucleusenhancer"
- classname="org.datanucleus.enhancer.tools.EnhancerTask">
+ classname="org.datanucleus.enhancer.EnhancerTask">
<classpath refid="classpath"/>
</taskdef>
Modified: hive/branches/vectorization/metastore/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/ivy.xml?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/ivy.xml (original)
+++ hive/branches/vectorization/metastore/ivy.xml Tue Jul 30 22:22:35 2013
@@ -33,18 +33,18 @@
conf="compile->default" />
<dependency org="com.jolbox" name="bonecp" rev="${BoneCP.version}"/>
<dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"/>
- <dependency org="org.datanucleus" name="datanucleus-connectionpool" rev="${datanucleus-connectionpool.version}"
- transitive="false"/>
- <dependency org="org.datanucleus" name="datanucleus-core" rev="${datanucleus-core.version}"
- transitive="false"/>
- <dependency org="org.datanucleus" name="datanucleus-enhancer" rev="${datanucleus-enhancer.version}"
+ <dependency org="org.datanucleus" name="datanucleus-api-jdo" rev="${datanucleus-api-jdo.version}">
+ <exclude org="javax.jdo" module="jdo2-api"/>
+ <exclude org="junit" module="junit"/>
+ <exclude org="log4j" module="log4j"/>
+ </dependency>
+ <dependency org="org.datanucleus" name="datanucleus-core" rev="${datanucleus-core.version}"
transitive="false"/>
<dependency org="org.datanucleus" name="datanucleus-rdbms" rev="${datanucleus-rdbms.version}"
transitive="false"/>
<dependency org="javax.jdo" name="jdo-api" rev="${jdo-api.version}"
transitive="false"/>
<dependency org="org.apache.derby" name="derby" rev="${derby.version}"/>
- <dependency org="asm" name="asm" rev="${asm.version}"/>
</dependencies>
</ivy-module>
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Tue Jul 30 22:22:35 2013
@@ -4374,10 +4374,11 @@ public class HiveMetaStore extends Thrif
conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_KEYTAB_FILE),
conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL));
// start delegation token manager
- saslServer.startDelegationTokenSecretManager(conf);
+ HMSHandler hmsHandler = new HMSHandler("new db based metaserver", conf);
+ saslServer.startDelegationTokenSecretManager(conf, hmsHandler);
transFactory = saslServer.createTransportFactory();
- processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor<IHMSHandler>(
- newHMSHandler("new db based metaserver", conf)));
+ processor = saslServer.wrapProcessor(
+ new ThriftHiveMetastore.Processor<HMSHandler>(hmsHandler));
LOG.info("Starting DB backed MetaStore Server in Secure Mode");
} else {
// we are in unsecure mode.
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Tue Jul 30 22:22:35 2013
@@ -43,6 +43,7 @@ import javax.jdo.PersistenceManagerFacto
import javax.jdo.Query;
import javax.jdo.Transaction;
import javax.jdo.datastore.DataStoreCache;
+import javax.jdo.identity.IntIdentity;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
@@ -96,9 +97,11 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.model.MColumnDescriptor;
import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
import org.apache.hadoop.hive.metastore.model.MDatabase;
+import org.apache.hadoop.hive.metastore.model.MDelegationToken;
import org.apache.hadoop.hive.metastore.model.MFieldSchema;
import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
import org.apache.hadoop.hive.metastore.model.MIndex;
+import org.apache.hadoop.hive.metastore.model.MMasterKey;
import org.apache.hadoop.hive.metastore.model.MOrder;
import org.apache.hadoop.hive.metastore.model.MPartition;
import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
@@ -5269,4 +5272,204 @@ public class ObjectStore implements RawS
return delCnt;
}
+ private MDelegationToken getTokenFrom(String tokenId) {
+ Query query = pm.newQuery(MDelegationToken.class, "tokenIdentifier == tokenId");
+ query.declareParameters("java.lang.String tokenId");
+ query.setUnique(true);
+ return (MDelegationToken)query.execute(tokenId);
+ }
+
+ @Override
+ public boolean addToken(String tokenId, String delegationToken) {
+
+ LOG.debug("Begin executing addToken");
+ boolean committed = false;
+ MDelegationToken token;
+ try{
+ openTransaction();
+ token = getTokenFrom(tokenId);
+ if (token == null) {
+ // add the token only if it doesn't already exist
+ pm.makePersistent(new MDelegationToken(tokenId, delegationToken));
+ }
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing addToken with status : " + committed);
+ return committed && (token == null);
+ }
+
+ @Override
+ public boolean removeToken(String tokenId) {
+
+ LOG.debug("Begin executing removeToken");
+ boolean committed = false;
+ MDelegationToken token;
+ try{
+ openTransaction();
+ token = getTokenFrom(tokenId);
+ if (null != token) {
+ pm.deletePersistent(token);
+ }
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing removeToken with status : " + committed);
+ return committed && (token != null);
+ }
+
+ @Override
+ public String getToken(String tokenId) {
+
+ LOG.debug("Begin executing getToken");
+ boolean committed = false;
+ MDelegationToken token;
+ try{
+ openTransaction();
+ token = getTokenFrom(tokenId);
+ if (null != token) {
+ pm.retrieve(token);
+ }
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing getToken with status : " + committed);
+ return (null == token) ? null : token.getTokenStr();
+ }
+
+ @Override
+ public List<String> getAllTokenIdentifiers() {
+
+ LOG.debug("Begin executing getAllTokenIdentifiers");
+ boolean committed = false;
+ List<MDelegationToken> tokens;
+ try{
+ openTransaction();
+ Query query = pm.newQuery(MDelegationToken.class);
+ tokens = (List<MDelegationToken>) query.execute();
+ pm.retrieveAll(tokens);
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing getAllTokenIdentifers with status : " + committed);
+ List<String> tokenIdents = new ArrayList<String>(tokens.size());
+
+ for (MDelegationToken token : tokens) {
+ tokenIdents.add(token.getTokenIdentifier());
+ }
+ return tokenIdents;
+ }
+
+ @Override
+ public int addMasterKey(String key) throws MetaException{
+ LOG.debug("Begin executing addMasterKey");
+ boolean committed = false;
+ MMasterKey masterKey = new MMasterKey(key);
+ try{
+ openTransaction();
+ pm.makePersistent(masterKey);
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing addMasterKey with status : " + committed);
+ if (committed) {
+ return ((IntIdentity)pm.getObjectId(masterKey)).getKey();
+ } else {
+ throw new MetaException("Failed to add master key.");
+ }
+ }
+
+ @Override
+ public void updateMasterKey(Integer id, String key) throws NoSuchObjectException, MetaException {
+ LOG.debug("Begin executing updateMasterKey");
+ boolean committed = false;
+ MMasterKey masterKey;
+ try{
+ openTransaction();
+ Query query = pm.newQuery(MMasterKey.class, "keyId == id");
+ query.declareParameters("java.lang.Integer id");
+ query.setUnique(true);
+ masterKey = (MMasterKey)query.execute(id);
+ if (null != masterKey) {
+ masterKey.setMasterKey(key);
+ }
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing updateMasterKey with status : " + committed);
+ if (null == masterKey) {
+ throw new NoSuchObjectException("No key found with keyId: " + id);
+ }
+ if (!committed) {
+ throw new MetaException("Though key is found, failed to update it. " + id);
+ }
+ }
+
+ @Override
+ public boolean removeMasterKey(Integer id) {
+ LOG.debug("Begin executing removeMasterKey");
+ boolean success = false;
+ MMasterKey masterKey;
+ try{
+ openTransaction();
+ Query query = pm.newQuery(MMasterKey.class, "keyId == id");
+ query.declareParameters("java.lang.Integer id");
+ query.setUnique(true);
+ masterKey = (MMasterKey)query.execute(id);
+ if (null != masterKey) {
+ pm.deletePersistent(masterKey);
+ }
+ success = commitTransaction();
+ } finally {
+ if(!success) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing removeMasterKey with status : " + success);
+ return (null != masterKey) && success;
+ }
+
+ @Override
+ public String[] getMasterKeys() {
+ LOG.debug("Begin executing getMasterKeys");
+ boolean committed = false;
+ List<MMasterKey> keys;
+ try{
+ openTransaction();
+ Query query = pm.newQuery(MMasterKey.class);
+ keys = (List<MMasterKey>) query.execute();
+ pm.retrieveAll(keys);
+ committed = commitTransaction();
+ } finally {
+ if(!committed) {
+ rollbackTransaction();
+ }
+ }
+ LOG.debug("Done executing getMasterKeys with status : " + committed);
+ String[] masterKeys = new String[keys.size()];
+
+ for (int i = 0; i < keys.size(); i++) {
+ masterKeys[i] = keys.get(i).getMasterKey();
+ }
+ return masterKeys;
+ }
+
}
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java Tue Jul 30 22:22:35 2013
@@ -419,6 +419,21 @@ public interface RawStore extends Config
public abstract long cleanupEvents();
+ public abstract boolean addToken(String tokenIdentifier, String delegationToken);
+ public abstract boolean removeToken(String tokenIdentifier);
+
+ public abstract String getToken(String tokenIdentifier);
+
+ public abstract List<String> getAllTokenIdentifiers();
+
+ public abstract int addMasterKey(String key) throws MetaException;
+
+ public abstract void updateMasterKey(Integer seqNo, String key)
+ throws NoSuchObjectException, MetaException;
+
+ public abstract boolean removeMasterKey(Integer keySeq);
+
+ public abstract String[] getMasterKeys();
}
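
These additions give RawStore implementations a persistent home for delegation tokens and master keys, backed by the new ObjectStore methods above. A hedged sketch of how a token manager might drive the interface (all identifier and key strings are made up):

    // Sketch: exercising the new RawStore token/master-key methods.
    static void tokenRoundTrip(org.apache.hadoop.hive.metastore.RawStore ms) throws Exception {
      int keySeq = ms.addMasterKey("master-key-material");      // returns the generated key id
      ms.updateMasterKey(keySeq, "rolled-key-material");        // rotate in place

      boolean added = ms.addToken("tok-1", "serialized-token"); // false if tok-1 already exists
      String token = ms.getToken("tok-1");                      // null if absent
      for (String id : ms.getAllTokenIdentifiers()) {
        // e.g. expire stale tokens here
      }
      ms.removeToken("tok-1");
      ms.removeMasterKey(keySeq);
    }
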
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java Tue Jul 30 22:22:35 2013
@@ -17,9 +17,7 @@
*/
package org.apache.hadoop.hive.metastore.parser;
-import java.util.ArrayList;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
@@ -28,9 +26,9 @@ import org.antlr.runtime.ANTLRStringStre
import org.antlr.runtime.CharStream;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import com.google.common.collect.Sets;
@@ -158,6 +156,7 @@ public class ExpressionTree {
public static class LeafNode extends TreeNode {
public String keyName;
public Operator operator;
+ /** Constant expression side of the operator. Can currently be a String or a Long. */
public Object value;
public boolean isReverseOrder = false;
private static final String PARAM_PREFIX = "hive_filter_param_";
@@ -196,7 +195,7 @@ public class ExpressionTree {
String paramKeyName = keyName.substring(hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS.length());
keyName = "this.parameters.get(\"" + paramKeyName + "\")";
//value is persisted as a string in the db, so make sure it's a string here
- // in case we get an integer.
+ // in case we get a long.
value = value.toString();
} else {
throw new MetaException("Invalid key name in filter. " +
@@ -210,8 +209,8 @@ public class ExpressionTree {
* generates a statement of the form:
* key1 operator value2 (&& | || ) key2 operator value2 ...
*
- * Currently supported types for value are String and Integer.
- * The LIKE operator for Integers is unsupported.
+ * Currently supported types for value are String and Long.
+ * The LIKE operator for Longs is unsupported.
*/
private String generateJDOFilterGeneral(Map<String, Object> params)
throws MetaException {
@@ -257,23 +256,37 @@ public class ExpressionTree {
"> is not a partitioning key for the table");
}
- //Can only support partitions whose types are string
- if( ! table.getPartitionKeys().get(partitionColumnIndex).
- getType().equals(org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME) ) {
- throw new MetaException
- ("Filtering is supported only on partition keys of type string");
+ String keyType = table.getPartitionKeys().get(partitionColumnIndex).getType();
+ boolean isIntegralSupported = doesOperatorSupportIntegral(operator);
+
+ // Can only support partitions whose types are string, or maybe integers
+ if (!keyType.equals(org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME)
+ && (!isIntegralSupported || !isIntegralType(keyType))) {
+ throw new MetaException("Filtering is supported only on partition keys of type " +
+ "string" + (isIntegralSupported ? ", or integral types" : ""));
+ }
+
+ boolean isStringValue = value instanceof String;
+ if (!isStringValue && (!isIntegralSupported || !(value instanceof Long))) {
+ throw new MetaException("Filtering is supported only on partition keys of type " +
+ "string" + (isIntegralSupported ? ", or integral types" : ""));
}
- String valueParam = null;
+ String valueAsString = null;
try {
- valueParam = (String) value;
+ valueAsString = isStringValue ? (String) value : Long.toString((Long) value);
} catch (ClassCastException e) {
- throw new MetaException("Filtering is supported only on partition keys of type string");
+ throw new MetaException("Unable to cast the constexpr to "
+ + (isStringValue ? "string" : "long"));
}
String paramName = PARAM_PREFIX + params.size();
- params.put(paramName, valueParam);
- String filter;
+ params.put(paramName, valueAsString);
+ boolean isOpEquals = operator == Operator.EQUALS;
+ if (isOpEquals || operator == Operator.NOTEQUALS || operator == Operator.NOTEQUALS2) {
+ return makeFilterForEquals(keyName, valueAsString, paramName, params,
+ partitionColumnIndex, partitionColumnCount, isOpEquals);
+ }
String keyEqual = FileUtils.escapePathName(keyName) + "=";
int keyEqualLength = keyEqual.length();
@@ -286,43 +299,52 @@ public class ExpressionTree {
valString = "partitionName.substring(partitionName.indexOf(\"" + keyEqual + "\")+" + keyEqualLength + ").substring(0, partitionName.substring(partitionName.indexOf(\"" + keyEqual + "\")+" + keyEqualLength + ").indexOf(\"/\"))";
}
- //Handle "a > 10" and "10 > a" appropriately
- if (isReverseOrder){
- //For LIKE, the value should be on the RHS
- if( operator == Operator.LIKE ) {
+ if (operator == Operator.LIKE) {
+ if (isReverseOrder) {
+ //For LIKE, the value should be on the RHS
throw new MetaException(
- "Value should be on the RHS for LIKE operator : " +
- "Key <" + keyName + ">");
- } else if (operator == Operator.EQUALS) {
- filter = makeFilterForEquals(keyName, valueParam, paramName, params,
- partitionColumnIndex, partitionColumnCount);
- } else {
- filter = paramName +
- " " + operator.getJdoOp() + " " + valString;
- }
- } else {
- if (operator == Operator.LIKE ) {
- //generate this.values.get(i).matches("abc%")
- filter = " " + valString + "."
- + operator.getJdoOp() + "(" + paramName + ") ";
- } else if (operator == Operator.EQUALS) {
- filter = makeFilterForEquals(keyName, valueParam, paramName, params,
- partitionColumnIndex, partitionColumnCount);
- } else {
- filter = " " + valString + " "
- + operator.getJdoOp() + " " + paramName;
+ "Value should be on the RHS for LIKE operator : Key <" + keyName + ">");
}
+ //generate this.values.get(i).matches("abc%")
+ return " " + valString + "." + operator.getJdoOp() + "(" + paramName + ") ";
}
- return filter;
+
+ // TODO: support for other ops for numbers to be handled in HIVE-4888.
+ return isReverseOrder
+ ? paramName + " " + operator.getJdoOp() + " " + valString
+ : " " + valString + " " + operator.getJdoOp() + " " + paramName;
+ }
+
+ /**
+ * @param operator the filter operator
+ * @return true iff filter pushdown for this operator can be done for integral types.
+ */
+ private static boolean doesOperatorSupportIntegral(Operator operator) {
+ return (operator == Operator.EQUALS)
+ || (operator == Operator.NOTEQUALS)
+ || (operator == Operator.NOTEQUALS2);
+ }
+
+ /**
+ * @param type the partition key type name
+ * @return true iff type is an integral type.
+ */
+ private static boolean isIntegralType(String type) {
+ return type.equals(org.apache.hadoop.hive.serde.serdeConstants.TINYINT_TYPE_NAME)
+ || type.equals(org.apache.hadoop.hive.serde.serdeConstants.SMALLINT_TYPE_NAME)
+ || type.equals(org.apache.hadoop.hive.serde.serdeConstants.INT_TYPE_NAME)
+ || type.equals(org.apache.hadoop.hive.serde.serdeConstants.BIGINT_TYPE_NAME);
}
}
/**
- * For equals, we can make the JDO query much faster by filtering based on the
- * partition name. For a condition like ds="2010-10-01", we can see if there
- * are any partitions with a name that contains the substring "ds=2010-10-01/"
+ * For equals and not-equals, we can make the JDO query much faster by filtering
+ * based on the partition name. For a condition like ds="2010-10-01", we can see
+ * if there are any partitions with a name that contains the substring "ds=2010-10-01/".
* False matches aren't possible since "=" is escaped for partition names
* and the trailing '/' ensures that we won't get a match with ds=2010-10-011.
+ * Note that equality filters on integral types also work correctly, since the
+ * value is compared as part of the ds=1234 string.
*
* Two cases to keep in mind: Case with only one partition column (no '/'s)
* Case where the partition key column is at the end of the name. (no
@@ -332,11 +354,12 @@ public class ExpressionTree {
* @param value
* @param paramName name of the parameter to use for JDOQL
* @param params a map from the parameter name to their values
+ * @param isEq whether the operator is equals or not-equals.
* @return
* @throws MetaException
*/
- private static String makeFilterForEquals(String keyName, String value,
- String paramName, Map<String, Object> params, int keyPos, int keyCount)
+ private static String makeFilterForEquals(String keyName, String value, String paramName,
+ Map<String, Object> params, int keyPos, int keyCount, boolean isEq)
throws MetaException {
Map<String, String> partKeyToVal = new HashMap<String, String>();
partKeyToVal.put(keyName, value);
@@ -348,22 +371,25 @@ public class ExpressionTree {
if (keyCount == 1) {
// Case where there are no other partition columns
params.put(paramName, escapedNameFragment);
- fltr.append("partitionName == ").append(paramName);
+ fltr.append("partitionName ").append(isEq ? "== " : "!= ").append(paramName);
} else if (keyPos + 1 == keyCount) {
// Case where the partition column is at the end of the name. There will
// be a leading '/' but no trailing '/'
params.put(paramName, "/" + escapedNameFragment);
- fltr.append("partitionName.endsWith(").append(paramName).append(')');
+ fltr.append(isEq ? "" : "!").append("partitionName.endsWith(")
+ .append(paramName).append(')');
} else if (keyPos == 0) {
// Case where the partition column is at the beginning of the name. There will
// be a trailing '/' but no leading '/'
params.put(paramName, escapedNameFragment + "/");
- fltr.append("partitionName.startsWith(").append(paramName).append(')');
+ fltr.append(isEq ? "" : "!").append("partitionName.startsWith(")
+ .append(paramName).append(')');
} else {
// Case where the partition column is in the middle of the name. There will
// be a leading '/' and a trailing '/'
params.put(paramName, "/" + escapedNameFragment + "/");
- fltr.append("partitionName.indexOf(").append(paramName).append(") >= 0");
+ fltr.append("partitionName.indexOf(").append(paramName).append(")")
+ .append(isEq ? ">= 0" : "< 0");
}
return fltr.toString();
}
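
For illustration, a sketch of the JDOQL fragments makeFilterForEquals generates
for a condition like ds="2010-10-01" (the parameter name p0 below is hypothetical;
the real name is PARAM_PREFIX plus the parameter index, and when isEq is false
each fragment is negated as shown in the diff above):

    partitionName == p0              // single partition key; p0 = "ds=2010-10-01"
    partitionName.startsWith(p0)     // key is first;         p0 = "ds=2010-10-01/"
    partitionName.endsWith(p0)       // key is last;          p0 = "/ds=2010-10-01"
    partitionName.indexOf(p0) >= 0   // key is in the middle; p0 = "/ds=2010-10-01/"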
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g Tue Jul 30 22:22:35 2013
@@ -100,10 +100,10 @@ operatorExpression
) { val = TrimQuotes(value.getText()); }
|
(
- (key = Identifier op = operator value = IntLiteral)
+ (key = Identifier op = operator value = IntegralLiteral)
|
- (value = IntLiteral op = operator key = Identifier) { isReverseOrder = true; }
- ) { val = Integer.parseInt(value.getText()); }
+ (value = IntegralLiteral op = operator key = Identifier) { isReverseOrder = true; }
+ ) { val = Long.parseLong(value.getText()); }
)
{
LeafNode node = new LeafNode();
@@ -157,9 +157,9 @@ StringLiteral
;
-IntLiteral
+IntegralLiteral
:
- (Digit)+
+ ('-')? (Digit)+
;
Identifier
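
With the lexer now accepting an optional leading '-' and the parser switching to
Long.parseLong, integral literals that previously failed to lex (negative values)
or overflowed Integer.parseInt now work in filter strings. A minimal usage sketch,
assuming a HiveMetaStoreClient named client and the filterdb/filtertbl fixture
from TestHiveMetaStore below:

    // Both "p3 = -33" and the reverse order "-33 = p3" parse; the value is
    // carried through the parser as a Long.
    List<Partition> parts = client.listPartitionsByFilter(
        "filterdb", "filtertbl", "p3 = -33 or p1 = \"p12\"", (short) -1);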
Modified: hive/branches/vectorization/metastore/src/model/package.jdo
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/model/package.jdo?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/model/package.jdo (original)
+++ hive/branches/vectorization/metastore/src/model/package.jdo Tue Jul 30 22:22:35 2013
@@ -91,10 +91,13 @@
<embedded>
<field name="name">
<column name="FIELD_NAME" length="128" jdbc-type="VARCHAR"/>
- </field>
+ </field>
<field name="type">
<column name="FIELD_TYPE" length="767" jdbc-type="VARCHAR" allows-null="false"/>
</field>
+ <field name="comment" >
+ <column name="COMMENT" length="256" jdbc-type="VARCHAR" allows-null="true"/>
+ </field>
</embedded>
</element>
</field>
@@ -228,6 +231,9 @@
<field name="type">
<column name="TYPE_NAME" length="4000" jdbc-type="VARCHAR" allows-null="false"/>
</field>
+ <field name="comment">
+ <column name="COMMENT" length="256" jdbc-type="VARCHAR" allows-null="true"/>
+ </field>
</embedded>
</element>
</field>
@@ -262,7 +268,9 @@
<field name="outputFormat">
<column name="OUTPUT_FORMAT" length="4000" jdbc-type="VARCHAR"/>
</field>
- <field name="isCompressed"/>
+ <field name="isCompressed">
+ <column name="IS_COMPRESSED"/>
+ </field>
<field name="isStoredAsSubDirectories">
<column name="IS_STOREDASSUBDIRECTORIES"/>
</field>
@@ -754,6 +762,30 @@
</field>
</class>
+
+ <class name="MMasterKey" table="MASTER_KEYS" identity-type="application" detachable="true">
+
+ <field name="keyId" primary-key="true" value-strategy="identity">
+ <column name="KEY_ID" jdbc-type="integer" />
+ </field>
+
+ <field name="masterKey">
+ <column name="MASTER_KEY" length="767" jdbc-type="VARCHAR" />
+ </field>
+
+ </class>
+
+ <class name="MDelegationToken" table="DELEGATION_TOKENS" identity-type="application" detachable="true">
+
+ <field name="tokenIdentifier" primary-key="true">
+ <column name="TOKEN_IDENT" length="767" jdbc-type="VARCHAR" />
+ </field>
+
+ <field name="tokenStr">
+ <column name="TOKEN" length="767" jdbc-type="VARCHAR" />
+ </field>
+
+ </class>
<class name="MTableColumnStatistics" table="TAB_COL_STATS" identity-type="datastore" detachable="true">
<datastore-identity>
@@ -870,3 +902,4 @@
</package>
</jdo>
+
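
The two new JDO mappings assume matching model classes on the Java side. A minimal
sketch of what the MASTER_KEYS mapping implies, with field names inferred from the
<field> elements above (accessors omitted; MDelegationToken is analogous, with
tokenIdentifier as the String primary key and a tokenStr column):

    package org.apache.hadoop.hive.metastore.model;

    // Sketch only: field names must match the JDO <field> names for the JDO
    // implementation (DataNucleus) to enhance and persist this class.
    public class MMasterKey {
      private int keyId;        // KEY_ID, identity value strategy (auto-assigned)
      private String masterKey; // MASTER_KEY, VARCHAR(767)
    }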
Modified: hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Tue Jul 30 22:22:35 2013
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.metastore;
import java.util.List;
+import java.util.ArrayList;
import java.util.Map;
import org.apache.hadoop.conf.Configurable;
@@ -527,4 +528,36 @@ public class DummyRawStoreControlledComm
InvalidInputException {
return objectStore.updatePartitionColumnStatistics(statsObj, partVals);
}
+
+ public boolean addToken(String tokenIdentifier, String delegationToken) {
+ return false;
+ }
+
+ public boolean removeToken(String tokenIdentifier) {
+ return false;
+ }
+
+ public String getToken(String tokenIdentifier) {
+ return "";
+ }
+
+ public List<String> getAllTokenIdentifiers() {
+ return new ArrayList<String>();
+ }
+
+ public int addMasterKey(String key) throws MetaException {
+ return -1;
+ }
+
+ public void updateMasterKey(Integer seqNo, String key)
+ throws NoSuchObjectException, MetaException {}
+
+ public boolean removeMasterKey(Integer keySeq) {
+ return false;
+ }
+
+ public String[] getMasterKeys() {
+ return new String[0];
+ }
+
}
Modified: hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Tue Jul 30 22:22:35 2013
@@ -515,6 +515,45 @@ public class DummyRawStoreForJdoConnecti
}
@Override
+ public boolean addToken(String tokenIdentifier, String delegationToken) {
+ return false;
+ }
+
+ @Override
+ public boolean removeToken(String tokenIdentifier) {
+ return false;
+ }
+
+ @Override
+ public String getToken(String tokenIdentifier) {
+ return null;
+ }
+
+ @Override
+ public List<String> getAllTokenIdentifiers() {
+ return null;
+ }
+
+ @Override
+ public int addMasterKey(String key) {
+ return 0;
+ }
+
+ @Override
+ public void updateMasterKey(Integer seqNo, String key) {
+ }
+
+ @Override
+ public boolean removeMasterKey(Integer keySeq) {
+ return false;
+ }
+
+ @Override
+ public String[] getMasterKeys() {
+ return null;
+ }
+
+ @Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName)
throws MetaException, NoSuchObjectException {
return null;
Modified: hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=1508669&r1=1508668&r2=1508669&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hive/branches/vectorization/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Tue Jul 30 22:22:35 2013
@@ -64,6 +64,8 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.util.StringUtils;
import org.apache.thrift.TException;
+import com.google.common.collect.Lists;
+
public abstract class TestHiveMetaStore extends TestCase {
protected static HiveMetaStoreClient client;
protected static HiveConf hiveConf;
@@ -1919,31 +1921,6 @@ public abstract class TestHiveMetaStore
String dbName = "filterdb";
String tblName = "filtertbl";
- List<String> vals = new ArrayList<String>(3);
- vals.add("p11");
- vals.add("p21");
- vals.add("p31");
- List <String> vals2 = new ArrayList<String>(3);
- vals2.add("p11");
- vals2.add("p22");
- vals2.add("p31");
- List <String> vals3 = new ArrayList<String>(3);
- vals3.add("p12");
- vals3.add("p21");
- vals3.add("p31");
- List <String> vals4 = new ArrayList<String>(3);
- vals4.add("p12");
- vals4.add("p23");
- vals4.add("p31");
- List <String> vals5 = new ArrayList<String>(3);
- vals5.add("p13");
- vals5.add("p24");
- vals5.add("p31");
- List <String> vals6 = new ArrayList<String>(3);
- vals6.add("p13");
- vals6.add("p25");
- vals6.add("p31");
-
silentDropDatabase(dbName);
Database db = new Database();
@@ -1981,21 +1958,49 @@ public abstract class TestHiveMetaStore
tbl = client.getTable(dbName, tblName);
- add_partition(client, tbl, vals, "part1");
- add_partition(client, tbl, vals2, "part2");
- add_partition(client, tbl, vals3, "part3");
- add_partition(client, tbl, vals4, "part4");
- add_partition(client, tbl, vals5, "part5");
- add_partition(client, tbl, vals6, "part6");
+ add_partition(client, tbl, Lists.newArrayList("p11", "p21", "31"), "part1");
+ add_partition(client, tbl, Lists.newArrayList("p11", "p22", "32"), "part2");
+ add_partition(client, tbl, Lists.newArrayList("p12", "p21", "31"), "part3");
+ add_partition(client, tbl, Lists.newArrayList("p12", "p23", "32"), "part4");
+ add_partition(client, tbl, Lists.newArrayList("p13", "p24", "31"), "part5");
+ add_partition(client, tbl, Lists.newArrayList("p13", "p25", "-33"), "part6");
+ // Test equals operator for strings and integers.
checkFilter(client, dbName, tblName, "p1 = \"p11\"", 2);
checkFilter(client, dbName, tblName, "p1 = \"p12\"", 2);
checkFilter(client, dbName, tblName, "p2 = \"p21\"", 2);
checkFilter(client, dbName, tblName, "p2 = \"p23\"", 1);
+ checkFilter(client, dbName, tblName, "p3 = 31", 3);
+ checkFilter(client, dbName, tblName, "p3 = 33", 0);
+ checkFilter(client, dbName, tblName, "p3 = -33", 1);
checkFilter(client, dbName, tblName, "p1 = \"p11\" and p2=\"p22\"", 1);
checkFilter(client, dbName, tblName, "p1 = \"p11\" or p2=\"p23\"", 3);
checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+ checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+ checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+ checkFilter(client, dbName, tblName, "p1 = \"p11\" and p3 = 31", 1);
+ checkFilter(client, dbName, tblName, "p3 = -33 or p1 = \"p12\"", 3);
+ // Test not-equals operator for strings and integers.
+ checkFilter(client, dbName, tblName, "p1 != \"p11\"", 4);
+ checkFilter(client, dbName, tblName, "p2 != \"p23\"", 5);
+ checkFilter(client, dbName, tblName, "p2 != \"p33\"", 6);
+ checkFilter(client, dbName, tblName, "p3 != 32", 4);
+ checkFilter(client, dbName, tblName, "p3 != 8589934592", 6);
+ checkFilter(client, dbName, tblName, "p1 != \"p11\" and p1 != \"p12\"", 2);
+ checkFilter(client, dbName, tblName, "p1 != \"p11\" and p2 != \"p22\"", 4);
+ checkFilter(client, dbName, tblName, "p1 != \"p11\" or p2 != \"p22\"", 5);
+ checkFilter(client, dbName, tblName, "p1 != \"p12\" and p2 != \"p25\"", 3);
+ checkFilter(client, dbName, tblName, "p1 != \"p12\" or p2 != \"p25\"", 6);
+ checkFilter(client, dbName, tblName, "p3 != -33 or p1 != \"p13\"", 5);
+ checkFilter(client, dbName, tblName, "p1 != \"p11\" and p3 = 31", 2);
+ checkFilter(client, dbName, tblName, "p3 != 31 and p1 = \"p12\"", 1);
+
+ // Test reverse order.
+ checkFilter(client, dbName, tblName, "31 != p3 and p1 = \"p12\"", 1);
+ checkFilter(client, dbName, tblName, "\"p23\" = p2", 1);
+
+ // Test and/or more...
checkFilter(client, dbName, tblName,
"p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\")", 3);
checkFilter(client, dbName, tblName,
@@ -2007,11 +2012,11 @@ public abstract class TestHiveMetaStore
checkFilter(client, dbName, tblName,
"p1=\"p12\" and p2=\"p27\" Or p2=\"p21\"", 2);
+ // Test gt/lt/lte/gte/like for strings.
checkFilter(client, dbName, tblName, "p1 > \"p12\"", 2);
checkFilter(client, dbName, tblName, "p1 >= \"p12\"", 4);
checkFilter(client, dbName, tblName, "p1 < \"p12\"", 2);
checkFilter(client, dbName, tblName, "p1 <= \"p12\"", 4);
- checkFilter(client, dbName, tblName, "p1 <> \"p12\"", 4);
checkFilter(client, dbName, tblName, "p1 like \"p1.*\"", 6);
checkFilter(client, dbName, tblName, "p2 like \"p.*3\"", 1);
@@ -2033,6 +2038,17 @@ public abstract class TestHiveMetaStore
assertTrue("Filter on int partition key", me.getMessage().contains(
"Filtering is supported only on partition keys of type string"));
+ me = null;
+ try {
+ client.listPartitionsByFilter(dbName,
+ tblName, "p3 >= 31", (short) -1);
+ } catch(MetaException e) {
+ me = e;
+ }
+ assertNotNull(me);
+ assertTrue("Filter on int partition key", me.getMessage().contains(
+ "Filtering is supported only on partition keys of type string"));
+
me = null;
try {
client.listPartitionsByFilter(dbName,