Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/21 17:49:50 UTC
svn commit: r1525254 [1/4] - in /hive/branches/vectorization: ./
beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/src/test/
jdbc/src/java/org/apache/hive/jdbc/ jdbc/src/test/org/apache/hive/jdbc/
ql/src/java/org/apache...
Author: hashutosh
Date: Sat Sep 21 15:49:49 2013
New Revision: 1525254
URL: http://svn.apache.org/r1525254
Log:
Merged in with latest trunk
Added:
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumnAttributes.java
- copied unchanged from r1525253, hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcColumnAttributes.java
hive/branches/vectorization/ql/src/test/queries/clientpositive/add_part_multiple.q
- copied unchanged from r1525253, hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q
hive/branches/vectorization/ql/src/test/results/clientpositive/add_part_multiple.q.out
- copied unchanged from r1525253, hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
- copied unchanged from r1525253, hive/trunk/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
- copied unchanged from r1525253, hive/trunk/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java
- copied unchanged from r1525253, hive/trunk/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java
Modified:
hive/branches/vectorization/ (props changed)
hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java
hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.properties
hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java
hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hive/branches/vectorization/ql/src/test/results/clientpositive/create_view_partitioned.q.out
hive/branches/vectorization/service/if/TCLIService.thrift
hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_types.h
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
hive/branches/vectorization/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/constants.py
hive/branches/vectorization/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
hive/branches/vectorization/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/ColumnValue.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/Type.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
Propchange: hive/branches/vectorization/
------------------------------------------------------------------------------
Merged /hive/trunk:r1524859-1525253
Modified: hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java Sat Sep 21 15:49:49 2013
@@ -96,7 +96,7 @@ import jline.SimpleCompletor;
public class BeeLine {
private static final ResourceBundle resourceBundle =
ResourceBundle.getBundle(BeeLine.class.getName());
- private BeeLineSignalHandler signalHandler = null;
+ private final BeeLineSignalHandler signalHandler = null;
private static final String separator = System.getProperty("line.separator");
private boolean exit = false;
private final DatabaseConnections connections = new DatabaseConnections();
@@ -125,6 +125,8 @@ public class BeeLine {
private static final int ERRNO_ARGS = 1;
private static final int ERRNO_OTHER = 2;
+ private static final String HIVE_VAR_PREFIX = "--hivevar";
+
private final Map<Object, Object> formats = map(new Object[] {
"vertical", new VerticalOutputFormat(this),
"table", new TableOutputFormat(this),
@@ -504,6 +506,16 @@ public class BeeLine {
return false;
}
+ // Parse hive variables
+ if (args[i].equals(HIVE_VAR_PREFIX)) {
+ String[] parts = split(args[++i], "=");
+ if (parts.length != 2) {
+ return false;
+ }
+ getOpts().getHiveVariables().put(parts[0], parts[1]);
+ continue;
+ }
+
// -- arguments are treated as properties
if (args[i].startsWith("--")) {
String[] parts = split(args[i].substring(2), "=");
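
For reference, the option handling this hunk adds reduces to the following stand-alone sketch; the class and method names are hypothetical, and the real BeeLine folds this into its existing option loop:

    import java.util.HashMap;
    import java.util.Map;

    // Stand-alone sketch of the "--hivevar name=value" handling; class and
    // method names are hypothetical.
    public class HiveVarParseSketch {
      private static final String HIVE_VAR_PREFIX = "--hivevar";

      static Map<String, String> parse(String[] args) {
        Map<String, String> hiveVars = new HashMap<String, String>();
        for (int i = 0; i < args.length; i++) {
          if (HIVE_VAR_PREFIX.equals(args[i]) && i + 1 < args.length) {
            // mirrors the committed check: exactly one '=' is expected
            String[] parts = args[++i].split("=");
            if (parts.length == 2) {
              hiveVars.put(parts[0], parts[1]);
            }
          }
        }
        return hiveVars;
      }

      public static void main(String[] args) {
        System.out.println(parse(new String[] {"--hivevar", "DUMMY_TBL=dummy"}));
        // prints {DUMMY_TBL=dummy}
      }
    }
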
Modified: hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.properties
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.properties?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.properties (original)
+++ hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.properties Sat Sep 21 15:49:49 2013
@@ -143,6 +143,10 @@ cmd-usage: Usage: java org.apache.hive.c
\ -d <driver class> the driver class to use\n \
\ -e <query> query that should be executed\n \
\ -f <file> script file that should be executed\n \
+\ --hivevar name=value Hive variable name and value\n \
+\ This is a Hive-specific setting; variables\n \
+\ can be set at the session level and referenced in Hive\n \
+\ commands or queries.\n \
\ --color=[true/false] control whether color is used for display\n \
\ --showHeader=[true/false] show column names in query results\n \
\ --headerInterval=ROWS; the interval between which headers are displayed\n \
Modified: hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java (original)
+++ hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java Sat Sep 21 15:49:49 2013
@@ -31,8 +31,10 @@ import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Properties;
import java.util.TreeSet;
@@ -80,6 +82,8 @@ class BeeLineOpts implements Completor {
private String scriptFile = null;
+ private Map<String, String> hiveVariables = new HashMap<String, String>();
+
public BeeLineOpts(BeeLine beeLine, Properties props) {
this.beeLine = beeLine;
if (terminal.getTerminalWidth() > 0) {
@@ -421,4 +425,13 @@ class BeeLineOpts implements Completor {
public File getPropertiesFile() {
return rcFile;
}
+
+ public Map<String, String> getHiveVariables() {
+ return hiveVariables;
+ }
+
+ public void setHiveVariables(Map<String, String> hiveVariables) {
+ this.hiveVariables = hiveVariables;
+ }
+
}
Modified: hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java (original)
+++ hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java Sat Sep 21 15:49:49 2013
@@ -28,8 +28,10 @@ import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
@@ -52,9 +54,37 @@ class DatabaseConnection {
String username, String password) throws SQLException {
this.beeLine = beeLine;
this.driver = driver;
- this.url = url;
this.username = username;
this.password = password;
+ this.url = appendHiveVariables(beeLine, url);
+ }
+
+/**
+ * Append Hive variables specified on the command line to the connection URL
+ * (after '#'). They are applied to the session later, on the server side.
+ */
+ private static String appendHiveVariables(BeeLine beeLine, String url) {
+ StringBuilder sb = new StringBuilder( url );
+ Map<String, String> hiveVars = beeLine.getOpts().getHiveVariables();
+ if (hiveVars.size() > 0) {
+ if (url.indexOf("#") == -1) {
+ sb.append("#");
+ } else {
+ sb.append("&");
+ }
+ Set<Map.Entry<String, String>> vars = hiveVars.entrySet();
+ Iterator<Map.Entry<String, String>> it = vars.iterator();
+ while (it.hasNext()) {
+ Map.Entry<String, String> var = it.next();
+ sb.append(var.getKey());
+ sb.append("=");
+ sb.append(var.getValue());
+ if (it.hasNext()) {
+ sb.append("&");
+ }
+ }
+ }
+ return sb.toString();
}
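
As a worked example of the rewriting above: a URL without a fragment gains one, and a URL that already has one is extended with '&'. A minimal sketch with the same logic (class name hypothetical):

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Hypothetical stand-alone version of appendHiveVariables() above.
    public class UrlRewriteSketch {
      static String append(String url, Map<String, String> hiveVars) {
        StringBuilder sb = new StringBuilder(url);
        if (!hiveVars.isEmpty()) {
          // start a fragment if there is none yet, otherwise chain with '&'
          sb.append(url.indexOf('#') == -1 ? "#" : "&");
          boolean first = true;
          for (Map.Entry<String, String> var : hiveVars.entrySet()) {
            if (!first) {
              sb.append("&");
            }
            sb.append(var.getKey()).append("=").append(var.getValue());
            first = false;
          }
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        Map<String, String> vars = new LinkedHashMap<String, String>();
        vars.put("a", "1");
        vars.put("b", "2");
        System.out.println(append("jdbc:hive2://localhost:10000/default", vars));
        // jdbc:hive2://localhost:10000/default#a=1&b=2
        System.out.println(append("jdbc:hive2://localhost:10000/default#x=y", vars));
        // jdbc:hive2://localhost:10000/default#x=y&a=1&b=2
      }
    }
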
Modified: hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java (original)
+++ hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java Sat Sep 21 15:49:49 2013
@@ -23,6 +23,8 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.beeline.BeeLine;
@@ -38,14 +40,19 @@ import org.junit.Test;
*/
//public class TestBeeLineWithArgs extends TestCase {
public class TestBeeLineWithArgs {
-
// Default location of HiveServer2
- final static String BASE_JDBC_URL = BeeLine.BEELINE_DEFAULT_JDBC_URL + "localhost:10000";
- //set JDBC_URL to something else in test case, if it needs to be customized
- String JDBC_URL = BASE_JDBC_URL;
+ final private static String JDBC_URL = BeeLine.BEELINE_DEFAULT_JDBC_URL + "localhost:10000";
private static HiveServer2 hiveServer2;
+ private List<String> getBaseArgs(String jdbcUrl) {
+ List<String> argList = new ArrayList<String>(8);
+ argList.add("-d");
+ argList.add(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER);
+ argList.add("-u");
+ argList.add(jdbcUrl);
+ return argList;
+ }
/**
* Start up a local Hive Server 2 for these tests
*/
@@ -83,13 +90,13 @@ public class TestBeeLineWithArgs {
* @throws Throwable Any exception while executing
* @return The stderr and stdout from running the script
*/
- private String testCommandLineScript(String scriptFileName) throws Throwable {
- String[] args = {"-d", BeeLine.BEELINE_DEFAULT_JDBC_DRIVER, "-u", JDBC_URL, "-f", scriptFileName};
+ private String testCommandLineScript(List<String> argList) throws Throwable {
BeeLine beeLine = new BeeLine();
ByteArrayOutputStream os = new ByteArrayOutputStream();
PrintStream beelineOutputStream = new PrintStream(os);
beeLine.setOutputStream(beelineOutputStream);
beeLine.setErrorStream(beelineOutputStream);
+ String[] args = argList.toArray(new String[argList.size()]);
beeLine.begin(args, null);
String output = os.toString("UTF8");
@@ -106,7 +113,8 @@ public class TestBeeLineWithArgs {
* @param shouldMatch true if the pattern should be found, false if it should not
* @throws Exception on command execution error
*/
- private void testScriptFile(String testName, String scriptText, String expectedPattern, boolean shouldMatch) throws Throwable {
+ private void testScriptFile(String testName, String scriptText, String expectedPattern,
+ boolean shouldMatch, List<String> argList) throws Throwable {
long startTime = System.currentTimeMillis();
System.out.println(">>> STARTED " + testName);
@@ -118,9 +126,12 @@ public class TestBeeLineWithArgs {
os.print(scriptText);
os.close();
+ argList.add("-f");
+ argList.add(scriptFile.getAbsolutePath());
+
if(shouldMatch) {
try {
- String output = testCommandLineScript(scriptFile.getAbsolutePath());
+ String output = testCommandLineScript(argList);
long elapsedTime = (System.currentTimeMillis() - startTime)/1000;
String time = "(" + elapsedTime + "s)";
if (output.contains(expectedPattern)) {
@@ -136,7 +147,7 @@ public class TestBeeLineWithArgs {
}
} else {
try {
- String output = testCommandLineScript(scriptFile.getAbsolutePath());
+ String output = testCommandLineScript(argList);
long elapsedTime = (System.currentTimeMillis() - startTime)/1000;
String time = "(" + elapsedTime + "s)";
if (output.contains(expectedPattern)) {
@@ -166,7 +177,25 @@ public class TestBeeLineWithArgs {
final String TEST_NAME = "testPositiveScriptFile";
final String SCRIPT_TEXT = "show databases;\n";
final String EXPECTED_PATTERN = " default ";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true);
+ List<String> argList = getBaseArgs(JDBC_URL);
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test the BeeLine --hivevar option. The user can specify --hivevar name=value on the
+ * BeeLine command line; in the script it can be referenced as ${name}, which is
+ * substituted with the value.
+ * @throws Throwable
+ */
+ @Test
+ public void testBeelineCommandLineHiveVariable() throws Throwable {
+ List<String> argList = getBaseArgs(JDBC_URL);
+ argList.add("--hivevar");
+ argList.add("DUMMY_TBL=dummy");
+ final String TEST_NAME = "testHiveCommandLineHiveVariable";
+ final String SCRIPT_TEXT = "create table ${DUMMY_TBL} (d int);\nshow tables;\n";
+ final String EXPECTED_PATTERN = "dummy";
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -176,10 +205,11 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testBreakOnErrorScriptFile() throws Throwable {
+ List<String> argList = getBaseArgs(JDBC_URL);
final String TEST_NAME = "testBreakOnErrorScriptFile";
final String SCRIPT_TEXT = "select * from abcdefg01;\nshow databases;\n";
final String EXPECTED_PATTERN = " default ";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false);
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, argList);
}
/**
@@ -198,8 +228,12 @@ public class TestBeeLineWithArgs {
File scriptFile = File.createTempFile("beelinenegative", "temp");
scriptFile.delete();
+ List<String> argList = getBaseArgs(JDBC_URL);
+ argList.add("-f");
+ argList.add(scriptFile.getAbsolutePath());
+
try {
- String output = testCommandLineScript(scriptFile.getAbsolutePath());
+ String output = testCommandLineScript(argList);
long elapsedTime = (System.currentTimeMillis() - startTime)/1000;
String time = "(" + elapsedTime + "s)";
if (output.contains(EXPECTED_PATTERN)) {
@@ -243,11 +277,11 @@ public class TestBeeLineWithArgs {
@Test
public void testHiveVarSubstitution() throws Throwable {
- JDBC_URL = BASE_JDBC_URL + "#D_TBL=dummy_t";
+ List<String> argList = getBaseArgs(JDBC_URL + "#D_TBL=dummy_t");
final String TEST_NAME = "testHiveVarSubstitution";
final String SCRIPT_TEXT = "create table ${D_TBL} (d int);\nshow tables;\n";
final String EXPECTED_PATTERN = "dummy_t";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true);
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
}
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Sat Sep 21 15:49:49 2013
@@ -64,6 +64,7 @@ public abstract class HiveBaseResultSet
protected TRow row;
protected List<String> columnNames;
protected List<String> columnTypes;
+ protected List<JdbcColumnAttributes> columnAttributes;
private TableSchema schema;
@@ -351,7 +352,7 @@ public abstract class HiveBaseResultSet
}
public ResultSetMetaData getMetaData() throws SQLException {
- return new HiveResultSetMetaData(columnNames, columnTypes);
+ return new HiveResultSetMetaData(columnNames, columnTypes, columnAttributes);
}
public Reader getNCharacterStream(int arg0) throws SQLException {
@@ -509,6 +510,8 @@ public abstract class HiveBaseResultSet
return getDoubleValue(tColumnValue.getDoubleVal());
case STRING_TYPE:
return getStringValue(tColumnValue.getStringVal());
+ case VARCHAR_TYPE:
+ return getStringValue(tColumnValue.getStringVal());
case BINARY_TYPE:
return getBinaryValue(tColumnValue.getStringVal());
case DATE_TYPE:
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Sat Sep 21 15:49:49 2013
@@ -101,10 +101,10 @@ public class HiveConnection implements j
openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
}
- // add supported protocols: V1 and V2 supported
+ // add supported protocols
supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
-
supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2);
+ supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
// open client session
openSession(uri);
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java Sat Sep 21 15:49:49 2013
@@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIServiceConstants;
import org.apache.hive.service.cli.thrift.TColumnDesc;
import org.apache.hive.service.cli.thrift.TFetchOrientation;
import org.apache.hive.service.cli.thrift.TFetchResultsReq;
@@ -37,9 +38,12 @@ import org.apache.hive.service.cli.thrif
import org.apache.hive.service.cli.thrift.TGetResultSetMetadataReq;
import org.apache.hive.service.cli.thrift.TGetResultSetMetadataResp;
import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.apache.hive.service.cli.thrift.TPrimitiveTypeEntry;
import org.apache.hive.service.cli.thrift.TRow;
import org.apache.hive.service.cli.thrift.TSessionHandle;
import org.apache.hive.service.cli.thrift.TTableSchema;
+import org.apache.hive.service.cli.thrift.TTypeQualifierValue;
+import org.apache.hive.service.cli.thrift.TTypeQualifiers;
/**
* HiveQueryResultSet.
@@ -51,6 +55,7 @@ public class HiveQueryResultSet extends
private TCLIService.Iface client;
private TOperationHandle stmtHandle;
+ private HiveStatement hiveStatement;
private TSessionHandle sessHandle;
private int maxRows;
private int fetchSize;
@@ -66,6 +71,7 @@ public class HiveQueryResultSet extends
private TCLIService.Iface client = null;
private TOperationHandle stmtHandle = null;
private TSessionHandle sessHandle = null;
+ private HiveStatement hiveStatement = null;
/**
* Sets the limit for the maximum number of rows that any ResultSet object produced by this
@@ -76,6 +82,7 @@ public class HiveQueryResultSet extends
private boolean retrieveSchema = true;
private List<String> colNames;
private List<String> colTypes;
+ private List<JdbcColumnAttributes> colAttributes;
private int fetchSize = 50;
private boolean emptyResultSet = false;
@@ -94,16 +101,34 @@ public class HiveQueryResultSet extends
return this;
}
+ public Builder setHiveStatement(HiveStatement hiveStatement) {
+ this.hiveStatement = hiveStatement;
+ return this;
+ }
+
public Builder setMaxRows(int maxRows) {
this.maxRows = maxRows;
return this;
}
public Builder setSchema(List<String> colNames, List<String> colTypes) {
+ // no column attributes provided - create list of null attributes.
+ List<JdbcColumnAttributes> colAttributes =
+ new ArrayList<JdbcColumnAttributes>();
+ for (int idx = 0; idx < colTypes.size(); ++idx) {
+ colAttributes.add(null);
+ }
+ return setSchema(colNames, colTypes, colAttributes);
+ }
+
+ public Builder setSchema(List<String> colNames, List<String> colTypes,
+ List<JdbcColumnAttributes> colAttributes) {
this.colNames = new ArrayList<String>();
this.colNames.addAll(colNames);
this.colTypes = new ArrayList<String>();
this.colTypes.addAll(colTypes);
+ this.colAttributes = new ArrayList<JdbcColumnAttributes>();
+ this.colAttributes.addAll(colAttributes);
this.retrieveSchema = false;
return this;
}
@@ -128,13 +153,14 @@ public class HiveQueryResultSet extends
this.stmtHandle = builder.stmtHandle;
this.sessHandle = builder.sessHandle;
this.fetchSize = builder.fetchSize;
+ this.hiveStatement = builder.hiveStatement;
columnNames = new ArrayList<String>();
columnTypes = new ArrayList<String>();
+ columnAttributes = new ArrayList<JdbcColumnAttributes>();
if (builder.retrieveSchema) {
retrieveSchema();
} else {
- this.columnNames.addAll(builder.colNames);
- this.columnTypes.addAll(builder.colTypes);
+ this.setSchema(builder.colNames, builder.colTypes, builder.colAttributes);
}
this.emptyResultSet = builder.emptyResultSet;
if (builder.emptyResultSet) {
@@ -145,6 +171,32 @@ public class HiveQueryResultSet extends
}
/**
+ * Generate a JdbcColumnAttributes object from the type qualifiers of a TPrimitiveTypeEntry
+ * @param primitiveTypeEntry the primitive type entry, possibly carrying type qualifiers
+ * @return the generated JdbcColumnAttributes, or null if no qualifiers apply
+ */
+ private static JdbcColumnAttributes getColumnAttributes(
+ TPrimitiveTypeEntry primitiveTypeEntry) {
+ JdbcColumnAttributes ret = null;
+ if (primitiveTypeEntry.isSetTypeQualifiers()) {
+ TTypeQualifiers tq = primitiveTypeEntry.getTypeQualifiers();
+ switch (primitiveTypeEntry.getType()) {
+ case VARCHAR_TYPE:
+ TTypeQualifierValue val =
+ tq.getQualifiers().get(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH);
+ if (val != null) {
+ // precision is char length
+ ret = new JdbcColumnAttributes(val.getI32Value(), 0);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ return ret;
+ }
+
+ /**
* Retrieve schema from the server
*/
private void retrieveSchema() throws SQLException {
@@ -172,9 +224,11 @@ public class HiveQueryResultSet extends
}
String columnName = columns.get(pos).getColumnName();
columnNames.add(columnName);
- String columnTypeName = TYPE_NAMES.get(
- columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry().getType());
+ TPrimitiveTypeEntry primitiveTypeEntry =
+ columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry();
+ String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType());
columnTypes.add(columnTypeName);
+ columnAttributes.add(getColumnAttributes(primitiveTypeEntry));
}
} catch (SQLException eS) {
throw eS; // rethrow the SQLException as is
@@ -189,16 +243,22 @@ public class HiveQueryResultSet extends
* @param colNames
* @param colTypes
*/
- public void setSchema(List<String> colNames, List<String> colTypes) {
+ private void setSchema(List<String> colNames, List<String> colTypes,
+ List<JdbcColumnAttributes> colAttributes) {
columnNames.addAll(colNames);
columnTypes.addAll(colTypes);
+ columnAttributes.addAll(colAttributes);
}
@Override
public void close() throws SQLException {
+ if (hiveStatement != null) {
+ hiveStatement.closeClientOperation();
+ }
// Need reset during re-open when needed
client = null;
stmtHandle = null;
+ hiveStatement = null;
sessHandle = null;
isClosed = true;
}
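
Schematically, getColumnAttributes() above performs a qualifier lookup like the sketch below; the Thrift types are replaced by plain maps here, so the names and shapes are illustrative, not the actual service API:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative stand-ins for the Thrift qualifier lookup above.
    public class ColumnAttributesSketch {
      static final String CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength";

      // Mimics new JdbcColumnAttributes(precision, scale) as an int pair.
      static int[] varcharAttributes(Map<String, Integer> qualifiers) {
        Integer len = qualifiers.get(CHARACTER_MAXIMUM_LENGTH);
        // for VARCHAR the declared character length becomes the JDBC precision
        return len == null ? null : new int[] {len, 0};
      }

      public static void main(String[] args) {
        Map<String, Integer> q = new HashMap<String, Integer>();
        q.put(CHARACTER_MAXIMUM_LENGTH, 20); // e.g. a varchar(20) column
        int[] attrs = varcharAttributes(q);
        System.out.println("precision=" + attrs[0] + ", scale=" + attrs[1]);
        // precision=20, scale=0
      }
    }
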
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java Sat Sep 21 15:49:49 2013
@@ -20,6 +20,7 @@ package org.apache.hive.jdbc;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
+import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde.serdeConstants;
@@ -31,11 +32,14 @@ import org.apache.hadoop.hive.serde.serd
public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
private final List<String> columnNames;
private final List<String> columnTypes;
+ private final List<JdbcColumnAttributes> columnAttributes;
public HiveResultSetMetaData(List<String> columnNames,
- List<String> columnTypes) {
+ List<String> columnTypes,
+ List<JdbcColumnAttributes> columnAttributes) {
this.columnNames = columnNames;
this.columnTypes = columnTypes;
+ this.columnAttributes = columnAttributes;
}
public String getCatalogName(int column) throws SQLException {
@@ -53,7 +57,7 @@ public class HiveResultSetMetaData imple
public int getColumnDisplaySize(int column) throws SQLException {
int columnType = getColumnType(column);
- return JdbcColumn.columnDisplaySize(columnType);
+ return JdbcColumn.columnDisplaySize(columnType, columnAttributes.get(column - 1));
}
public String getColumnLabel(int column) throws SQLException {
@@ -89,6 +93,8 @@ public class HiveResultSetMetaData imple
String type = columnTypes.get(column - 1);
if ("string".equalsIgnoreCase(type)) {
return serdeConstants.STRING_TYPE_NAME;
+ } else if ("varchar".equalsIgnoreCase(type)) {
+ return serdeConstants.VARCHAR_TYPE_NAME;
} else if ("float".equalsIgnoreCase(type)) {
return serdeConstants.FLOAT_TYPE_NAME;
} else if ("double".equalsIgnoreCase(type)) {
@@ -127,13 +133,13 @@ public class HiveResultSetMetaData imple
public int getPrecision(int column) throws SQLException {
int columnType = getColumnType(column);
- return JdbcColumn.columnPrecision(columnType);
+ return JdbcColumn.columnPrecision(columnType, columnAttributes.get(column - 1));
}
public int getScale(int column) throws SQLException {
int columnType = getColumnType(column);
- return JdbcColumn.columnScale(columnType);
+ return JdbcColumn.columnScale(columnType, columnAttributes.get(column - 1));
}
public String getSchemaName(int column) throws SQLException {
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java Sat Sep 21 15:49:49 2013
@@ -43,7 +43,7 @@ import org.apache.hive.service.cli.thrif
*/
public class HiveStatement implements java.sql.Statement {
private TCLIService.Iface client;
- private TOperationHandle stmtHandle;
+ private TOperationHandle stmtHandle = null;
private final TSessionHandle sessHandle;
Map<String,String> sessConf = new HashMap<String,String>();
private int fetchSize = 50;
@@ -102,6 +102,10 @@ public class HiveStatement implements ja
throw new SQLException("Can't cancel after statement has been closed");
}
+ if (stmtHandle == null) {
+ return;
+ }
+
TCancelOperationReq cancelReq = new TCancelOperationReq();
cancelReq.setOperationHandle(stmtHandle);
try {
@@ -134,7 +138,7 @@ public class HiveStatement implements ja
warningChain = null;
}
- private void closeClientOperation() throws SQLException {
+ void closeClientOperation() throws SQLException {
try {
if (stmtHandle != null) {
TCloseOperationReq closeReq = new TCloseOperationReq();
@@ -149,17 +153,19 @@ public class HiveStatement implements ja
}
stmtHandle = null;
}
+
/*
* (non-Javadoc)
*
* @see java.sql.Statement#close()
*/
-
public void close() throws SQLException {
if (isClosed) {
return;
}
- closeClientOperation();
+ if (stmtHandle != null) {
+ closeClientOperation();
+ }
client = null;
resultSet = null;
isClosed = true;
@@ -182,7 +188,10 @@ public class HiveStatement implements ja
}
try {
- closeClientOperation();
+ if (stmtHandle != null) {
+ closeClientOperation();
+ }
+
TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
execReq.setConfOverlay(sessConf);
TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
@@ -236,7 +245,7 @@ public class HiveStatement implements ja
return false;
}
resultSet = new HiveQueryResultSet.Builder().setClient(client).setSessionHandle(sessHandle)
- .setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
+ .setStmtHandle(stmtHandle).setHiveStatement(this).setMaxRows(maxRows).setFetchSize(fetchSize)
.build();
return true;
}
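
Together with the HiveQueryResultSet change above, the intended client lifecycle looks roughly like this usage sketch; the URL, credentials, and table name are placeholders, and a reachable HiveServer2 is assumed:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    // Usage sketch only: URL, credentials, and table name are placeholders.
    public class CloseLifecycleSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection con =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
        Statement stmt = con.createStatement();
        ResultSet rs = stmt.executeQuery("select * from src");
        rs.close();  // now also closes the server-side operation handle
        System.out.println(stmt.isClosed());  // false: the Statement stays usable
        rs = stmt.executeQuery("select * from src");  // a new query still works
        rs.close();
        stmt.close();  // this is what finally closes the Statement
        con.close();
      }
    }
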
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java Sat Sep 21 15:49:49 2013
@@ -63,22 +63,26 @@ public class JdbcColumn {
return Utils.hiveTypeToSqlType(type);
}
- static int columnDisplaySize(int columnType) throws SQLException {
+ static int columnDisplaySize(int columnType, JdbcColumnAttributes columnAttributes)
+ throws SQLException {
// according to hiveTypeToSqlType possible options are:
switch(columnType) {
case Types.BOOLEAN:
- return columnPrecision(columnType);
+ return columnPrecision(columnType, columnAttributes);
case Types.VARCHAR:
+ if (columnAttributes != null) {
+ return columnAttributes.precision;
+ }
return Integer.MAX_VALUE; // hive has no max limit for strings
case Types.TINYINT:
case Types.SMALLINT:
case Types.INTEGER:
case Types.BIGINT:
- return columnPrecision(columnType) + 1; // allow +/-
+ return columnPrecision(columnType, columnAttributes) + 1; // allow +/-
case Types.DATE:
return 10;
case Types.TIMESTAMP:
- return columnPrecision(columnType);
+ return columnPrecision(columnType, columnAttributes);
// see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
case Types.FLOAT:
@@ -87,18 +91,25 @@ public class JdbcColumn {
case Types.DOUBLE:
return 25; // e.g. -(17#).e-####
case Types.DECIMAL:
+ if (columnAttributes != null) {
+ return columnAttributes.precision + 2; // '-' sign and '.'
+ }
return Integer.MAX_VALUE;
default:
throw new SQLException("Invalid column type: " + columnType);
}
}
- static int columnPrecision(int columnType) throws SQLException {
+ static int columnPrecision(int columnType, JdbcColumnAttributes columnAttributes)
+ throws SQLException {
// according to hiveTypeToSqlType possible options are:
switch(columnType) {
case Types.BOOLEAN:
return 1;
case Types.VARCHAR:
+ if (columnAttributes != null) {
+ return columnAttributes.precision;
+ }
return Integer.MAX_VALUE; // hive has no max limit for strings
case Types.TINYINT:
return 3;
@@ -117,13 +128,17 @@ public class JdbcColumn {
case Types.TIMESTAMP:
return 29;
case Types.DECIMAL:
+ if (columnAttributes != null) {
+ return columnAttributes.precision;
+ }
return Integer.MAX_VALUE;
default:
throw new SQLException("Invalid column type: " + columnType);
}
}
- static int columnScale(int columnType) throws SQLException {
+ static int columnScale(int columnType, JdbcColumnAttributes columnAttributes)
+ throws SQLException {
// according to hiveTypeToSqlType possible options are:
switch(columnType) {
case Types.BOOLEAN:
@@ -141,22 +156,15 @@ public class JdbcColumn {
case Types.TIMESTAMP:
return 9;
case Types.DECIMAL:
+ if (columnAttributes != null) {
+ return columnAttributes.scale;
+ }
return Integer.MAX_VALUE;
default:
throw new SQLException("Invalid column type: " + columnType);
}
}
- public Integer getColumnSize() throws SQLException {
- int precision = columnPrecision(Utils.hiveTypeToSqlType(type));
-
- return precision == 0 ? null : precision;
- }
-
- public Integer getDecimalDigits() throws SQLException {
- return columnScale(Utils.hiveTypeToSqlType(type));
- }
-
public Integer getNumPrecRadix() {
if (type.equalsIgnoreCase("tinyint")) {
return 10;
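
The practical effect of threading JdbcColumnAttributes through these methods: a varchar(20) column now reports precision 20 instead of Integer.MAX_VALUE. A schematic sketch, with the attributes object reduced to a plain Integer:

    import java.sql.Types;

    // Schematic version of columnPrecision() above for the attribute-aware types.
    public class PrecisionSketch {
      static int columnPrecision(int columnType, Integer declaredPrecision) {
        switch (columnType) {
          case Types.VARCHAR:
          case Types.DECIMAL:
            // with attributes, report the declared size; without, fall back
            return declaredPrecision != null ? declaredPrecision : Integer.MAX_VALUE;
          default:
            throw new IllegalArgumentException("not covered in this sketch: " + columnType);
        }
      }

      public static void main(String[] args) {
        System.out.println(columnPrecision(Types.VARCHAR, 20));   // 20 (varchar(20))
        System.out.println(columnPrecision(Types.VARCHAR, null)); // 2147483647 (plain string)
      }
    }
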
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java Sat Sep 21 15:49:49 2013
@@ -114,6 +114,8 @@ public class Utils {
public static int hiveTypeToSqlType(String type) throws SQLException {
if ("string".equalsIgnoreCase(type)) {
return Types.VARCHAR;
+ } else if ("varchar".equalsIgnoreCase(type)) {
+ return Types.VARCHAR;
} else if ("float".equalsIgnoreCase(type)) {
return Types.FLOAT;
} else if ("double".equalsIgnoreCase(type)) {
Modified: hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Sat Sep 21 15:49:49 2013
@@ -130,7 +130,7 @@ public class TestJdbcDriver2 extends Tes
stmt.execute("create table " + partitionedTableName
+ " (under_col int, value string) comment '"+partitionedTableComment
- +"' partitioned by (" + partitionedColumnName + " STRING)");
+ +"' partitioned by (" + partitionedColumnName + " STRING)");
// load data
stmt.execute("load data local inpath '"
@@ -145,7 +145,7 @@ public class TestJdbcDriver2 extends Tes
fail(ex.toString());
}
- stmt.execute("create table " + dataTypeTableName
+ stmt.execute("create table " + dataTypeTableName
+ " (c1 int, c2 boolean, c3 double, c4 string,"
+ " c5 array<int>, c6 map<int,string>, c7 map<string,string>,"
+ " c8 struct<r:string,s:int,t:double>,"
@@ -157,8 +157,10 @@ public class TestJdbcDriver2 extends Tes
+ " c17 timestamp, "
+ " c18 decimal, "
+ " c19 binary, "
- + " c20 date) comment'" + dataTypeTableComment
- +"' partitioned by (dt STRING)");
+ + " c20 date,"
+ + " c21 varchar(20)"
+ + ") comment'" + dataTypeTableComment
+ +"' partitioned by (dt STRING)");
stmt.execute("load data local inpath '"
+ dataTypeDataFilePath.toString() + "' into table " + dataTypeTableName
@@ -173,7 +175,7 @@ public class TestJdbcDriver2 extends Tes
// create view
stmt.execute("create view " + viewName + " comment '"+viewComment
- +"' as select * from "+ tableName);
+ +"' as select * from "+ tableName);
}
@Override
@@ -205,7 +207,7 @@ public class TestJdbcDriver2 extends Tes
public void testBadURL() throws Exception {
checkBadUrl("jdbc:hive2://localhost:10000;principal=test");
checkBadUrl("jdbc:hive2://localhost:10000;" +
- "principal=hive/HiveServer2Host@YOUR-REALM.COM");
+ "principal=hive/HiveServer2Host@YOUR-REALM.COM");
checkBadUrl("jdbc:hive2://localhost:10000test");
}
@@ -270,7 +272,7 @@ public class TestJdbcDriver2 extends Tes
ResultSet res = stmt.executeQuery(
"explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
- "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1");
+ "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1");
ResultSetMetaData md = res.getMetaData();
assertEquals(md.getColumnCount(), 1); // only one result column
@@ -288,7 +290,7 @@ public class TestJdbcDriver2 extends Tes
+ " and date '2012-01-01' = date ?"
+ " ) t select '2011-03-25' ddate,'China',true bv, 10 num limit 10";
- ///////////////////////////////////////////////
+ ///////////////////////////////////////////////
//////////////////// correct testcase
//////////////////////////////////////////////
try {
@@ -340,7 +342,7 @@ public class TestJdbcDriver2 extends Tes
fail(e.toString());
}
- ///////////////////////////////////////////////
+ ///////////////////////////////////////////////
//////////////////// other failure testcases
//////////////////////////////////////////////
// set nothing for prepared sql
@@ -506,15 +508,13 @@ public class TestJdbcDriver2 extends Tes
public void testNullResultSet() throws Exception {
List<String> setupQueries = new ArrayList<String>();
String testQuery;
- boolean hasResultSet;
Statement stmt = con.createStatement();
// -select- should return a ResultSet
try {
stmt.executeQuery("select * from " + tableName);
System.out.println("select: success");
- }
- catch(SQLException e) {
+ } catch(SQLException e) {
failWithExceptionMsg(e);
}
@@ -540,6 +540,39 @@ public class TestJdbcDriver2 extends Tes
stmt.close();
}
+ public void testCloseResultSet() throws Exception {
+ Statement stmt = con.createStatement();
+
+ // execute query, ignore exception if any
+ ResultSet res = stmt.executeQuery("select * from " + tableName);
+ // close ResultSet, ignore exception if any
+ res.close();
+ // A statement should be open even after ResultSet#close
+ assertFalse(stmt.isClosed());
+ // A Statement#cancel after ResultSet#close should be a no-op
+ try {
+ stmt.cancel();
+ } catch(SQLException e) {
+ failWithExceptionMsg(e);
+ }
+ stmt.close();
+
+ stmt = con.createStatement();
+ // execute query, ignore exception if any
+ res = stmt.executeQuery("select * from " + tableName);
+ // close ResultSet, ignore exception if any
+ res.close();
+ // A Statement#execute after ResultSet#close should be fine too
+ try {
+ stmt.executeQuery("select * from " + tableName);
+ } catch(SQLException e) {
+ failWithExceptionMsg(e);
+ }
+ // A Statement#close after ResultSet#close should close the statement
+ stmt.close();
+ assertTrue(stmt.isClosed());
+ }
+
public void testDataTypes() throws Exception {
Statement stmt = con.createStatement();
@@ -575,6 +608,7 @@ public class TestJdbcDriver2 extends Tes
assertEquals(null, res.getString(19));
assertEquals(null, res.getString(20));
assertEquals(null, res.getDate(20));
+ assertEquals(null, res.getString(21));
// row 2
assertTrue(res.next());
@@ -600,6 +634,7 @@ public class TestJdbcDriver2 extends Tes
assertEquals(null, res.getString(19));
assertEquals(null, res.getString(20));
assertEquals(null, res.getDate(20));
+ assertEquals(null, res.getString(21));
// row 3
assertTrue(res.next());
@@ -625,6 +660,7 @@ public class TestJdbcDriver2 extends Tes
assertEquals("abcd", res.getString(19));
assertEquals("2013-01-01", res.getString(20));
assertEquals("2013-01-01", res.getDate(20).toString());
+ assertEquals("abc123", res.getString(21));
// test getBoolean rules on non-boolean columns
assertEquals(true, res.getBoolean(1));
@@ -673,7 +709,7 @@ public class TestJdbcDriver2 extends Tes
ResultSetMetaData meta = res.getMetaData();
int expectedColCount = isPartitionTable ? 3 : 2;
assertEquals(
- "Unexpected column count", expectedColCount, meta.getColumnCount());
+ "Unexpected column count", expectedColCount, meta.getColumnCount());
boolean moreRow = res.next();
while (moreRow) {
@@ -740,7 +776,7 @@ public class TestJdbcDriver2 extends Tes
doTestErrorCase("SELECT invalid_column FROM " + tableName,
"Invalid table alias or column reference", invalidSyntaxSQLState, 10004);
doTestErrorCase("SELECT invalid_function(under_col) FROM " + tableName,
- "Invalid function", invalidSyntaxSQLState, 10011);
+ "Invalid function", invalidSyntaxSQLState, 10011);
// TODO: execute errors like this currently don't return good error
// codes and messages. This should be fixed.
@@ -817,8 +853,8 @@ public class TestJdbcDriver2 extends Tes
private void getTablesTest(String tableTypeName, String viewTypeName) throws SQLException {
Map<String, Object[]> tests = new HashMap<String, Object[]>();
tests.put("test%jdbc%", new Object[]{"testhivejdbcdriver_table"
- , "testhivejdbcdriverpartitionedtable"
- , "testhivejdbcdriverview"});
+ , "testhivejdbcdriverpartitionedtable"
+ , "testhivejdbcdriverview"});
tests.put("%jdbcdriver\\_table", new Object[]{"testhivejdbcdriver_table"});
tests.put("testhivejdbcdriver\\_table", new Object[]{"testhivejdbcdriver_table"});
tests.put("test_ivejdbcdri_er\\_table", new Object[]{"testhivejdbcdriver_table"});
@@ -826,8 +862,8 @@ public class TestJdbcDriver2 extends Tes
tests.put("test_ivejdbcdri_er%table", new Object[]{
"testhivejdbcdriver_table", "testhivejdbcdriverpartitionedtable" });
tests.put("%jdbc%", new Object[]{ "testhivejdbcdriver_table"
- , "testhivejdbcdriverpartitionedtable"
- , "testhivejdbcdriverview"});
+ , "testhivejdbcdriverpartitionedtable"
+ , "testhivejdbcdriverview"});
tests.put("", new Object[]{});
for (String checkPattern: tests.keySet()) {
@@ -861,7 +897,7 @@ public class TestJdbcDriver2 extends Tes
// only ask for the views.
ResultSet rs = (ResultSet)con.getMetaData().getTables("default", null, null
- , new String[]{viewTypeName});
+ , new String[]{viewTypeName});
int cnt=0;
while (rs.next()) {
cnt++;
@@ -888,7 +924,7 @@ public class TestJdbcDriver2 extends Tes
assertTrue(rs.next());
assertEquals("default", rs.getString(1));
-// assertNull(rs.getString(2));
+ // assertNull(rs.getString(2));
assertFalse(rs.next());
rs.close();
@@ -949,7 +985,7 @@ public class TestJdbcDriver2 extends Tes
tests.put(new String[]{"testhiveJDBC%", null}, 7);
tests.put(new String[]{"%jdbcdriver\\_table", null}, 2);
tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_col"}, 1);
-// tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_COL"}, 1);
+ // tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_COL"}, 1);
tests.put(new String[]{"%jdbcdriver\\_table%", "under\\_co_"}, 1);
tests.put(new String[]{"%jdbcdriver\\_table%", "under_col"}, 1);
tests.put(new String[]{"%jdbcdriver\\_table%", "und%"}, 1);
@@ -969,16 +1005,16 @@ public class TestJdbcDriver2 extends Tes
String columnname = rs.getString("COLUMN_NAME");
int ordinalPos = rs.getInt("ORDINAL_POSITION");
switch(cnt) {
- case 0:
- assertEquals("Wrong column name found", "under_col", columnname);
- assertEquals("Wrong ordinal position found", ordinalPos, 1);
- break;
- case 1:
- assertEquals("Wrong column name found", "value", columnname);
- assertEquals("Wrong ordinal position found", ordinalPos, 2);
- break;
- default:
- break;
+ case 0:
+ assertEquals("Wrong column name found", "under_col", columnname);
+ assertEquals("Wrong ordinal position found", ordinalPos, 1);
+ break;
+ case 1:
+ assertEquals("Wrong column name found", "value", columnname);
+ assertEquals("Wrong ordinal position found", ordinalPos, 2);
+ break;
+ default:
+ break;
}
cnt++;
}
@@ -992,7 +1028,7 @@ public class TestJdbcDriver2 extends Tes
*/
public void testMetaDataGetColumnsMetaData() throws SQLException {
ResultSet rs = (ResultSet)con.getMetaData().getColumns(null, null
- , "testhivejdbcdriver\\_table", null);
+ , "testhivejdbcdriver\\_table", null);
ResultSetMetaData rsmd = rs.getMetaData();
@@ -1045,7 +1081,7 @@ public class TestJdbcDriver2 extends Tes
}
}
}
- */
+ */
public void testDescribeTable() throws SQLException {
Statement stmt = con.createStatement();
@@ -1088,14 +1124,14 @@ public class TestJdbcDriver2 extends Tes
ResultSet res = stmt.executeQuery(
"select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
- "c1*2, sentences(null, null, null) as b, c17, c18, c20 from " + dataTypeTableName +
+ "c1*2, sentences(null, null, null) as b, c17, c18, c20, c21 from " + dataTypeTableName +
" limit 1");
ResultSetMetaData meta = res.getMetaData();
ResultSet colRS = con.getMetaData().getColumns(null, null,
dataTypeTableName.toLowerCase(), null);
- assertEquals(17, meta.getColumnCount());
+ assertEquals(18, meta.getColumnCount());
assertTrue(colRS.next());
@@ -1303,6 +1339,14 @@ public class TestJdbcDriver2 extends Tes
assertEquals(10, meta.getPrecision(17));
assertEquals(0, meta.getScale(17));
+ assertEquals("c21", meta.getColumnName(18));
+ assertEquals(Types.VARCHAR, meta.getColumnType(18));
+ assertEquals("varchar", meta.getColumnTypeName(18));
+ // varchar columns should have correct display size/precision
+ assertEquals(20, meta.getColumnDisplaySize(18));
+ assertEquals(20, meta.getPrecision(18));
+ assertEquals(0, meta.getScale(18));
+
for (int i = 1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
@@ -1312,10 +1356,10 @@ public class TestJdbcDriver2 extends Tes
// [url] [host] [port] [db]
private static final String[][] URL_PROPERTIES = new String[][] {
- {"jdbc:hive2://", "", "", "default"},
- {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
- {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
- {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+ {"jdbc:hive2://", "", "", "default"},
+ {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
+ {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
+ {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
public void testDriverProperties() throws SQLException {
HiveDriver driver = new HiveDriver();
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sat Sep 21 15:49:49 2013
@@ -195,6 +195,8 @@ public final class Utilities {
public static String HADOOP_LOCAL_FS = "file:///";
public static String MAP_PLAN_NAME = "map.xml";
public static String REDUCE_PLAN_NAME = "reduce.xml";
+ public static final String MAPRED_MAPPER_CLASS = "mapred.mapper.class";
+ public static final String MAPRED_REDUCER_CLASS = "mapred.reducer.class";
/**
* ReduceField:
@@ -269,9 +271,18 @@ public final class Utilities {
return (ReduceWork) getBaseWork(conf, REDUCE_PLAN_NAME);
}
+ /**
+ * Returns the Map or Reduce plan.
+ * Side effect: the BaseWork returned is also placed in the gWorkMap.
+ * @param conf the job configuration
+ * @param name the plan file name (map.xml or reduce.xml)
+ * @return the BaseWork for the supplied name, or null if the plan file cannot be found
+ * @throws RuntimeException if the configuration is malformed or the plan cannot be loaded
+ */
private static BaseWork getBaseWork(Configuration conf, String name) {
BaseWork gWork = null;
Path path = null;
+ InputStream in = null;
try {
path = getPlanPath(conf, name);
assert path != null;
@@ -283,24 +294,26 @@ public final class Utilities {
} else {
localPath = new Path(name);
}
- InputStream in = new FileInputStream(localPath.toUri().getPath());
+ in = new FileInputStream(localPath.toUri().getPath());
if(MAP_PLAN_NAME.equals(name)){
- if (ExecMapper.class.getName().equals(conf.get("mapred.mapper.class"))){
+ if (ExecMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))){
gWork = deserializePlan(in, MapWork.class, conf);
- } else if(RCFileMergeMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ } else if(RCFileMergeMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
gWork = deserializePlan(in, MergeWork.class, conf);
- } else if(ColumnTruncateMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ } else if(ColumnTruncateMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
gWork = deserializePlan(in, ColumnTruncateWork.class, conf);
- } else if(PartialScanMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ } else if(PartialScanMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
gWork = deserializePlan(in, PartialScanWork.class,conf);
} else {
- assert false;
+ throw new RuntimeException("unable to determine work from configuration ."
+ + MAPRED_MAPPER_CLASS + " was "+ conf.get(MAPRED_MAPPER_CLASS)) ;
}
- } else {
- if(ExecReducer.class.getName().equals(conf.get("mapred.reducer.class"))) {
+ } else if (REDUCE_PLAN_NAME.equals(name)) {
+ if(ExecReducer.class.getName().equals(conf.get(MAPRED_REDUCER_CLASS))) {
gWork = deserializePlan(in, ReduceWork.class, conf);
} else {
- assert false;
+ throw new RuntimeException("unable to determine work from configuration ."
+ + MAPRED_REDUCER_CLASS +" was "+ conf.get(MAPRED_REDUCER_CLASS)) ;
}
}
gWorkMap.put(path, gWork);
@@ -311,9 +324,14 @@ public final class Utilities {
LOG.debug("No plan file found: "+path);
return null;
} catch (Exception e) {
- e.printStackTrace();
LOG.error("Failed to load plan: "+path, e);
throw new RuntimeException(e);
+ } finally {
+ if (in != null) {
+ try {
+ in.close();
+ } catch (IOException cantBlameMeForTrying) { }
+ }
}
}
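
The mapper-class dispatch above can be read as a lookup table from configured class name to plan type. The sketch below is illustrative only: plan types are reduced to strings, and the real code compares fully-qualified class names and deserializes the corresponding *Work class:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative lookup behind getBaseWork().
    public class PlanDispatchSketch {
      private static final Map<String, String> PLAN_FOR_MAPPER = new HashMap<String, String>();
      static {
        PLAN_FOR_MAPPER.put("ExecMapper", "MapWork");
        PLAN_FOR_MAPPER.put("RCFileMergeMapper", "MergeWork");
        PLAN_FOR_MAPPER.put("ColumnTruncateMapper", "ColumnTruncateWork");
        PLAN_FOR_MAPPER.put("PartialScanMapper", "PartialScanWork");
      }

      static String planFor(String mapperClass) {
        String plan = PLAN_FOR_MAPPER.get(mapperClass);
        if (plan == null) {
          // mirrors the new behavior above: fail loudly instead of 'assert false'
          throw new RuntimeException("unable to determine work from configuration. "
              + "mapred.mapper.class was " + mapperClass);
        }
        return plan;
      }

      public static void main(String[] args) {
        System.out.println(planFor("ExecMapper")); // MapWork
      }
    }
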
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sat Sep 21 15:49:49 2013
@@ -38,7 +38,6 @@ import java.util.Set;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
@@ -130,7 +129,6 @@ import org.apache.hadoop.hive.serde.serd
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
@@ -2585,41 +2583,37 @@ public class DDLSemanticAnalyzer extends
private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
throws SemanticException {
+ // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+ boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS;
+
Table tab = getTable(tblName, true);
boolean isView = tab.isView();
validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
inputs.add(new ReadEntity(tab));
- // partition name to value
- List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
- addTablePartsOutputs(tblName, partSpecs);
+ List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
- Iterator<Map<String, String>> partIter = partSpecs.iterator();
+ int numCh = ast.getChildCount();
+ int start = ifNotExists ? 2 : 1;
String currentLocation = null;
Map<String, String> currentPart = null;
- boolean ifNotExists = false;
- List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
-
- int numCh = ast.getChildCount();
- for (int num = 1; num < numCh; num++) {
- CommonTree child = (CommonTree) ast.getChild(num);
+ for (int num = start; num < numCh; num++) {
+ ASTNode child = (ASTNode) ast.getChild(num);
switch (child.getToken().getType()) {
- case HiveParser.TOK_IFNOTEXISTS:
- ifNotExists = true;
- break;
case HiveParser.TOK_PARTSPEC:
if (currentPart != null) {
- validatePartitionValues(currentPart);
- AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
- SessionState.get().getCurrentDatabase(), tblName, currentPart,
+ Partition partition = getPartitionForOutput(tab, currentPart);
+ if (partition == null || !ifNotExists) {
+ AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+ tab.getDbName(), tblName, currentPart,
currentLocation, ifNotExists, expectView);
- partitionDescs.add(addPartitionDesc);
+ partitionDescs.add(addPartitionDesc);
+ }
+ currentLocation = null;
}
- // create new partition, set values
- currentLocation = null;
- currentPart = partIter.next();
+ currentPart = getPartSpec(child);
break;
case HiveParser.TOK_PARTITIONLOCATION:
// if location specified, set in partition
@@ -2632,11 +2626,18 @@ public class DDLSemanticAnalyzer extends
// add the last one
if (currentPart != null) {
- validatePartitionValues(currentPart);
- AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
- SessionState.get().getCurrentDatabase(), tblName, currentPart,
+ Partition partition = getPartitionForOutput(tab, currentPart);
+ if (partition == null || !ifNotExists) {
+ AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+ tab.getDbName(), tblName, currentPart,
currentLocation, ifNotExists, expectView);
- partitionDescs.add(addPartitionDesc);
+ partitionDescs.add(addPartitionDesc);
+ }
+ }
+
+ if (partitionDescs.isEmpty()) {
+ // nothing to do
+ return;
}
for (AddPartitionDesc addPartitionDesc : partitionDescs) {
@@ -2696,6 +2697,21 @@ public class DDLSemanticAnalyzer extends
}
}
+ private Partition getPartitionForOutput(Table tab, Map<String, String> currentPart)
+ throws SemanticException {
+ validatePartitionValues(currentPart);
+ try {
+ Partition partition = db.getPartition(tab, currentPart, false);
+ if (partition != null) {
+ outputs.add(new WriteEntity(partition));
+ }
+ return partition;
+ } catch (HiveException e) {
+ LOG.warn("wrong partition spec " + currentPart);
+ }
+ return null;
+ }
+
/**
* Rewrite the metadata for one or more partitions in a table. Useful when
* an external process modifies files on HDFS and you want the pre/post
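The analyzer now consults the metastore once per spec and, under IF NOT EXISTS, drops specs whose partitions already exist, which is why the task list can legitimately end up empty. A rough sketch of that filtering step, with a hypothetical lookup interface standing in for db.getPartition:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class AddPartsFilterSketch {
  // Hypothetical stand-in for db.getPartition(tab, spec, false) != null.
  interface MetastoreLookup {
    boolean partitionExists(Map<String, String> spec);
  }

  // Keep only the specs that should still produce an ADD PARTITION task.
  static List<Map<String, String>> toCreate(List<Map<String, String>> specs,
                                            boolean ifNotExists,
                                            MetastoreLookup db) {
    List<Map<String, String>> kept = new ArrayList<Map<String, String>>();
    for (Map<String, String> spec : specs) {
      // Under IF NOT EXISTS an existing partition is silently skipped;
      // without it the desc is kept and creation fails later, as before.
      if (!(ifNotExists && db.partitionExists(spec))) {
        kept.add(spec);
      }
    }
    return kept;  // may be empty, in which case the analyzer returns early
  }
}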
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sat Sep 21 15:49:49 2013
@@ -952,8 +952,12 @@ alterStatementChangeColPosition
alterStatementSuffixAddPartitions
@init { msgs.push("add partition statement"); }
@after { msgs.pop(); }
- : identifier KW_ADD ifNotExists? partitionSpec partitionLocation? (partitionSpec partitionLocation?)*
- -> ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? (partitionSpec partitionLocation?)+)
+ : identifier KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
+ -> ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
+ ;
+
+alterStatementSuffixAddPartitionsElement
+ : partitionSpec partitionLocation?
;
alterStatementSuffixTouch
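The grammar change factors each partitionSpec/partitionLocation pair into its own subrule, so one ADD can carry several of them while IF NOT EXISTS still appears at most once, before the first element (hence the analyzer's start index of 2 when TOK_IFNOTEXISTS is present). A statement the reworked rule accepts, driven through the Hive JDBC driver; the URL, credentials, table, and locations are illustrative:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class AddPartitionsExample {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");  // register the driver
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "hive", "");
    Statement stmt = con.createStatement();
    try {
      // One ADD carrying two partitionSpec elements, only the first
      // with an explicit partitionLocation.
      stmt.execute("ALTER TABLE page_view ADD IF NOT EXISTS"
          + " PARTITION (dt='2013-09-20') LOCATION '/data/pv/2013-09-20'"
          + " PARTITION (dt='2013-09-21')");
    } finally {
      stmt.close();
      con.close();
    }
  }
}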
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java Sat Sep 21 15:49:49 2013
@@ -131,6 +131,7 @@ public class AddPartitionDesc extends DD
/**
* @return location of partition in relation to table
*/
+ @Explain(displayName = "Location")
public String getLocation() {
return location;
}
@@ -150,6 +151,11 @@ public class AddPartitionDesc extends DD
return partSpec;
}
+ @Explain(displayName = "Spec")
+ public String getPartSpecString() {
+ return partSpec.toString();
+ }
+
/**
* @param partSpec
* partition specification
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Sat Sep 21 15:49:49 2013
@@ -802,6 +802,7 @@ public class DDLWork implements Serializ
/**
* @return information about the partitions we want to add.
*/
+ @Explain(displayName = "Add Partition Operator")
public AddPartitionDesc getAddPartitionDesc() {
return addPartitionDesc;
}
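The two @Explain annotations above are how plan descriptions surface in EXPLAIN output: the explain serializer walks the work objects and prints each annotated getter's value under its displayName. A brief sketch of the pattern, assuming hive-exec on the classpath (the class itself is hypothetical):

import org.apache.hadoop.hive.ql.plan.Explain;

public class ExampleDesc {
  private String location;

  // EXPLAIN renders this getter's value under the label "Location".
  @Explain(displayName = "Location")
  public String getLocation() {
    return location;
  }

  public void setLocation(String location) {
    this.location = location;
  }
}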
Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Sat Sep 21 15:49:49 2013
@@ -897,6 +897,15 @@ public class QTestUtil {
}
}
+ private final Pattern[] xmlPlanMask = toPattern(new String[] {
+ "<java version=\".*\" class=\"java.beans.XMLDecoder\">",
+ "<string>.*/tmp/.*</string>",
+ "<string>file:.*</string>",
+ "<string>pfile:.*</string>",
+ "<string>[0-9]{10}</string>",
+ "<string>/.*/warehouse/.*</string>"
+ });
+
public int checkPlan(String tname, List<Task<? extends Serializable>> tasks) throws Exception {
if (tasks == null) {
@@ -916,17 +925,8 @@ public class QTestUtil {
Utilities.serializePlan(plan, ofs, conf);
}
- String[] patterns = new String[] {
- "<java version=\".*\" class=\"java.beans.XMLDecoder\">",
- "<string>.*/tmp/.*</string>",
- "<string>file:.*</string>",
- "<string>pfile:.*</string>",
- "<string>[0-9]{10}</string>",
- "<string>/.*/warehouse/.*</string>"
- };
-
fixXml4JDK7(outf.getPath());
- maskPatterns(patterns, outf.getPath());
+ maskPatterns(xmlPlanMask, outf.getPath());
int exitVal = executeDiffCommand(outf.getPath(), planFile, true, false);
@@ -1065,13 +1065,21 @@ public class QTestUtil {
* Get the value of the element in input. (Note: the returned value has no quotes.)
*/
private static String getElementValue(String line, String name) {
- assert(line.indexOf("<" + name + ">") != -1);
+ assert(line.contains("<" + name + ">"));
int start = line.indexOf("<" + name + ">") + name.length() + 2;
int end = line.indexOf("</" + name + ">");
return line.substring(start, end);
}
- private void maskPatterns(String[] patterns, String fname) throws Exception {
+ private Pattern[] toPattern(String[] patternStrs) {
+ Pattern[] patterns = new Pattern[patternStrs.length];
+ for (int i = 0; i < patternStrs.length; i++) {
+ patterns[i] = Pattern.compile(patternStrs[i]);
+ }
+ return patterns;
+ }
+
+ private void maskPatterns(Pattern[] patterns, String fname) throws Exception {
String maskPattern = "#### A masked pattern was here ####";
String line;
@@ -1092,8 +1100,8 @@ public class QTestUtil {
boolean lastWasMasked = false;
while (null != (line = in.readLine())) {
- for (String pattern : patterns) {
- line = line.replaceAll(pattern, maskPattern);
+ for (Pattern pattern : patterns) {
+ line = pattern.matcher(line).replaceAll(maskPattern);
}
if (line.equals(maskPattern)) {
@@ -1114,47 +1122,46 @@ public class QTestUtil {
out.close();
}
+ private final Pattern[] planMask = toPattern(new String[] {
+ ".*file:.*",
+ ".*pfile:.*",
+ ".*hdfs:.*",
+ ".*/tmp/.*",
+ ".*invalidscheme:.*",
+ ".*lastUpdateTime.*",
+ ".*lastAccessTime.*",
+ ".*lastModifiedTime.*",
+ ".*[Oo]wner.*",
+ ".*CreateTime.*",
+ ".*LastAccessTime.*",
+ ".*Location.*",
+ ".*LOCATION '.*",
+ ".*transient_lastDdlTime.*",
+ ".*last_modified_.*",
+ ".*at org.*",
+ ".*at sun.*",
+ ".*at java.*",
+ ".*at junit.*",
+ ".*Caused by:.*",
+ ".*LOCK_QUERYID:.*",
+ ".*LOCK_TIME:.*",
+ ".*grantTime.*",
+ ".*[.][.][.] [0-9]* more.*",
+ ".*job_[0-9_]*.*",
+ ".*job_local[0-9_]*.*",
+ ".*USING 'java -cp.*",
+ "^Deleted.*",
+ });
+
public int checkCliDriverResults(String tname) throws Exception {
String[] cmdArray;
- String[] patterns;
assert(qMap.containsKey(tname));
String outFileName = outPath(outDir, tname + ".out");
- patterns = new String[] {
- ".*file:.*",
- ".*pfile:.*",
- ".*hdfs:.*",
- ".*/tmp/.*",
- ".*invalidscheme:.*",
- ".*lastUpdateTime.*",
- ".*lastAccessTime.*",
- ".*lastModifiedTime.*",
- ".*[Oo]wner.*",
- ".*CreateTime.*",
- ".*LastAccessTime.*",
- ".*Location.*",
- ".*LOCATION '.*",
- ".*transient_lastDdlTime.*",
- ".*last_modified_.*",
- ".*at org.*",
- ".*at sun.*",
- ".*at java.*",
- ".*at junit.*",
- ".*Caused by:.*",
- ".*LOCK_QUERYID:.*",
- ".*LOCK_TIME:.*",
- ".*grantTime.*",
- ".*[.][.][.] [0-9]* more.*",
- ".*job_[0-9_]*.*",
- ".*job_local[0-9_]*.*",
- ".*USING 'java -cp.*",
- "^Deleted.*",
- };
-
File f = new File(logDir, tname + ".out");
- maskPatterns(patterns, f.getPath());
+ maskPatterns(planMask, f.getPath());
int exitVal = executeDiffCommand(f.getPath(),
outFileName, false,
qSortSet.contains(tname));
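Both mask lists are now compiled once into Pattern[] fields instead of being handed to String.replaceAll, which recompiles its regex on every call, per pattern, per line of every result file. The gist, as a standalone sketch with a shortened pattern list:

import java.util.regex.Pattern;

public class MaskSketch {
  private static final String MASK = "#### A masked pattern was here ####";

  // Compiled once, reused for every line of every result file.
  private static final Pattern[] MASKS = {
      Pattern.compile(".*/tmp/.*"),
      Pattern.compile(".*transient_lastDdlTime.*"),
  };

  static String mask(String line) {
    for (Pattern p : MASKS) {
      line = p.matcher(line).replaceAll(MASK);
    }
    return line;
  }

  public static void main(String[] args) {
    System.out.println(mask("location: /tmp/hive/scratch"));  // fully masked
  }
}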
Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/create_view_partitioned.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/create_view_partitioned.q.out?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/create_view_partitioned.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/create_view_partitioned.q.out Sat Sep 21 15:49:49 2013
@@ -133,14 +133,12 @@ PREHOOK: query: -- should work since we
ALTER VIEW vp1
ADD IF NOT EXISTS PARTITION (value='val_xyz')
PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: default@src
PREHOOK: Input: default@vp1
PREHOOK: Output: default@vp1@value=val_xyz
POSTHOOK: query: -- should work since we use IF NOT EXISTS
ALTER VIEW vp1
ADD IF NOT EXISTS PARTITION (value='val_xyz')
POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: default@src
POSTHOOK: Input: default@vp1
POSTHOOK: Output: default@vp1@value=val_xyz
PREHOOK: query: SHOW PARTITIONS vp1
Modified: hive/branches/vectorization/service/if/TCLIService.thrift
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/if/TCLIService.thrift?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/service/if/TCLIService.thrift (original)
+++ hive/branches/vectorization/service/if/TCLIService.thrift Sat Sep 21 15:49:49 2013
@@ -42,6 +42,9 @@ enum TProtocolVersion {
// V2 adds support for asynchronous execution
HIVE_CLI_SERVICE_PROTOCOL_V2
+
+ // V3 adds varchar type and primitive type qualifiers
+ HIVE_CLI_SERVICE_PROTOCOL_V3
}
enum TTypeId {
@@ -62,7 +65,8 @@ enum TTypeId {
USER_DEFINED_TYPE,
DECIMAL_TYPE,
NULL_TYPE,
- DATE_TYPE
+ DATE_TYPE,
+ VARCHAR_TYPE
}
const set<TTypeId> PRIMITIVE_TYPES = [
@@ -79,6 +83,7 @@ const set<TTypeId> PRIMITIVE_TYPES = [
TTypeId.DECIMAL_TYPE,
TTypeId.NULL_TYPE
TTypeId.DATE_TYPE
+ TTypeId.VARCHAR_TYPE
]
const set<TTypeId> COMPLEX_TYPES = [
@@ -112,6 +117,7 @@ const map<TTypeId,string> TYPE_NAMES = {
TTypeId.DECIMAL_TYPE: "DECIMAL",
TTypeId.NULL_TYPE: "NULL"
TTypeId.DATE_TYPE: "DATE"
+ TTypeId.VARCHAR_TYPE: "VARCHAR"
}
// Thrift does not support recursively defined types or forward declarations,
@@ -159,11 +165,25 @@ const map<TTypeId,string> TYPE_NAMES = {
typedef i32 TTypeEntryPtr
+// Valid TTypeQualifiers key names
+const string CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength"
+
+union TTypeQualifierValue {
+ 1: optional i32 i32Value
+ 2: optional string stringValue
+}
+
+// Type qualifiers for primitive type.
+struct TTypeQualifiers {
+ 1: required map <string, TTypeQualifierValue> qualifiers
+}
+
// Type entry for a primitive type.
struct TPrimitiveTypeEntry {
// The primitive type token. This must satisfy the condition
// that type is in the PRIMITIVE_TYPES set.
1: required TTypeId type
+ 2: optional TTypeQualifiers typeQualifiers
}
// Type entry for an ARRAY type.
@@ -458,7 +478,7 @@ struct TOperationHandle {
// which operations may be executed.
struct TOpenSessionReq {
// The version of the HiveServer2 protocol that the client is using.
- 1: required TProtocolVersion client_protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2
+ 1: required TProtocolVersion client_protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3
// Username and password for authentication.
// Depending on the authentication scheme being used,
@@ -477,7 +497,7 @@ struct TOpenSessionResp {
1: required TStatus status
// The protocol version that the server is using.
- 2: required TProtocolVersion serverProtocolVersion = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2
+ 2: required TProtocolVersion serverProtocolVersion = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3
// Session Handle
3: optional TSessionHandle sessionHandle
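On the Java side, the generated bindings let a server tag a VARCHAR column with its maximum length roughly as follows. This is a sketch assuming Thrift's usual Java codegen for the definitions above (union factory methods and required-field constructors), not code from this commit:

import java.util.HashMap;
import java.util.Map;

import org.apache.hive.service.cli.thrift.TCLIServiceConstants;
import org.apache.hive.service.cli.thrift.TPrimitiveTypeEntry;
import org.apache.hive.service.cli.thrift.TTypeId;
import org.apache.hive.service.cli.thrift.TTypeQualifierValue;
import org.apache.hive.service.cli.thrift.TTypeQualifiers;

public class VarcharTypeEntrySketch {
  static TPrimitiveTypeEntry varcharEntry(int maxLength) {
    // The union carries the qualifier value; here an i32 maximum length.
    TTypeQualifierValue value = TTypeQualifierValue.i32Value(maxLength);

    Map<String, TTypeQualifierValue> qualifiers =
        new HashMap<String, TTypeQualifierValue>();
    qualifiers.put(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH, value);

    TPrimitiveTypeEntry entry = new TPrimitiveTypeEntry(TTypeId.VARCHAR_TYPE);
    entry.setTypeQualifiers(new TTypeQualifiers(qualifiers));
    return entry;
  }
}

Because the qualifier key mirrors the IDL constant CHARACTER_MAXIMUM_LENGTH, clients can look the value up under the same name the server wrote.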
Modified: hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp Sat Sep 21 15:49:49 2013
@@ -24,6 +24,7 @@ TCLIServiceConstants::TCLIServiceConstan
PRIMITIVE_TYPES.insert((TTypeId::type)15);
PRIMITIVE_TYPES.insert((TTypeId::type)16);
PRIMITIVE_TYPES.insert((TTypeId::type)17);
+ PRIMITIVE_TYPES.insert((TTypeId::type)18);
COMPLEX_TYPES.insert((TTypeId::type)10);
COMPLEX_TYPES.insert((TTypeId::type)11);
@@ -51,6 +52,9 @@ TCLIServiceConstants::TCLIServiceConstan
TYPE_NAMES.insert(std::make_pair((TTypeId::type)15, "DECIMAL"));
TYPE_NAMES.insert(std::make_pair((TTypeId::type)16, "NULL"));
TYPE_NAMES.insert(std::make_pair((TTypeId::type)17, "DATE"));
+ TYPE_NAMES.insert(std::make_pair((TTypeId::type)18, "VARCHAR"));
+
+ CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength";
}
Modified: hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.h?rev=1525254&r1=1525253&r2=1525254&view=diff
==============================================================================
--- hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.h (original)
+++ hive/branches/vectorization/service/src/gen/thrift/gen-cpp/TCLIService_constants.h Sat Sep 21 15:49:49 2013
@@ -19,6 +19,7 @@ class TCLIServiceConstants {
std::set<TTypeId::type> COMPLEX_TYPES;
std::set<TTypeId::type> COLLECTION_TYPES;
std::map<TTypeId::type, std::string> TYPE_NAMES;
+ std::string CHARACTER_MAXIMUM_LENGTH;
};
extern const TCLIServiceConstants g_TCLIService_constants;