Posted to commits@hive.apache.org by we...@apache.org on 2017/05/17 02:53:14 UTC
[36/50] [abbrv] hive git commit: HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID (Peter Vary, reviewed by Aihua Xu)
HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID (Peter Vary, reviewed by Aihua Xu)
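What changed, in short: the old QueryState constructor regenerated HiveConf.HIVEQUERYID on every instantiation, so helper code such as ColumnStatsAutoGatherContext silently replaced the id of the query that was already running. QueryState is now created through a builder, and only callers that opt in via withGenerateNewQueryId(true) get a fresh id. Below is a minimal sketch of the two call patterns this patch introduces; the wrapper class and the sessionConf variable are illustrative only, not part of the commit.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryState;

public class QueryStateBuilderSketch {
  public static void main(String[] args) {
    HiveConf sessionConf = new HiveConf();

    // Top-level query: ask for a brand new queryId. This is what the new
    // Driver.getNewQueryState() helper does.
    QueryState driverState = new QueryState.Builder()
        .withGenerateNewQueryId(true)
        .withHiveConf(sessionConf)
        .build();

    // Derived work (auto-gathered column stats, tests): build on the same
    // conf without requesting a new id, so HIVEQUERYID stays untouched.
    QueryState statsState = new QueryState.Builder()
        .withHiveConf(driverState.getConf())
        .build();

    // Both states report the same queryId, because build() reuses the
    // backing conf when no overlay or async detach is requested.
    System.out.println(driverState.getQueryId());
    System.out.println(statsState.getQueryId());
  }
}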
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86f74fdd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86f74fdd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86f74fdd
Branch: refs/heads/hive-14535
Commit: 86f74fdd26b83cee128d24055ee369e9b7f36201
Parents: 455ffdd
Author: Aihua Xu <ai...@apache.org>
Authored: Mon May 15 11:12:04 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon May 15 11:36:22 2017 -0400
----------------------------------------------------------------------
.../mapreduce/TestHCatMultiOutputFormat.java | 2 +-
.../test/resources/testconfiguration.properties | 1 +
.../org/apache/hadoop/hive/ql/QTestUtil.java | 4 +-
.../java/org/apache/hive/beeline/QFile.java | 20 +-
.../apache/hive/beeline/QFileBeeLineClient.java | 8 +-
.../java/org/apache/hadoop/hive/ql/Driver.java | 28 +-
.../org/apache/hadoop/hive/ql/QueryState.java | 146 ++++++---
.../org/apache/hadoop/hive/ql/exec/DDLTask.java | 2 +-
.../ql/io/rcfile/stats/PartialScanTask.java | 3 +-
.../metadata/HiveMaterializedViewsRegistry.java | 5 +-
.../ql/parse/ColumnStatsAutoGatherContext.java | 9 +-
.../hadoop/hive/ql/exec/TestExecDriver.java | 3 +-
.../ql/parse/TestMacroSemanticAnalyzer.java | 2 +-
.../hadoop/hive/ql/parse/TestQBCompact.java | 2 +-
.../ql/parse/TestQBJoinTreeApplyPredicate.java | 3 +-
.../hadoop/hive/ql/parse/TestQBSubQuery.java | 3 +-
.../parse/TestReplicationSemanticAnalyzer.java | 3 +-
.../ql/parse/TestSemanticAnalyzerFactory.java | 2 +-
.../parse/TestUpdateDeleteSemanticAnalyzer.java | 2 +-
.../TestHiveAuthorizationTaskFactory.java | 2 +-
.../parse/authorization/TestPrivilegesV1.java | 4 +-
.../parse/authorization/TestPrivilegesV2.java | 2 +-
.../materialized_view_create_rewrite.q.out | 322 +++++++++++++++++++
.../hive/service/cli/operation/Operation.java | 12 +-
.../service/cli/operation/SQLOperation.java | 6 +-
25 files changed, 498 insertions(+), 98 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 6ff48ee..180e802 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -356,7 +356,7 @@ public class TestHCatMultiOutputFormat {
* @throws Exception if any error occurs
*/
private List<String> getTableData(String table, String database) throws Exception {
- QueryState queryState = new QueryState(null);
+ QueryState queryState = new QueryState.Builder().build();
HiveConf conf = queryState.getConf();
conf.addResource("hive-site.xml");
ArrayList<String> results = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index a378a5d..51385cf 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -756,6 +756,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
beeline.positive.include=drop_with_concurrency.q,\
escape_comments.q,\
mapjoin2.q,\
+ materialized_view_create_rewrite.q,\
smb_mapjoin_1.q,\
smb_mapjoin_10.q,\
smb_mapjoin_11.q,\
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index b897ffa..d408321 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -566,7 +566,7 @@ public class QTestUtil {
System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
}
- queryState = new QueryState(new HiveConf(Driver.class));
+ queryState = new QueryState.Builder().withHiveConf(new HiveConf(Driver.class)).build();
if (useHBaseMetastore) {
startMiniHBaseCluster();
} else {
@@ -1896,7 +1896,7 @@ public class QTestUtil {
public void resetParser() throws SemanticException {
drv.init();
pd = new ParseDriver();
- queryState = new QueryState(conf);
+ queryState = new QueryState.Builder().withHiveConf(conf).build();
sem = new SemanticAnalyzer(queryState);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
index 3d9ca99..af464b9 100644
--- a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
+++ b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
@@ -63,6 +63,7 @@ public final class QFile {
private static final String MASK_PATTERN = "#### A masked pattern was here ####\n";
private String name;
+ private String databaseName;
private File inputFile;
private File rawOutputFile;
private File outputFile;
@@ -81,6 +82,10 @@ public final class QFile {
return name;
}
+ public String getDatabaseName() {
+ return databaseName;
+ }
+
public File getInputFile() {
return inputFile;
}
@@ -163,8 +168,8 @@ public final class QFile {
*/
private String revertReplaceTableNames(String source) {
for (String table : srcTables) {
- source = source.replaceAll("(?is)(\\s+)default\\." + table + "([\\s;\\n\\)])", "$1" + table
- + "$2");
+ source = source.replaceAll("(?is)(?<!name:?|alias:?)(\\s+)default\\." + table
+ + "([\\s;\\n\\)])", "$1" + table + "$2");
}
return source;
}
@@ -319,6 +324,7 @@ public final class QFile {
public QFile getQFile(String name) throws IOException {
QFile result = new QFile();
result.name = name;
+ result.databaseName = "test_db_" + name;
result.inputFile = new File(queryDirectory, name + ".q");
result.rawOutputFile = new File(logDirectory, name + ".q.out.raw");
result.outputFile = new File(logDirectory, name + ".q.out");
@@ -328,11 +334,13 @@ public final class QFile {
result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log");
result.rewriteSourceTables = rewriteSourceTables;
result.specificFilterSet = new RegexFilterSet()
- .addFilter("(PREHOOK|POSTHOOK): (Output|Input): database:" + name + "\n",
+ .addFilter("(PREHOOK|POSTHOOK): (Output|Input): database:" + result.databaseName + "\n",
"$1: $2: database:default\n")
- .addFilter("(PREHOOK|POSTHOOK): (Output|Input): " + name + "@", "$1: $2: default@")
- .addFilter("name(:?) " + name + "\\.(.*)\n", "name$1 default.$2\n")
- .addFilter("/" + name + ".db/", "/");
+ .addFilter("(PREHOOK|POSTHOOK): (Output|Input): " + result.databaseName + "@",
+ "$1: $2: default@")
+ .addFilter("name(:?) " + result.databaseName + "\\.(.*)\n", "name$1 default.$2\n")
+ .addFilter("alias(:?) " + result.databaseName + "\\.(.*)\n", "alias$1 default.$2\n")
+ .addFilter("/" + result.databaseName + ".db/", "/");
result.converter = Converter.NONE;
String input = FileUtils.readFileToString(result.inputFile, "UTF-8");
if (input.contains("-- SORT_QUERY_RESULTS")) {
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
index 7c50e18..d67bf19 100644
--- a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
+++ b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
@@ -77,9 +77,9 @@ public class QFileBeeLineClient implements AutoCloseable {
"!set showheader false",
"USE default;",
"SHOW TABLES;",
- "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;",
- "CREATE DATABASE `" + qFile.getName() + "`;",
- "USE `" + qFile.getName() + "`;",
+ "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
+ "CREATE DATABASE `" + qFile.getDatabaseName() + "`;",
+ "USE `" + qFile.getDatabaseName() + "`;",
"set hive.in.test.short.logs=true;",
"set hive.in.test.remove.logs=false;",
},
@@ -98,7 +98,7 @@ public class QFileBeeLineClient implements AutoCloseable {
"!set showheader true",
"!set outputformat table",
"USE default;",
- "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;",
+ "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
},
qFile.getAfterExecuteLogFile(),
Converter.NONE);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index f01efa5..9aced9f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -347,21 +347,21 @@ public class Driver implements CommandProcessor {
}
public Driver() {
- this(new QueryState((SessionState.get() != null) ?
+ this(getNewQueryState((SessionState.get() != null) ?
SessionState.get().getConf() : new HiveConf()), null);
}
public Driver(HiveConf conf) {
- this(new QueryState(conf), null);
+ this(getNewQueryState(conf), null);
}
public Driver(HiveConf conf, Context ctx) {
- this(new QueryState(conf), null);
+ this(getNewQueryState(conf), null);
this.ctx = ctx;
}
public Driver(HiveConf conf, String userName) {
- this(new QueryState(conf), userName, null);
+ this(getNewQueryState(conf), userName, null);
}
public Driver(QueryState queryState, String userName) {
@@ -369,7 +369,7 @@ public class Driver implements CommandProcessor {
}
public Driver(HiveConf conf, HooksLoader hooksLoader) {
- this(new QueryState(conf), null, hooksLoader, null);
+ this(getNewQueryState(conf), null, hooksLoader, null);
}
public Driver(QueryState queryState, String userName, QueryInfo queryInfo) {
@@ -388,6 +388,15 @@ public class Driver implements CommandProcessor {
}
/**
+ * Generates a new QueryState object and makes sure that a new queryId is generated for it.
+ * @param conf The HiveConf which should be used
+ * @return The new QueryState object
+ */
+ private static QueryState getNewQueryState(HiveConf conf) {
+ return new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build();
+ }
+
+ /**
* Compile a new query. Any currently-planned query associated with this Driver is discarded.
* Do not reset id for inner queries(index, etc). Task ids are used for task identity check.
*
@@ -454,7 +463,7 @@ public class Driver implements CommandProcessor {
LockedDriverState.setLockedDriverState(lDrvState);
- String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
+ String queryId = queryState.getQueryId();
//save some info for webUI for use after plan is freed
this.queryDisplay.setQueryStr(queryStr);
@@ -1693,7 +1702,7 @@ public class Driver implements CommandProcessor {
int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
Metrics metrics = MetricsFactory.getInstance();
- String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
+ String queryId = queryState.getQueryId();
// Get the query string from the conf file as the compileInternal() method might
// hide sensitive information during query redaction.
String queryStr = conf.getQueryString();
@@ -1732,8 +1741,7 @@ public class Driver implements CommandProcessor {
plan.setStarted();
if (SessionState.get() != null) {
- SessionState.get().getHiveHistory().startQuery(queryStr,
- conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+ SessionState.get().getHiveHistory().startQuery(queryStr, queryId);
SessionState.get().getHiveHistory().logPlanProgress(plan);
}
resStream = null;
@@ -2425,6 +2433,6 @@ public class Driver implements CommandProcessor {
// repeated compile/execute calls create new contexts, plan, etc., so we don't need to worry
// propagating queryState into those existing fields, or resetting them.
releaseResources();
- this.queryState = new QueryState(queryState.getConf());
+ this.queryState = getNewQueryState(queryState.getConf());
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
index 6dfaa9f..fa7c323 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.ql.plan.HiveOperation;
* The class to store query level info such as queryId. Multiple queries can run
* in the same session, so SessionState is to hold common session related info, and
* each QueryState is to hold query related info.
- *
*/
public class QueryState {
/**
@@ -39,48 +38,12 @@ public class QueryState {
*/
private HiveOperation commandType;
- public QueryState(HiveConf conf) {
- this(conf, null, false);
- }
-
- public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
- this.queryConf = createConf(conf, confOverlay, runAsync);
- }
-
/**
- * If there are query specific settings to overlay, then create a copy of config
- * There are two cases we need to clone the session config that's being passed to hive driver
- * 1. Async query -
- * If the client changes a config setting, that shouldn't reflect in the execution already underway
- * 2. confOverlay -
- * The query specific settings should only be applied to the query config and not session
- * @return new configuration
+ * Private constructor; use QueryState.Builder instead
+ * @param conf The query specific configuration object
*/
- private HiveConf createConf(HiveConf conf,
- Map<String, String> confOverlay,
- boolean runAsync) {
-
- if ( (confOverlay != null && !confOverlay.isEmpty()) ) {
- conf = (conf == null ? new HiveConf() : new HiveConf(conf));
-
- // apply overlay query specific settings, if any
- for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
- try {
- conf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
- } catch (IllegalArgumentException e) {
- throw new RuntimeException("Error applying statement specific settings", e);
- }
- }
- } else if (runAsync) {
- conf = (conf == null ? new HiveConf() : new HiveConf(conf));
- }
-
- if (conf == null) {
- conf = new HiveConf();
- }
-
- conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
- return conf;
+ private QueryState(HiveConf conf) {
+ this.queryConf = conf;
}
public String getQueryId() {
@@ -109,4 +72,105 @@ public class QueryState {
public HiveConf getConf() {
return queryConf;
}
+
+ /**
+ * Builder to instantiate the QueryState object.
+ */
+ public static class Builder {
+ private Map<String, String> confOverlay = null;
+ private boolean runAsync = false;
+ private boolean generateNewQueryId = false;
+ private HiveConf hiveConf = null;
+
+ /**
+ * Default constructor - use this builder to create a QueryState object
+ */
+ public Builder() {
+ }
+
+ /**
+ * Set this to true if the configuration should be detached from the original config. If not
+ * set, the default value is false.
+ * @param runAsync Whether the configuration should be detached
+ * @return The builder
+ */
+ public Builder withRunAsync(boolean runAsync) {
+ this.runAsync = runAsync;
+ return this;
+ }
+
+ /**
+ * Set this if there are specific configuration values which should be added to the original
+ * config. If at least one value is set, then the configuration will be detached from the
+ * original one.
+ * @param confOverlay The query specific parameters
+ * @return The builder
+ */
+ public Builder withConfOverlay(Map<String, String> confOverlay) {
+ this.confOverlay = confOverlay;
+ return this;
+ }
+
+ /**
+ * Set this to true if a new queryId should be generated; otherwise the original one will be
+ * kept. If not set, the default value is false.
+ * @param generateNewQueryId Whether a new queryId should be generated
+ * @return The builder
+ */
+ public Builder withGenerateNewQueryId(boolean generateNewQueryId) {
+ this.generateNewQueryId = generateNewQueryId;
+ return this;
+ }
+
+ /**
+ * The source HiveConf object used to create the QueryState. If runAsync is false and the
+ * confOverlay is empty, then the hiveConf object is reused as the backing datastore for the
+ * QueryState; otherwise a clone of the hiveConf object is created.
+ * @param hiveConf The source HiveConf
+ * @return The builder
+ */
+ public Builder withHiveConf(HiveConf hiveConf) {
+ this.hiveConf = hiveConf;
+ return this;
+ }
+
+ /**
+ * Creates the QueryState object. The default values are:
+ * - runAsync false
+ * - confOverlay null
+ * - generateNewQueryId false
+ * - hiveConf null
+ * @return The generated QueryState object
+ */
+ public QueryState build() {
+ HiveConf queryConf = hiveConf;
+
+ if (queryConf == null) {
+ // Generate a new conf if necessary
+ queryConf = new HiveConf();
+ } else if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) {
+ // Detach the original conf if necessary
+ queryConf = new HiveConf(queryConf);
+ }
+
+ // Set the specific parameters if needed
+ if (confOverlay != null && !confOverlay.isEmpty()) {
+ // apply overlay query specific settings, if any
+ for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+ try {
+ queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
+ } catch (IllegalArgumentException e) {
+ throw new RuntimeException("Error applying statement specific settings", e);
+ }
+ }
+ }
+
+ // Generate the new queryId if needed
+ if (generateNewQueryId) {
+ queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
+ }
+
+ return new QueryState(queryConf);
+ }
+ }
}
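Usage note on build(): the backing HiveConf is reused as-is unless runAsync is true or a non-empty confOverlay is supplied, either of which forces a detached copy, and a fresh queryId is only stamped onto the conf when withGenerateNewQueryId(true) was requested. A hedged sketch of the overlay path follows; it continues the illustrative sessionConf from the earlier sketch, and the property name is just an example of a verifyAndSet-able key.

import java.util.HashMap;
import java.util.Map;

Map<String, String> overlay = new HashMap<>();
overlay.put("hive.exec.parallel", "true");

QueryState async = new QueryState.Builder()
    .withHiveConf(sessionConf)        // illustrative session-level conf
    .withConfOverlay(overlay)         // non-empty overlay => conf is cloned
    .withRunAsync(true)               // async alone would also force the clone
    .withGenerateNewQueryId(true)     // stamp a fresh HIVEQUERYID on the clone
    .build();

// async.getConf() is a detached copy: verifyAndSet() was applied to the clone,
// so the original sessionConf keeps its own queryId and settings.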
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index cf575de..b07d6b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -740,7 +740,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
DriverContext driverCxt = new DriverContext();
Task task;
if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
- TezWork tezWork = new TezWork(conf.getVar(HiveConf.ConfVars.HIVEQUERYID), conf);
+ TezWork tezWork = new TezWork(queryState.getQueryId(), conf);
mergeWork.setName("File Merge");
tezWork.add(mergeWork);
task = new TezTask();
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 77bce97..ad921f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -344,7 +344,8 @@ public class PartialScanTask extends Task<PartialScanWork> implements
}
}
- QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
+ QueryState queryState =
+ new QueryState.Builder().withHiveConf(new HiveConf(conf, PartialScanTask.class)).build();
PartialScanWork mergeWork = new PartialScanWork(inputPaths);
DriverContext driverCxt = new DriverContext();
PartialScanTask taskExec = new PartialScanTask();
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
index b121eea..56c0163 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
@@ -60,7 +60,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -70,7 +69,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.joda.time.Interval;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -330,7 +328,8 @@ public final class HiveMaterializedViewsRegistry {
private static RelNode parseQuery(String viewQuery) {
try {
final ASTNode node = ParseUtils.parse(viewQuery);
- final QueryState qs = new QueryState(SessionState.get().getConf());
+ final QueryState qs =
+ new QueryState.Builder().withHiveConf(SessionState.get().getConf()).build();
CalcitePlanner analyzer = new CalcitePlanner(qs);
analyzer.initCtx(new Context(SessionState.get().getConf()));
analyzer.init(false);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index 3b719af..d72ff5cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.parse;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -33,18 +32,15 @@ import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -135,12 +131,13 @@ public class ColumnStatsAutoGatherContext {
ASTNode tree = ParseUtils.parse(analyzeCommand, ctx);
//1. get the ColumnStatsSemanticAnalyzer
- BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), tree);
+ QueryState queryState = new QueryState.Builder().withHiveConf(conf).build();
+ BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(queryState, tree);
ColumnStatsSemanticAnalyzer colSem = (ColumnStatsSemanticAnalyzer) baseSem;
//2. get the rewritten AST
ASTNode ast = colSem.rewriteAST(tree, this);
- baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), ast);
+ baseSem = SemanticAnalyzerFactory.get(queryState, ast);
SemanticAnalyzer sem = (SemanticAnalyzer) baseSem;
QB qb = new QB(null, null, false);
ASTNode child = ast;
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index c7266bc..b4898e2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -62,7 +62,6 @@ import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.util.Shell;
/**
* Mimics the actual query compiler in generating end to end plans and testing
@@ -83,7 +82,7 @@ public class TestExecDriver extends TestCase {
static {
try {
- queryState = new QueryState(new HiveConf(ExecDriver.class));
+ queryState = new QueryState.Builder().withHiveConf(new HiveConf(ExecDriver.class)).build();
conf = queryState.getConf();
conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index c734988..deba1d5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -42,7 +42,7 @@ public class TestMacroSemanticAnalyzer {
@Before
public void setup() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
conf = queryState.getConf();
SessionState.start(conf);
context = new Context(conf);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index 201622e..182ac2b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -50,7 +50,7 @@ public class TestQBCompact {
@BeforeClass
public static void init() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
conf = queryState.getConf();
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
index e607f10..45901c9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
@@ -38,7 +38,8 @@ public class TestQBJoinTreeApplyPredicate {
@BeforeClass
public static void initialize() {
- queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+ queryState =
+ new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
conf = queryState.getConf();
SessionState.start(conf);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 2674835..993b4da 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -48,7 +48,8 @@ public class TestQBSubQuery {
@BeforeClass
public static void initialize() {
- queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+ queryState =
+ new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
conf = queryState.getConf();
SessionState.start(conf);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index 80865bd..1cb4470 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -56,7 +56,8 @@ public class TestReplicationSemanticAnalyzer {
@BeforeClass
public static void initialize() throws HiveException {
- queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+ queryState =
+ new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
conf = queryState.getConf();
conf.set("hive.security.authorization.manager", "");
SessionState.start(conf);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
index 5849950..b19d42f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
@@ -32,7 +32,7 @@ public class TestSemanticAnalyzerFactory {
@Before
public void setup() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
conf = queryState.getConf();
}
@Test
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index a573808..9c20521 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -223,7 +223,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
@Before
public void setup() {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
conf = queryState.getConf();
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 58cb4b4..746aa4b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -99,7 +99,7 @@ public class TestHiveAuthorizationTaskFactory {
@Before
public void setup() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
HiveConf conf = queryState.getConf();
conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index 5d01080..349f494 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.parse.authorization;
import java.util.HashMap;
-import junit.framework.Assert;
-
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -41,7 +39,7 @@ public class TestPrivilegesV1 extends PrivilegesTestBase{
@Before
public void setup() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
db = Mockito.mock(Hive.class);
table = new Table(DB, TABLE);
partition = new Partition(table);
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
index c552ba7..312770f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
@@ -40,7 +40,7 @@ public class TestPrivilegesV2 extends PrivilegesTestBase{
@Before
public void setup() throws Exception {
- queryState = new QueryState(null);
+ queryState = new QueryState.Builder().build();
//set authorization mode to V2
HiveConf conf = queryState.getConf();
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
new file mode 100644
index 0000000..041621f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
@@ -0,0 +1,322 @@
+PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_basetable
+PREHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@cmv_basetable
+POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.d EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+PREHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view
+POSTHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view
+PREHOOK: query: select * from cmv_mat_view
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+2 bob 3.14
+2 bonnie 172342.20
+PREHOOK: query: show tblproperties cmv_mat_view
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles 1
+totalSize 453
+#### A masked pattern was here ####
+PREHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view2
+PREHOOK: query: select * from cmv_mat_view2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 978.76
+3 9.80
+PREHOOK: query: show tblproperties cmv_mat_view2
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view2
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles 1
+totalSize 322
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: default.cmv_mat_view2
+ Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: a (type: int), c (type: decimal(10,2))
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+ ListSink
+
+PREHOOK: query: select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 978.76
+3 9.80
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: default.cmv_mat_view2
+ Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: decimal(10,2))
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: decimal(10,2))
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((d = 3) and (3 = a)) (type: boolean)
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: decimal(10,2))
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: decimal(10,2))
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 9.80 3 978.76
+3 978.76 3 978.76
+PREHOOK: query: drop materialized view cmv_mat_view2
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_mat_view2
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: drop materialized view cmv_mat_view2
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_mat_view2
+POSTHOOK: Output: default@cmv_mat_view2
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: decimal(10,2))
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: decimal(10,2))
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((d = 3) and (3 = a)) (type: boolean)
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: decimal(10,2))
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: decimal(10,2))
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+ (select a, c from cmv_basetable where a = 3) table1
+ join
+ (select a, c from cmv_basetable where d = 3) table2
+ on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+3 9.80 3 978.76
+3 978.76 3 978.76
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/service/src/java/org/apache/hive/service/cli/operation/Operation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 0b27608..4e78551 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -20,7 +20,6 @@ package org.apache.hive.service.cli.operation;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.EnumSet;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -63,7 +62,6 @@ public abstract class Operation {
protected volatile Future<?> backgroundHandle;
protected OperationLog operationLog;
protected boolean isOperationLogEnabled;
- protected Map<String, String> confOverlay = new HashMap<String, String>();
private long operationTimeout;
private volatile long lastAccessTime;
@@ -90,9 +88,6 @@ public abstract class Operation {
protected Operation(HiveSession parentSession,
Map<String, String> confOverlay, OperationType opType, boolean isAsyncQueryState) {
this.parentSession = parentSession;
- if (confOverlay != null) {
- this.confOverlay = confOverlay;
- }
this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
beginTime = System.currentTimeMillis();
lastAccessTime = beginTime;
@@ -101,7 +96,12 @@ public abstract class Operation {
currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX,
MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
- queryState = new QueryState(parentSession.getHiveConf(), confOverlay, isAsyncQueryState);
+ queryState = new QueryState.Builder()
+ .withConfOverlay(confOverlay)
+ .withRunAsync(isAsyncQueryState)
+ .withGenerateNewQueryId(true)
+ .withHiveConf(parentSession.getHiveConf())
+ .build();
}
public Future<?> getBackgroundHandle() {
http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 0b51591..1a2be8b 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -169,7 +169,7 @@ public class SQLOperation extends ExecuteStatementOperation {
@Override
public void run() {
try {
- String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+ String queryId = queryState.getQueryId();
LOG.info("Query timed out after: " + queryTimeout
+ " seconds. Cancelling the execution now: " + queryId);
SQLOperation.this.cancel(OperationState.TIMEDOUT);
@@ -397,7 +397,7 @@ public class SQLOperation extends ExecuteStatementOperation {
Future<?> backgroundHandle = getBackgroundHandle();
if (backgroundHandle != null) {
boolean success = backgroundHandle.cancel(true);
- String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+ String queryId = queryState.getQueryId();
if (success) {
LOG.info("The running operation has been successfully interrupted: " + queryId);
} else if (state == OperationState.CANCELED) {
@@ -430,7 +430,7 @@ public class SQLOperation extends ExecuteStatementOperation {
public void cancel(OperationState stateAfterCancel) throws HiveSQLException {
String queryId = null;
if (stateAfterCancel == OperationState.CANCELED) {
- queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+ queryId = queryState.getQueryId();
LOG.info("Cancelling the query execution: " + queryId);
}
cleanup(stateAfterCancel);