Posted to commits@hive.apache.org by br...@apache.org on 2014/09/08 06:38:26 UTC
svn commit: r1623263 [18/28] - in /hive/branches/spark: ./
accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/
ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/ bin/...
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java Mon Sep 8 04:38:17 2014
@@ -33,23 +33,23 @@ import org.apache.hadoop.util.StringUtil
import org.apache.tez.common.TezUtils;
import org.apache.tez.mapreduce.input.MRInputLegacy;
import org.apache.tez.mapreduce.processor.MRTaskReporter;
+import org.apache.tez.runtime.api.AbstractLogicalIOProcessor;
import org.apache.tez.runtime.api.Event;
-import org.apache.tez.runtime.api.LogicalIOProcessor;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.api.LogicalOutput;
-import org.apache.tez.runtime.api.TezProcessorContext;
+import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.KeyValueWriter;
/**
* Hive processor for Tez that forms the vertices in Tez and processes the data.
* Does what ExecMapper and ExecReducer does for hive in MR framework.
*/
-public class TezProcessor implements LogicalIOProcessor {
+public class TezProcessor extends AbstractLogicalIOProcessor {
private static final Log LOG = LogFactory.getLog(TezProcessor.class);
- private boolean isMap = false;
+ protected boolean isMap = false;
RecordProcessor rproc = null;
@@ -58,8 +58,6 @@ public class TezProcessor implements Log
private static final String CLASS_NAME = TezProcessor.class.getName();
private final PerfLogger perfLogger = PerfLogger.getPerfLogger();
- private TezProcessorContext processorContext;
-
protected static final NumberFormat taskIdFormat = NumberFormat.getInstance();
protected static final NumberFormat jobIdFormat = NumberFormat.getInstance();
static {
@@ -69,8 +67,9 @@ public class TezProcessor implements Log
jobIdFormat.setMinimumIntegerDigits(4);
}
- public TezProcessor(boolean isMap) {
- this.isMap = isMap;
+ public TezProcessor(ProcessorContext context) {
+ super(context);
+ ObjectCache.setupObjectRegistry(context.getObjectRegistry());
}
@Override
@@ -86,19 +85,15 @@ public class TezProcessor implements Log
}
@Override
- public void initialize(TezProcessorContext processorContext)
- throws IOException {
+ public void initialize() throws IOException {
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.TEZ_INITIALIZE_PROCESSOR);
- this.processorContext = processorContext;
- //get the jobconf
- byte[] userPayload = processorContext.getUserPayload();
- Configuration conf = TezUtils.createConfFromUserPayload(userPayload);
+ Configuration conf = TezUtils.createConfFromUserPayload(getContext().getUserPayload());
this.jobConf = new JobConf(conf);
- setupMRLegacyConfigs(processorContext);
+ setupMRLegacyConfigs(getContext());
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.TEZ_INITIALIZE_PROCESSOR);
}
- private void setupMRLegacyConfigs(TezProcessorContext processorContext) {
+ private void setupMRLegacyConfigs(ProcessorContext processorContext) {
// Hive "insert overwrite local directory" uses task id as dir name
// Setting the id in jobconf helps to have the similar dir name as MR
StringBuilder taskAttemptIdBuilder = new StringBuilder("attempt_");
@@ -133,10 +128,10 @@ public class TezProcessor implements Log
// in case of broadcast-join read the broadcast edge inputs
// (possibly asynchronously)
- LOG.info("Running task: " + processorContext.getUniqueIdentifier());
+ LOG.info("Running task: " + getContext().getUniqueIdentifier());
if (isMap) {
- rproc = new MapRecordProcessor();
+ rproc = new MapRecordProcessor(jobConf);
MRInputLegacy mrInput = getMRInput(inputs);
try {
mrInput.init();
@@ -160,8 +155,8 @@ public class TezProcessor implements Log
// Outputs will be started later by the individual Processors.
- MRTaskReporter mrReporter = new MRTaskReporter(processorContext);
- rproc.init(jobConf, processorContext, mrReporter, inputs, outputs);
+ MRTaskReporter mrReporter = new MRTaskReporter(getContext());
+ rproc.init(jobConf, getContext(), mrReporter, inputs, outputs);
rproc.run();
//done - output does not need to be committed as hive does not use outputcommitter
@@ -207,6 +202,7 @@ public class TezProcessor implements Log
this.writer = (KeyValueWriter) output.getWriter();
}
+ @Override
public void collect(Object key, Object value) throws IOException {
writer.write(key, value);
}
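
The TezProcessor changes above track the Tez 0.5 API: a processor now extends AbstractLogicalIOProcessor, receives its ProcessorContext through the constructor, and reads it back via getContext() instead of caching a TezProcessorContext in initialize(). A minimal sketch of that shape, assuming the Tez 0.5 signatures used in the hunks (the class name and the empty method bodies are illustrative, not part of the commit):

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.tez.runtime.api.AbstractLogicalIOProcessor;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.api.LogicalOutput;
import org.apache.tez.runtime.api.ProcessorContext;

// Minimal sketch of a processor on the new API, mirroring the shape
// TezProcessor takes in this commit.
public class SketchProcessor extends AbstractLogicalIOProcessor {

  public SketchProcessor(ProcessorContext context) {
    super(context);  // context is injected here, no longer set in initialize()
  }

  @Override
  public void initialize() throws IOException {
    // configuration now comes from getContext(), e.g.:
    // Configuration conf =
    //     TezUtils.createConfFromUserPayload(getContext().getUserPayload());
  }

  @Override
  public void handleEvents(List<Event> processorEvents) {
    // no-op in this sketch
  }

  @Override
  public void close() throws Exception {
    // no-op in this sketch
  }

  @Override
  public void run(Map<String, LogicalInput> inputs,
      Map<String, LogicalOutput> outputs) throws Exception {
    // per-task work; TezProcessor dispatches to a RecordProcessor here
  }
}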
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java Mon Sep 8 04:38:17 2014
@@ -41,19 +41,14 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.tez.client.AMConfiguration;
-import org.apache.tez.client.PreWarmContext;
-import org.apache.tez.client.TezSession;
-import org.apache.tez.client.TezSessionConfiguration;
+import org.apache.tez.client.TezClient;
+import org.apache.tez.dag.api.PreWarmVertex;
import org.apache.tez.dag.api.SessionNotRunning;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
@@ -70,7 +65,7 @@ public class TezSessionState {
private HiveConf conf;
private Path tezScratchDir;
private LocalResource appJarLr;
- private TezSession session;
+ private TezClient session;
private String sessionId;
private DagUtils utils;
private String queueName;
@@ -153,11 +148,6 @@ public class TezSessionState {
refreshLocalResourcesFromConf(conf);
- // generate basic tez config
- TezConfiguration tezConfig = new TezConfiguration(conf);
-
- tezConfig.set(TezConfiguration.TEZ_AM_STAGING_DIR, tezScratchDir.toUri().toString());
-
// unless already installed on all the cluster nodes, we'll have to
// localize hive-exec.jar as well.
appJarLr = createJarLocalResource(utils.getExecJarPathLocal());
@@ -171,15 +161,23 @@ public class TezSessionState {
// Create environment for AM.
Map<String, String> amEnv = new HashMap<String, String>();
- MRHelpers.updateEnvironmentForMRAM(conf, amEnv);
+ MRHelpers.updateEnvBasedOnMRAMEnv(conf, amEnv);
- AMConfiguration amConfig = new AMConfiguration(amEnv, commonLocalResources, tezConfig, null);
+ // and finally we're ready to create and start the session
+ // generate basic tez config
+ TezConfiguration tezConfig = new TezConfiguration(conf);
+ tezConfig.set(TezConfiguration.TEZ_AM_STAGING_DIR, tezScratchDir.toUri().toString());
- // configuration for the session
- TezSessionConfiguration sessionConfig = new TezSessionConfiguration(amConfig, tezConfig);
+ if (HiveConf.getBoolVar(conf, ConfVars.HIVE_PREWARM_ENABLED)) {
+ int n = HiveConf.getIntVar(conf, ConfVars.HIVE_PREWARM_NUM_CONTAINERS);
+ n = Math.max(tezConfig.getInt(
+ TezConfiguration.TEZ_AM_SESSION_MIN_HELD_CONTAINERS,
+ TezConfiguration.TEZ_AM_SESSION_MIN_HELD_CONTAINERS_DEFAULT), n);
+ tezConfig.setInt(TezConfiguration.TEZ_AM_SESSION_MIN_HELD_CONTAINERS, n);
+ }
- // and finally we're ready to create and start the session
- session = new TezSession("HIVE-" + sessionId, sessionConfig);
+ session = TezClient.create("HIVE-" + sessionId, tezConfig, true,
+ commonLocalResources, null);
LOG.info("Opening new Tez Session (id: " + sessionId
+ ", scratch dir: " + tezScratchDir + ")");
@@ -190,20 +188,30 @@ public class TezSessionState {
int n = HiveConf.getIntVar(conf, ConfVars.HIVE_PREWARM_NUM_CONTAINERS);
LOG.info("Prewarming " + n + " containers (id: " + sessionId
+ ", scratch dir: " + tezScratchDir + ")");
- PreWarmContext context = utils.createPreWarmContext(sessionConfig, n, commonLocalResources);
+ PreWarmVertex prewarmVertex = utils.createPreWarmVertex(tezConfig, n,
+ commonLocalResources);
try {
- session.preWarm(context);
- } catch (InterruptedException ie) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Hive Prewarm threw an exception ", ie);
+ session.preWarm(prewarmVertex);
+ } catch (IOException ie) {
+ if (ie.getMessage().contains("Interrupted while waiting")) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Hive Prewarm threw an exception ", ie);
+ }
+ } else {
+ throw ie;
}
}
}
-
+ try {
+ session.waitTillReady();
+ } catch(InterruptedException ie) {
+ //ignore
+ }
// In case we need to run some MR jobs, we'll run them under tez MR emulation. The session
// id is used for tez to reuse the current session rather than start a new one.
conf.set("mapreduce.framework.name", "yarn-tez");
- conf.set("mapreduce.tez.session.tokill-application-id", session.getApplicationId().toString());
+ conf.set("mapreduce.tez.session.tokill-application-id",
+ session.getAppMasterApplicationId().toString());
openSessions.add(this);
}
@@ -280,7 +288,7 @@ public class TezSessionState {
return sessionId;
}
- public TezSession getSession() {
+ public TezClient getSession() {
return session;
}
@@ -300,11 +308,11 @@ public class TezSessionState {
throws IOException {
// tez needs its own scratch dir (per session)
- Path tezDir = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR), TEZ_DIR);
+ Path tezDir = new Path(SessionState.get().getHdfsScratchDirURIString(), TEZ_DIR);
tezDir = new Path(tezDir, sessionId);
FileSystem fs = tezDir.getFileSystem(conf);
- FsPermission fsPermission = new FsPermission((short)00777);
- Utilities.createDirsWithPermission(conf, tezDir, fsPermission, true);
+ FsPermission fsPermission = new FsPermission(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION));
+ fs.mkdirs(tezDir, fsPermission);
// Make sure the path is normalized (we expect validation to pass since we just created it).
tezDir = DagUtils.validateTargetDir(tezDir, conf).getPath();
// don't keep the directory around on non-clean exit
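
TezSessionState now drives a TezClient instead of the removed TezSession/TezSessionConfiguration pair, folds the prewarm container count into TEZ_AM_SESSION_MIN_HELD_CONTAINERS, and blocks on waitTillReady() before handing the session out. A condensed sketch of the new open() flow, assembled from the hunks above (conf, tezScratchDir, sessionId, commonLocalResources, utils and n are fields or locals of the enclosing class; exception handling elided; the start() call is assumed from the TezClient API and sits in an unchanged region of the file):

// generate basic tez config
TezConfiguration tezConfig = new TezConfiguration(conf);
tezConfig.set(TezConfiguration.TEZ_AM_STAGING_DIR, tezScratchDir.toUri().toString());

// session mode (third argument true) with shared local resources, no credentials
TezClient session = TezClient.create("HIVE-" + sessionId, tezConfig,
    true /* isSession */, commonLocalResources, null /* credentials */);
session.start();

// optional container prewarm, then wait until the AM will accept DAGs
PreWarmVertex prewarmVertex =
    utils.createPreWarmVertex(tezConfig, n, commonLocalResources);
session.preWarm(prewarmVertex);
session.waitTillReady();

// later, DAGs are handed to this running session:
// DAGClient dagClient = session.submitDAG(dag);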
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java Mon Sep 8 04:38:17 2014
@@ -29,7 +29,6 @@ import java.util.Set;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.exec.Operator;
@@ -67,7 +66,7 @@ import org.apache.tez.dag.api.client.Sta
* using the Tez APIs directly.
*
*/
-@SuppressWarnings({"serial", "deprecation"})
+@SuppressWarnings({"serial"})
public class TezTask extends Task<TezWork> {
private static final String CLASS_NAME = TezTask.class.getName();
@@ -135,7 +134,7 @@ public class TezTask extends Task<TezWor
}
List<LocalResource> additionalLr = session.getLocalizedResources();
-
+
// log which resources we're adding (apart from the hive exec)
if (LOG.isDebugEnabled()) {
if (additionalLr == null || additionalLr.size() == 0) {
@@ -166,7 +165,7 @@ public class TezTask extends Task<TezWor
counters = client.getDAGStatus(statusGetOpts).getDAGCounters();
TezSessionPoolManager.getInstance().returnSession(session);
- if (LOG.isInfoEnabled()) {
+ if (LOG.isInfoEnabled() && counters != null) {
for (CounterGroup group: counters) {
LOG.info(group.getDisplayName() +":");
for (TezCounter counter: group) {
@@ -212,7 +211,7 @@ public class TezTask extends Task<TezWor
FileSystem fs = scratchDir.getFileSystem(conf);
// the name of the dag is what is displayed in the AM/Job UI
- DAG dag = new DAG(work.getName());
+ DAG dag = DAG.create(work.getName());
for (BaseWork w: ws) {
@@ -247,16 +246,14 @@ public class TezTask extends Task<TezWor
}
VertexGroup group = dag.createVertexGroup(w.getName(), vertexArray);
+ // For a vertex group, all Outputs use the same Key-class, Val-class and partitioner.
+ // Pick any one source vertex to figure out the Edge configuration.
+ JobConf parentConf = workToConf.get(unionWorkItems.get(0));
+
// now hook up the children
for (BaseWork v: children) {
- // need to pairwise patch up the configuration of the vertices
- for (BaseWork part: unionWorkItems) {
- utils.updateConfigurationForEdge(workToConf.get(part), workToVertex.get(part),
- workToConf.get(v), workToVertex.get(v));
- }
-
// finally we can create the grouped edge
- GroupInputEdge e = utils.createEdge(group, workToConf.get(v),
+ GroupInputEdge e = utils.createEdge(group, parentConf,
workToVertex.get(v), work.getEdgeProperty(w, v));
dag.addEdge(e);
@@ -279,7 +276,7 @@ public class TezTask extends Task<TezWor
TezEdgeProperty edgeProp = work.getEdgeProperty(w, v);
- e = utils.createEdge(wxConf, wx, workToConf.get(v), workToVertex.get(v), edgeProp);
+ e = utils.createEdge(wxConf, wx, workToVertex.get(v), edgeProp);
dag.addEdge(e);
}
}
@@ -305,7 +302,8 @@ public class TezTask extends Task<TezWor
try {
// ready to start execution on the cluster
- dagClient = sessionState.getSession().submitDAG(dag, resourceMap);
+ sessionState.getSession().addAppMasterLocalFiles(resourceMap);
+ dagClient = sessionState.getSession().submitDAG(dag);
} catch (SessionNotRunning nr) {
console.printInfo("Tez session was closed. Reopening...");
@@ -313,7 +311,7 @@ public class TezTask extends Task<TezWor
TezSessionPoolManager.getInstance().closeAndOpen(sessionState, this.conf);
console.printInfo("Session re-established.");
- dagClient = sessionState.getSession().submitDAG(dag, resourceMap);
+ dagClient = sessionState.getSession().submitDAG(dag);
}
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.TEZ_SUBMIT_DAG);
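
On the TezTask side the same API shift shows up twice: DAG.create(name) replaces the DAG constructor, and per-DAG local resources are now registered on the session through addAppMasterLocalFiles rather than passed to submitDAG. A condensed sketch of the submission path as it reads after this change:

// the name of the dag is what is displayed in the AM/Job UI
DAG dag = DAG.create(work.getName());  // replaces: new DAG(work.getName())
// ... vertices, vertex groups and edges are added as in the hunks above ...

// resources no longer ride along on submitDAG
sessionState.getSession().addAppMasterLocalFiles(resourceMap);
DAGClient dagClient = sessionState.getSession().submitDAG(dag);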
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/InputMerger.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/InputMerger.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/InputMerger.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/InputMerger.java Mon Sep 8 04:38:17 2014
@@ -37,7 +37,7 @@ import org.apache.tez.runtime.library.ap
* Uses a priority queue to pick the KeyValuesReader of the input that is next in
* sort order.
*/
-public class InputMerger implements KeyValuesReader {
+public class InputMerger extends KeyValuesReader {
public static final Log l4j = LogFactory.getLog(ReduceRecordProcessor.class);
private PriorityQueue<KeyValuesReader> pQueue = null;
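
KeyValuesReader became an abstract class in Tez 0.5, so InputMerger must extend it rather than implement it; the reader contract itself is unchanged. A compile-shaped sketch of the new form, with the priority-queue merge logic elided (the class name and placeholder bodies are illustrative):

import java.io.IOException;

import org.apache.tez.runtime.library.api.KeyValuesReader;

// Sketch of the new shape only: extend, don't implement.
public class MergerSketch extends KeyValuesReader {

  @Override
  public boolean next() throws IOException {
    return false;  // would advance the min-heap of per-input readers
  }

  @Override
  public Object getCurrentKey() throws IOException {
    return null;   // key of the reader currently at the heap top
  }

  @Override
  public Iterable<Object> getCurrentValues() throws IOException {
    return null;   // values of the reader currently at the heap top
  }
}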
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/TezMergedLogicalInput.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/TezMergedLogicalInput.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/TezMergedLogicalInput.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/TezMergedLogicalInput.java Mon Sep 8 04:38:17 2014
@@ -18,9 +18,11 @@
package org.apache.hadoop.hive.ql.exec.tez.tools;
import java.util.IdentityHashMap;
+import java.util.List;
import java.util.Map;
import org.apache.tez.runtime.api.Input;
+import org.apache.tez.runtime.api.MergedInputContext;
import org.apache.tez.runtime.api.MergedLogicalInput;
import org.apache.tez.runtime.api.Reader;
@@ -31,7 +33,11 @@ import org.apache.tez.runtime.api.Reader
public class TezMergedLogicalInput extends MergedLogicalInput {
private Map<Input, Boolean> readyInputs = new IdentityHashMap<Input, Boolean>();
-
+
+ public TezMergedLogicalInput(MergedInputContext context, List<Input> inputs) {
+ super(context, inputs);
+ }
+
@Override
public Reader getReader() throws Exception {
return new InputMerger(getInputs());
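
MergedLogicalInput likewise gained a required (MergedInputContext, List<Input>) constructor in the 0.5 API, which is all this hunk supplies. A sketch of the resulting shape (setConstituentInputIsReady is assumed from the MergedLogicalInput API and falls outside the hunks shown; its body here is illustrative):

import java.util.List;

import org.apache.tez.runtime.api.Input;
import org.apache.tez.runtime.api.MergedInputContext;
import org.apache.tez.runtime.api.MergedLogicalInput;
import org.apache.tez.runtime.api.Reader;

public class MergedInputSketch extends MergedLogicalInput {

  public MergedInputSketch(MergedInputContext context, List<Input> inputs) {
    super(context, inputs);  // context and constituent inputs are now injected
  }

  @Override
  public Reader getReader() throws Exception {
    return null;  // TezMergedLogicalInput returns new InputMerger(getInputs())
  }

  @Override
  public void setConstituentInputIsReady(Input input) {
    // record that one constituent input is ready (bookkeeping elided)
  }
}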
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java Mon Sep 8 04:38:17 2014
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.exec.vector;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hive.common.util.AnnotationUtils;
@@ -28,15 +30,46 @@ import org.apache.hive.common.util.Annot
*/
public class VectorExpressionDescriptor {
+ private static final Log LOG = LogFactory.getLog(
+ VectorExpressionDescriptor.class.getName());
+
final static int MAX_NUM_ARGUMENTS = 3;
+ //
+ // Special handling is needed at times for DATE, TIMESTAMP, (STRING), CHAR, and VARCHAR so they can
+ // be named specifically as argument types.
+ //
+ // LongColumnVector -->
+ // INT_FAMILY
+ // DATE
+ // TIMESTAMP
+ //
+ // DoubleColumnVector -->
+ // FLOAT_FAMILY
+ //
+ // DecimalColumnVector -->
+ // DECIMAL
+ //
+ // BytesColumnVector -->
+ // STRING
+ // CHAR
+ // VARCHAR
+ //
public enum ArgumentType {
- NONE(0),
- LONG(1),
- DOUBLE(2),
- STRING(3),
- DECIMAL(4),
- ANY(7);
+ NONE (0x000),
+ INT_FAMILY (0x001),
+ FLOAT_FAMILY (0x002),
+ DECIMAL (0x004),
+ STRING (0x008),
+ CHAR (0x010),
+ VARCHAR (0x020),
+ STRING_FAMILY (STRING.value | CHAR.value | VARCHAR.value),
+ DATE (0x040),
+ TIMESTAMP (0x080),
+ DATETIME_FAMILY (DATE.value | TIMESTAMP.value),
+ INT_DATETIME_FAMILY (INT_FAMILY.value | DATETIME_FAMILY.value),
+ STRING_DATETIME_FAMILY (STRING_FAMILY.value | DATETIME_FAMILY.value),
+ ALL_FAMILY (0xFFF);
private final int value;
@@ -48,12 +81,79 @@ public class VectorExpressionDescriptor
return value;
}
+ public static ArgumentType fromHiveTypeName(String hiveTypeName) {
+ String lower = hiveTypeName.toLowerCase();
+ if (lower.equals("tinyint") ||
+ lower.equals("smallint") ||
+ lower.equals("int") ||
+ lower.equals("bigint") ||
+ lower.equals("boolean") ||
+ lower.equals("long")) {
+ return INT_FAMILY;
+ } else if (lower.equals("double") || lower.equals("float")) {
+ return FLOAT_FAMILY;
+ } else if (lower.equals("string")) {
+ return STRING;
+ } else if (VectorizationContext.charTypePattern.matcher(lower).matches()) {
+ return CHAR;
+ } else if (VectorizationContext.varcharTypePattern.matcher(lower).matches()) {
+ return VARCHAR;
+ } else if (VectorizationContext.decimalTypePattern.matcher(lower).matches()) {
+ return DECIMAL;
+ } else if (lower.equals("timestamp")) {
+ return TIMESTAMP;
+ } else if (lower.equals("date")) {
+ return DATE;
+ } else if (lower.equals("void")) {
+ // The old code let void through...
+ return INT_FAMILY;
+ } else {
+ return NONE;
+ }
+ }
+
public static ArgumentType getType(String inType) {
- String type = VectorizationContext.getNormalizedTypeName(inType);
- if (VectorizationContext.decimalTypePattern.matcher(type).matches()) {
- type = "decimal";
+ if (inType.equalsIgnoreCase("long")) {
+ // A synonym in some places in the code...
+ return INT_FAMILY;
+ } else if (inType.equalsIgnoreCase("double")) {
+ // A synonym in some places in the code...
+ return FLOAT_FAMILY;
+ } else if (VectorizationContext.decimalTypePattern.matcher(inType).matches()) {
+ return DECIMAL;
+ } else if (VectorizationContext.charTypePattern.matcher(inType).matches()) {
+ return CHAR;
+ } else if (VectorizationContext.varcharTypePattern.matcher(inType).matches()) {
+ return VARCHAR;
+ }
+ return valueOf(inType.toUpperCase());
+ }
+
+ public boolean isSameTypeOrFamily(ArgumentType other) {
+ return ((value & other.value) != 0);
+ }
+
+ public static String getVectorColumnSimpleName(ArgumentType argType) {
+ if (argType == INT_FAMILY ||
+ argType == DATE ||
+ argType == TIMESTAMP) {
+ return "Long";
+ } else if (argType == FLOAT_FAMILY) {
+ return "Double";
+ } else if (argType == DECIMAL) {
+ return "Decimal";
+ } else if (argType == STRING ||
+ argType == CHAR ||
+ argType == VARCHAR) {
+ return "String";
+ } else {
+ return "None";
}
- return valueOf(type.toUpperCase());
+ }
+
+ public static String getVectorColumnSimpleName(String hiveTypeName) {
+ ArgumentType argType = fromHiveTypeName(hiveTypeName);
+ return getVectorColumnSimpleName(argType);
}
}
@@ -162,15 +262,12 @@ public class VectorExpressionDescriptor
*/
public static final class Descriptor {
- @Override
- public boolean equals(Object o) {
- Descriptor other = (Descriptor) o;
+ public boolean matches(Descriptor other) {
if (!mode.equals(other.mode) || (argCount != other.argCount) ) {
return false;
}
for (int i = 0; i < argCount; i++) {
- if (!argTypes[i].equals(other.argTypes[i]) && (!argTypes[i].equals(ArgumentType.ANY) &&
- !other.argTypes[i].equals(ArgumentType.ANY))) {
+ if (!argTypes[i].isSameTypeOrFamily(other.argTypes[i])) {
return false;
}
if (!exprTypes[i].equals(other.exprTypes[i])) {
@@ -228,13 +325,23 @@ public class VectorExpressionDescriptor
Class<? extends VectorExpression>[] list = annotation.value();
for (Class<? extends VectorExpression> ve : list) {
try {
- if (ve.newInstance().getDescriptor().equals(descriptor)) {
+ if (ve.newInstance().getDescriptor().matches(descriptor)) {
return ve;
}
} catch (Exception ex) {
throw new HiveException(ex);
}
}
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("getVectorExpressionClass udf " + udf.getSimpleName() + " descriptor: " + descriptor.toString());
+ for (Class<? extends VectorExpression> ve : list) {
+ try {
+ LOG.debug("getVectorExpressionClass doesn't match " + ve.getSimpleName() + " " + ve.newInstance().getDescriptor().toString());
+ } catch (Exception ex) {
+ throw new HiveException(ex);
+ }
+ }
+ }
return null;
}
}
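
The rewritten ArgumentType encodes each concrete type as one bit and each *_FAMILY constant as the OR of its members, so descriptor matching reduces to a bitwise AND in isSameTypeOrFamily. A small worked example of that rule, using the enum values defined above:

import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;

// Worked example of the bitmask matching rule.
public class ArgumentTypeDemo {
  public static void main(String[] args) {
    // CHAR (0x010) shares a bit with STRING_FAMILY (0x008|0x010|0x020 = 0x038): match.
    System.out.println(ArgumentType.CHAR.isSameTypeOrFamily(ArgumentType.STRING_FAMILY)); // true
    // CHAR (0x010) and STRING (0x008) share no bit: no match.
    System.out.println(ArgumentType.CHAR.isSameTypeOrFamily(ArgumentType.STRING));        // false
    // DATE (0x040) is folded into INT_DATETIME_FAMILY (0x001|0x040|0x080 = 0x0C1): match.
    System.out.println(ArgumentType.DATE.isSameTypeOrFamily(ArgumentType.INT_DATETIME_FAMILY)); // true
  }
}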
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java Mon Sep 8 04:38:17 2014
@@ -146,7 +146,7 @@ public class VectorHashKeyWrapper extend
duplicateTo(clone);
return clone;
}
-
+
public void duplicateTo(VectorHashKeyWrapper clone) {
clone.longValues = longValues.clone();
clone.doubleValues = doubleValues.clone();
@@ -155,7 +155,7 @@ public class VectorHashKeyWrapper extend
// Decimal128 requires deep clone
clone.decimalValues = new Decimal128[decimalValues.length];
for(int i = 0; i < decimalValues.length; ++i) {
- clone.decimalValues[i] = new Decimal128().update(decimalValues[i]);
+ clone.decimalValues[i] = new Decimal128().update(decimalValues[i]);
}
clone.byteValues = new byte[byteValues.length][];
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Mon Sep 8 04:38:17 2014
@@ -33,7 +33,9 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
@@ -97,11 +99,13 @@ import org.apache.hadoop.hive.ql.udf.gen
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.util.StringUtils;
/**
* Context class for vectorization execution.
@@ -123,6 +127,15 @@ public class VectorizationContext {
public static final Pattern decimalTypePattern = Pattern.compile("decimal.*",
Pattern.CASE_INSENSITIVE);
+ public static final Pattern charTypePattern = Pattern.compile("char.*",
+ Pattern.CASE_INSENSITIVE);
+
+ public static final Pattern varcharTypePattern = Pattern.compile("varchar.*",
+ Pattern.CASE_INSENSITIVE);
+
+ public static final Pattern charVarcharTypePattern = Pattern.compile("char.*|varchar.*",
+ Pattern.CASE_INSENSITIVE);
+
//Map column number to type
private final OutputColumnManager ocm;
@@ -210,14 +223,17 @@ public class VectorizationContext {
private final Set<Integer> usedOutputColumns = new HashSet<Integer>();
- int allocateOutputColumn(String columnType) {
- if (initialOutputCol < 0) {
- // This is a test
- return 0;
+ int allocateOutputColumn(String hiveTypeName) {
+ if (initialOutputCol < 0) {
+ // This is a test
+ return 0;
+ }
+
+ // We need to differentiate DECIMAL columns by their precision and scale...
+ String normalizedTypeName = getNormalizedName(hiveTypeName);
+ int relativeCol = allocateOutputColumnInternal(normalizedTypeName);
+ return initialOutputCol + relativeCol;
}
- int relativeCol = allocateOutputColumnInternal(columnType);
- return initialOutputCol + relativeCol;
- }
private int allocateOutputColumnInternal(String columnType) {
for (int i = 0; i < outputColCount; i++) {
@@ -325,11 +341,11 @@ public class VectorizationContext {
childExpressions, mode, exprDesc.getTypeInfo());
}
} else if (exprDesc instanceof ExprNodeNullDesc) {
- ve = getConstantVectorExpression(null, exprDesc.getTypeInfo(), mode);
+ ve = getConstantVectorExpression(null, exprDesc.getTypeInfo(), mode);
} else if (exprDesc instanceof ExprNodeConstantDesc) {
ve = getConstantVectorExpression(((ExprNodeConstantDesc) exprDesc).getValue(), exprDesc.getTypeInfo(),
mode);
- }
+ }
if (ve == null) {
throw new HiveException("Could not vectorize expression: "+exprDesc.getName());
}
@@ -413,8 +429,8 @@ public class VectorizationContext {
}
}
} else {
- for (ExprNodeDesc child : children) {
- ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
+ for (ExprNodeDesc child : children) {
+ ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
if (castExpression != null) {
atleastOneCastNeeded = true;
childrenWithCasts.add(castExpression);
@@ -515,7 +531,7 @@ public class VectorizationContext {
}
return null;
}
-
+
private int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
if (isFloatFamily(typeInfo.getTypeName())) {
return HiveDecimal.MAX_PRECISION;
@@ -548,6 +564,12 @@ public class VectorizationContext {
case STRING:
udfClass = new UDFToString();
break;
+ case CHAR:
+ genericUdf = new GenericUDFToChar();
+ break;
+ case VARCHAR:
+ genericUdf = new GenericUDFToVarchar();
+ break;
case BOOLEAN:
udfClass = new UDFToBoolean();
break;
@@ -572,8 +594,8 @@ public class VectorizationContext {
((GenericUDFBridge) genericUdf).setUdfClassName(udfClass.getClass().getName());
}
if (genericUdf instanceof SettableUDF) {
- ((SettableUDF)genericUdf).setTypeInfo(castType);
- }
+ ((SettableUDF) genericUdf).setTypeInfo(castType);
+ }
return genericUdf;
}
@@ -592,15 +614,15 @@ public class VectorizationContext {
Class<? extends UDF> udfClass = bridge.getUdfClass();
if (udfClass.equals(UDFHex.class)
|| udfClass.equals(UDFConv.class)
- || isCastToIntFamily(udfClass) && arg0Type(expr).equals("string")
- || isCastToFloatFamily(udfClass) && arg0Type(expr).equals("string")
+ || isCastToIntFamily(udfClass) && isStringFamily(arg0Type(expr))
+ || isCastToFloatFamily(udfClass) && isStringFamily(arg0Type(expr))
|| udfClass.equals(UDFToString.class) &&
(arg0Type(expr).equals("timestamp")
|| arg0Type(expr).equals("double")
|| arg0Type(expr).equals("float"))) {
return true;
}
- } else if ((gudf instanceof GenericUDFTimestamp && arg0Type(expr).equals("string"))
+ } else if ((gudf instanceof GenericUDFTimestamp && isStringFamily(arg0Type(expr)))
/* GenericUDFCase and GenericUDFWhen are implemented with the UDF Adaptor because
* of their complexity and generality. In the future, variations of these
@@ -615,6 +637,16 @@ public class VectorizationContext {
|| gudf instanceof GenericUDFCase
|| gudf instanceof GenericUDFWhen) {
return true;
+ } else if (gudf instanceof GenericUDFToChar &&
+ (arg0Type(expr).equals("timestamp")
+ || arg0Type(expr).equals("double")
+ || arg0Type(expr).equals("float"))) {
+ return true;
+ } else if (gudf instanceof GenericUDFToVarchar &&
+ (arg0Type(expr).equals("timestamp")
+ || arg0Type(expr).equals("double")
+ || arg0Type(expr).equals("float"))) {
+ return true;
}
return false;
}
@@ -662,86 +694,80 @@ public class VectorizationContext {
* @throws HiveException
*/
ExprNodeDesc evaluateCastOnConstants(ExprNodeDesc exprDesc) throws HiveException {
- if (!(exprDesc instanceof ExprNodeGenericFuncDesc)) {
- return exprDesc;
- }
-
- if (exprDesc.getChildren() == null || (exprDesc.getChildren().size() != 1) ) {
- return exprDesc;
- }
-
- ExprNodeConstantDesc foldedChild = null;
- if (!( exprDesc.getChildren().get(0) instanceof ExprNodeConstantDesc)) {
-
- // try recursive folding
- ExprNodeDesc expr = evaluateCastOnConstants(exprDesc.getChildren().get(0));
- if (expr instanceof ExprNodeConstantDesc) {
- foldedChild = (ExprNodeConstantDesc) expr;
- }
- } else {
- foldedChild = (ExprNodeConstantDesc) exprDesc.getChildren().get(0);
- }
-
- if (foldedChild == null) {
- return exprDesc;
- }
-
- ObjectInspector childoi = foldedChild.getWritableObjectInspector();
- GenericUDF gudf = ((ExprNodeGenericFuncDesc) exprDesc).getGenericUDF();
-
- // Only evaluate +ve/-ve or cast on constant or recursive casting.
- if (gudf instanceof GenericUDFOPNegative || gudf instanceof GenericUDFOPPositive ||
- castExpressionUdfs.contains(gudf.getClass())
- || ((gudf instanceof GenericUDFBridge)
- && castExpressionUdfs.contains(((GenericUDFBridge) gudf).getUdfClass()))) {
- ExprNodeEvaluator<?> evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
- ObjectInspector output = evaluator.initialize(childoi);
- Object constant = evaluator.evaluate(null);
- Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
- return new ExprNodeConstantDesc(exprDesc.getTypeInfo(), java);
- }
-
- return exprDesc;
+ if (!(exprDesc instanceof ExprNodeGenericFuncDesc)) {
+ return exprDesc;
+ }
+
+ if (exprDesc.getChildren() == null || (exprDesc.getChildren().size() != 1) ) {
+ return exprDesc;
+ }
+
+ ExprNodeConstantDesc foldedChild = null;
+ if (!( exprDesc.getChildren().get(0) instanceof ExprNodeConstantDesc)) {
+
+ // try recursive folding
+ ExprNodeDesc expr = evaluateCastOnConstants(exprDesc.getChildren().get(0));
+ if (expr instanceof ExprNodeConstantDesc) {
+ foldedChild = (ExprNodeConstantDesc) expr;
+ }
+ } else {
+ foldedChild = (ExprNodeConstantDesc) exprDesc.getChildren().get(0);
+ }
+
+ if (foldedChild == null) {
+ return exprDesc;
+ }
+
+ ObjectInspector childoi = foldedChild.getWritableObjectInspector();
+ GenericUDF gudf = ((ExprNodeGenericFuncDesc) exprDesc).getGenericUDF();
+
+ // Only evaluate +ve/-ve or cast on constant or recursive casting.
+ if (gudf instanceof GenericUDFOPNegative || gudf instanceof GenericUDFOPPositive ||
+ castExpressionUdfs.contains(gudf.getClass())
+ || ((gudf instanceof GenericUDFBridge)
+ && castExpressionUdfs.contains(((GenericUDFBridge) gudf).getUdfClass()))) {
+ ExprNodeEvaluator<?> evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
+ ObjectInspector output = evaluator.initialize(childoi);
+ Object constant = evaluator.evaluate(null);
+ Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
+ return new ExprNodeConstantDesc(exprDesc.getTypeInfo(), java);
+ }
+
+ return exprDesc;
}
-
+
/* For cast on constant operator in all members of the input list and return new list
* containing results.
*/
private List<ExprNodeDesc> evaluateCastOnConstants(List<ExprNodeDesc> childExpr)
- throws HiveException {
- List<ExprNodeDesc> evaluatedChildren = new ArrayList<ExprNodeDesc>();
- if (childExpr != null) {
+ throws HiveException {
+ List<ExprNodeDesc> evaluatedChildren = new ArrayList<ExprNodeDesc>();
+ if (childExpr != null) {
for (ExprNodeDesc expr : childExpr) {
- expr = this.evaluateCastOnConstants(expr);
- evaluatedChildren.add(expr);
+ expr = this.evaluateCastOnConstants(expr);
+ evaluatedChildren.add(expr);
}
- }
- return evaluatedChildren;
+ }
+ return evaluatedChildren;
}
-
+
private VectorExpression getConstantVectorExpression(Object constantValue, TypeInfo typeInfo,
Mode mode) throws HiveException {
- String type = typeInfo.getTypeName();
- String colVectorType = getNormalizedTypeName(type);
+ String typeName = typeInfo.getTypeName();
+ VectorExpressionDescriptor.ArgumentType vectorArgType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(typeName);
+ if (vectorArgType == VectorExpressionDescriptor.ArgumentType.NONE) {
+ throw new HiveException("No vector argument type for type name " + typeName);
+ }
int outCol = -1;
if (mode == Mode.PROJECTION) {
- outCol = ocm.allocateOutputColumn(colVectorType);
+ outCol = ocm.allocateOutputColumn(typeName);
}
if (constantValue == null) {
- return new ConstantVectorExpression(outCol, type, true);
- } else if (decimalTypePattern.matcher(type).matches()) {
- VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue);
- ve.setOutputType(typeInfo.getTypeName());
- return ve;
- } else if (type.equalsIgnoreCase("long") || type.equalsIgnoreCase("int") ||
- type.equalsIgnoreCase("short") || type.equalsIgnoreCase("byte")) {
- return new ConstantVectorExpression(outCol,
- ((Number) constantValue).longValue());
- } else if (type.equalsIgnoreCase("double") || type.equalsIgnoreCase("float")) {
- return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue());
- } else if (type.equalsIgnoreCase("string")) {
- return new ConstantVectorExpression(outCol, ((String) constantValue).getBytes());
- } else if (type.equalsIgnoreCase("boolean")) {
+ return new ConstantVectorExpression(outCol, typeName, true);
+ }
+
+ // Boolean is special case.
+ if (typeName.equalsIgnoreCase("boolean")) {
if (mode == Mode.FILTER) {
if (((Boolean) constantValue).booleanValue()) {
return new FilterConstantBooleanVectorExpression(1);
@@ -756,7 +782,26 @@ public class VectorizationContext {
}
}
}
- throw new HiveException("Unsupported constant type: "+type.toString());
+
+ switch (vectorArgType) {
+ case INT_FAMILY:
+ return new ConstantVectorExpression(outCol, ((Number) constantValue).longValue());
+ case FLOAT_FAMILY:
+ return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue());
+ case DECIMAL:
+ VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue);
+ // Set type name with decimal precision, scale, etc.
+ ve.setOutputType(typeName);
+ return ve;
+ case STRING:
+ return new ConstantVectorExpression(outCol, ((String) constantValue).getBytes());
+ case CHAR:
+ return new ConstantVectorExpression(outCol, ((HiveChar) constantValue));
+ case VARCHAR:
+ return new ConstantVectorExpression(outCol, ((HiveVarchar) constantValue));
+ default:
+ throw new HiveException("Unsupported constant type: " + typeName);
+ }
}
/**
@@ -799,7 +844,15 @@ public class VectorizationContext {
builder.setMode(mode);
for (int i = 0; i < numChildren; i++) {
ExprNodeDesc child = childExpr.get(i);
- builder.setArgumentType(i, child.getTypeString());
+ String childTypeString = child.getTypeString();
+ if (childTypeString == null) {
+ throw new HiveException("Null child type name string");
+ }
+ String undecoratedTypeName = getUndecoratedName(childTypeString);
+ if (undecoratedTypeName == null) {
+ throw new HiveException("No match for type string " + childTypeString + " from undecorated type name method");
+ }
+ builder.setArgumentType(i, undecoratedTypeName);
if ((child instanceof ExprNodeGenericFuncDesc) || (child instanceof ExprNodeColumnDesc)) {
builder.setInputExpressionType(i, InputExpressionType.COLUMN);
} else if (child instanceof ExprNodeConstantDesc) {
@@ -829,7 +882,11 @@ public class VectorizationContext {
try {
for (int i = 0; i < numChildren; i++) {
ExprNodeDesc child = childExpr.get(i);
- inputTypes[i] = VectorExpression.Type.getValue(child.getTypeInfo().getTypeName());
+ String undecoratedName = getUndecoratedName(child.getTypeInfo().getTypeName());
+ inputTypes[i] = VectorExpression.Type.getValue(undecoratedName);
+ if (inputTypes[i] == VectorExpression.Type.OTHER){
+ throw new HiveException("No vector type for " + vectorClass.getSimpleName() + " argument #" + i + " type name " + undecoratedName);
+ }
if (child instanceof ExprNodeGenericFuncDesc) {
VectorExpression vChild = getVectorExpression(child, childrenMode);
children.add(vChild);
@@ -870,36 +927,71 @@ public class VectorizationContext {
return Mode.PROJECTION;
}
+ private String getNewInstanceArgumentString(Object [] args) {
+ if (args == null) {
+ return "arguments: NULL";
+ }
+ ArrayList<String> argClasses = new ArrayList<String>();
+ for (Object obj : args) {
+ argClasses.add(obj.getClass().getSimpleName());
+ }
+ return "arguments: " + Arrays.toString(args) + ", argument classes: " + argClasses.toString();
+ }
+
private VectorExpression instantiateExpression(Class<?> vclass, TypeInfo returnType, Object...args)
throws HiveException {
VectorExpression ve = null;
Constructor<?> ctor = getConstructor(vclass);
int numParams = ctor.getParameterTypes().length;
int argsLength = (args == null) ? 0 : args.length;
- try {
- if (numParams == 0) {
+ if (numParams == 0) {
+ try {
ve = (VectorExpression) ctor.newInstance();
- } else if (numParams == argsLength) {
+ } catch (Exception ex) {
+ throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with 0 arguments, exception: " +
+ StringUtils.stringifyException(ex));
+ }
+ } else if (numParams == argsLength) {
+ try {
ve = (VectorExpression) ctor.newInstance(args);
- } else if (numParams == argsLength + 1) {
- // Additional argument is needed, which is the outputcolumn.
+ } catch (Exception ex) {
+ throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with " + getNewInstanceArgumentString(args) + ", exception: " +
+ StringUtils.stringifyException(ex));
+ }
+ } else if (numParams == argsLength + 1) {
+ // Additional argument is needed, which is the outputcolumn.
+ Object [] newArgs = null;
+ try {
String outType;
// Special handling for decimal because decimal types need scale and precision parameter.
// This special handling should be avoided by using returnType uniformly for all cases.
if (returnType != null) {
- outType = getNormalizedTypeName(returnType.getTypeName()).toLowerCase();
+ outType = getNormalizedName(returnType.getTypeName()).toLowerCase();
+ if (outType == null) {
+ throw new HiveException("No vector type for type name " + returnType);
+ }
} else {
outType = ((VectorExpression) vclass.newInstance()).getOutputType();
}
int outputCol = ocm.allocateOutputColumn(outType);
- Object [] newArgs = Arrays.copyOf(args, numParams);
+ newArgs = Arrays.copyOf(args, numParams);
newArgs[numParams-1] = outputCol;
+
ve = (VectorExpression) ctor.newInstance(newArgs);
ve.setOutputType(outType);
+ } catch (Exception ex) {
+ throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with arguments " + getNewInstanceArgumentString(newArgs) + ", exception: " +
+ StringUtils.stringifyException(ex));
+ }
+ }
+ // Add maxLength parameter to UDFs that have CHAR or VARCHAR output.
+ if (ve instanceof TruncStringOutput) {
+ TruncStringOutput truncStringOutput = (TruncStringOutput) ve;
+ if (returnType instanceof BaseCharTypeInfo) {
+ BaseCharTypeInfo baseCharTypeInfo = (BaseCharTypeInfo) returnType;
+ truncStringOutput.setMaxLength(baseCharTypeInfo.getLength());
}
- } catch (Exception ex) {
- throw new HiveException("Could not instantiate " + vclass.getSimpleName(), ex);
}
return ve;
}
@@ -907,9 +999,9 @@ public class VectorizationContext {
private VectorExpression getGenericUdfVectorExpression(GenericUDF udf,
List<ExprNodeDesc> childExpr, Mode mode, TypeInfo returnType) throws HiveException {
- List<ExprNodeDesc> castedChildren = evaluateCastOnConstants(childExpr);
- childExpr = castedChildren;
-
+ List<ExprNodeDesc> castedChildren = evaluateCastOnConstants(childExpr);
+ childExpr = castedChildren;
+
//First handle special cases
if (udf instanceof GenericUDFBetween) {
return getBetweenFilterExpression(childExpr, mode, returnType);
@@ -933,8 +1025,12 @@ public class VectorizationContext {
}
} else if (udf instanceof GenericUDFToDecimal) {
return getCastToDecimal(childExpr, returnType);
- }
-
+ } else if (udf instanceof GenericUDFToChar) {
+ return getCastToChar(childExpr, returnType);
+ } else if (udf instanceof GenericUDFToVarchar) {
+ return getCastToVarChar(childExpr, returnType);
+ }
+
// Now do a general lookup
Class<?> udfClass = udf.getClass();
if (udf instanceof GenericUDFBridge) {
@@ -962,7 +1058,7 @@ public class VectorizationContext {
inputColumns[i++] = ve.getOutputColumn();
}
- int outColumn = ocm.allocateOutputColumn(getNormalizedTypeName(returnType.getTypeName()));
+ int outColumn = ocm.allocateOutputColumn(returnType.getTypeName());
VectorCoalesce vectorCoalesce = new VectorCoalesce(inputColumns, outColumn);
vectorCoalesce.setOutputType(returnType.getTypeName());
vectorCoalesce.setChildExpressions(vectorChildren);
@@ -989,7 +1085,7 @@ public class VectorizationContext {
inputColumns[i++] = ve.getOutputColumn();
}
- int outColumn = ocm.allocateOutputColumn(getNormalizedTypeName(returnType.getTypeName()));
+ int outColumn = ocm.allocateOutputColumn(returnType.getTypeName());
VectorElt vectorElt = new VectorElt(inputColumns, outColumn);
vectorElt.setOutputType(returnType.getTypeName());
vectorElt.setChildExpressions(vectorChildren);
@@ -1003,7 +1099,7 @@ public class VectorizationContext {
}
}
}
-
+
/**
* Create a filter or boolean-valued expression for column IN ( <list-of-constants> )
*/
@@ -1014,8 +1110,8 @@ public class VectorizationContext {
String colType = colExpr.getTypeString();
// prepare arguments for createVectorExpression
- List<ExprNodeDesc> childrenForInList = evaluateCastOnConstants(childExpr.subList(1, childExpr.size()));
-
+ List<ExprNodeDesc> childrenForInList = evaluateCastOnConstants(childExpr.subList(1, childExpr.size()));
+
/* This method assumes that the IN list has no NULL entries. That is enforced elsewhere,
* in the Vectorizer class. If NULL is passed in as a list entry, behavior is not defined.
* If in the future, NULL values are allowed in the IN list, be sure to handle 3-valued
@@ -1110,105 +1206,105 @@ public class VectorizationContext {
return getCastToString(childExpr, returnType);
}
return null;
- }
-
+ }
+
private Decimal128 castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException {
- PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
- String typename = type.getTypeName();
- Decimal128 d = new Decimal128();
- int scale = HiveDecimalUtils.getScaleForType(ptinfo);
- switch (ptinfo.getPrimitiveCategory()) {
- case FLOAT:
- float floatVal = ((Float) scalar).floatValue();
- d.update(floatVal, (short) scale);
- break;
- case DOUBLE:
- double doubleVal = ((Double) scalar).doubleValue();
- d.update(doubleVal, (short) scale);
- break;
- case BYTE:
- byte byteVal = ((Byte) scalar).byteValue();
- d.update(byteVal, (short) scale);
- break;
- case SHORT:
- short shortVal = ((Short) scalar).shortValue();
- d.update(shortVal, (short) scale);
- break;
- case INT:
- int intVal = ((Integer) scalar).intValue();
- d.update(intVal, (short) scale);
- break;
- case LONG:
- long longVal = ((Long) scalar).longValue();
- d.update(longVal, (short) scale);
- break;
- case DECIMAL:
- HiveDecimal decimalVal = (HiveDecimal) scalar;
- d.update(decimalVal.unscaledValue(), (short) scale);
- break;
- default:
- throw new HiveException("Unsupported type "+typename+" for cast to Decimal128");
- }
- return d;
+ PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
+ String typename = type.getTypeName();
+ Decimal128 d = new Decimal128();
+ int scale = HiveDecimalUtils.getScaleForType(ptinfo);
+ switch (ptinfo.getPrimitiveCategory()) {
+ case FLOAT:
+ float floatVal = ((Float) scalar).floatValue();
+ d.update(floatVal, (short) scale);
+ break;
+ case DOUBLE:
+ double doubleVal = ((Double) scalar).doubleValue();
+ d.update(doubleVal, (short) scale);
+ break;
+ case BYTE:
+ byte byteVal = ((Byte) scalar).byteValue();
+ d.update(byteVal, (short) scale);
+ break;
+ case SHORT:
+ short shortVal = ((Short) scalar).shortValue();
+ d.update(shortVal, (short) scale);
+ break;
+ case INT:
+ int intVal = ((Integer) scalar).intValue();
+ d.update(intVal, (short) scale);
+ break;
+ case LONG:
+ long longVal = ((Long) scalar).longValue();
+ d.update(longVal, (short) scale);
+ break;
+ case DECIMAL:
+ HiveDecimal decimalVal = (HiveDecimal) scalar;
+ d.update(decimalVal.unscaledValue(), (short) scale);
+ break;
+ default:
+ throw new HiveException("Unsupported type "+typename+" for cast to Decimal128");
+ }
+ return d;
}
private String castConstantToString(Object scalar, TypeInfo type) throws HiveException {
- PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
- String typename = type.getTypeName();
- switch (ptinfo.getPrimitiveCategory()) {
- case FLOAT:
- case DOUBLE:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return ((Number) scalar).toString();
- case DECIMAL:
- HiveDecimal decimalVal = (HiveDecimal) scalar;
- return decimalVal.toString();
- default:
- throw new HiveException("Unsupported type "+typename+" for cast to String");
- }
+ PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
+ String typename = type.getTypeName();
+ switch (ptinfo.getPrimitiveCategory()) {
+ case FLOAT:
+ case DOUBLE:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return ((Number) scalar).toString();
+ case DECIMAL:
+ HiveDecimal decimalVal = (HiveDecimal) scalar;
+ return decimalVal.toString();
+ default:
+ throw new HiveException("Unsupported type "+typename+" for cast to String");
+ }
}
private Double castConstantToDouble(Object scalar, TypeInfo type) throws HiveException {
- PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
- String typename = type.getTypeName();
- switch (ptinfo.getPrimitiveCategory()) {
- case FLOAT:
- case DOUBLE:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return ((Number) scalar).doubleValue();
- case DECIMAL:
- HiveDecimal decimalVal = (HiveDecimal) scalar;
- return decimalVal.doubleValue();
- default:
- throw new HiveException("Unsupported type "+typename+" for cast to Double");
- }
- }
+ PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
+ String typename = type.getTypeName();
+ switch (ptinfo.getPrimitiveCategory()) {
+ case FLOAT:
+ case DOUBLE:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return ((Number) scalar).doubleValue();
+ case DECIMAL:
+ HiveDecimal decimalVal = (HiveDecimal) scalar;
+ return decimalVal.doubleValue();
+ default:
+ throw new HiveException("Unsupported type "+typename+" for cast to Double");
+ }
+ }
private Long castConstantToLong(Object scalar, TypeInfo type) throws HiveException {
- PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
- String typename = type.getTypeName();
- switch (ptinfo.getPrimitiveCategory()) {
- case FLOAT:
- case DOUBLE:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return ((Number) scalar).longValue();
- case DECIMAL:
- HiveDecimal decimalVal = (HiveDecimal) scalar;
- return decimalVal.longValue();
- default:
- throw new HiveException("Unsupported type "+typename+" for cast to Long");
- }
- }
-
+ PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
+ String typename = type.getTypeName();
+ switch (ptinfo.getPrimitiveCategory()) {
+ case FLOAT:
+ case DOUBLE:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return ((Number) scalar).longValue();
+ case DECIMAL:
+ HiveDecimal decimalVal = (HiveDecimal) scalar;
+ return decimalVal.longValue();
+ default:
+ throw new HiveException("Unsupported type "+typename+" for cast to Long");
+ }
+ }
+
private VectorExpression getCastToDecimal(List<ExprNodeDesc> childExpr, TypeInfo returnType)
throws HiveException {
ExprNodeDesc child = childExpr.get(0);
@@ -1217,9 +1313,9 @@ public class VectorizationContext {
// Return a constant vector expression
Object constantValue = ((ExprNodeConstantDesc) child).getValue();
Decimal128 decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
- return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION);
} else if (child instanceof ExprNodeNullDesc) {
- return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
}
if (isIntFamily(inputType)) {
return createVectorExpression(CastLongToDecimal.class, childExpr, Mode.PROJECTION, returnType);
@@ -1234,8 +1330,8 @@ public class VectorizationContext {
return createVectorExpression(CastTimestampToDecimal.class, childExpr, Mode.PROJECTION, returnType);
}
throw new HiveException("Unhandled cast input type: " + inputType);
- }
-
+ }
+
private VectorExpression getCastToString(List<ExprNodeDesc> childExpr, TypeInfo returnType)
throws HiveException {
ExprNodeDesc child = childExpr.get(0);
@@ -1244,9 +1340,9 @@ public class VectorizationContext {
// Return a constant vector expression
Object constantValue = ((ExprNodeConstantDesc) child).getValue();
String strValue = castConstantToString(constantValue, child.getTypeInfo());
- return getConstantVectorExpression(strValue, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(strValue, returnType, Mode.PROJECTION);
} else if (child instanceof ExprNodeNullDesc) {
- return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
}
if (inputType.equals("boolean")) {
// Boolean must come before the integer family. It's a special case.
@@ -1265,6 +1361,64 @@ public class VectorizationContext {
throw new HiveException("Unhandled cast input type: " + inputType);
}
+ private VectorExpression getCastToChar(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+ throws HiveException {
+ ExprNodeDesc child = childExpr.get(0);
+ String inputType = childExpr.get(0).getTypeString();
+ if (child instanceof ExprNodeConstantDesc) {
+ // Don't do constant folding here. Wait until the optimizer is changed to do it.
+ // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+ return null;
+ }
+ if (inputType.equals("boolean")) {
+ // Boolean must come before the integer family. It's a special case.
+ return createVectorExpression(CastBooleanToCharViaLongToChar.class, childExpr, Mode.PROJECTION, null);
+ } else if (isIntFamily(inputType)) {
+ return createVectorExpression(CastLongToChar.class, childExpr, Mode.PROJECTION, null);
+ } else if (isDecimalFamily(inputType)) {
+ return createVectorExpression(CastDecimalToChar.class, childExpr, Mode.PROJECTION, returnType);
+ } else if (isDateFamily(inputType)) {
+ return createVectorExpression(CastDateToChar.class, childExpr, Mode.PROJECTION, returnType);
+ } else if (isStringFamily(inputType)) {
+ return createVectorExpression(CastStringGroupToChar.class, childExpr, Mode.PROJECTION, returnType);
+ }
+
+ /*
+ * Timestamp, float, and double types are handled by the legacy code path. See isLegacyPathUDF.
+ */
+
+ throw new HiveException("Unhandled cast input type: " + inputType);
+ }
+
+ private VectorExpression getCastToVarChar(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+ throws HiveException {
+ ExprNodeDesc child = childExpr.get(0);
+ String inputType = childExpr.get(0).getTypeString();
+ if (child instanceof ExprNodeConstantDesc) {
+ // Don't do constant folding here. Wait until the optimizer is changed to do it.
+ // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+ return null;
+ }
+ if (inputType.equals("boolean")) {
+ // Boolean must come before the integer family. It's a special case.
+ return createVectorExpression(CastBooleanToVarCharViaLongToVarChar.class, childExpr, Mode.PROJECTION, null);
+ } else if (isIntFamily(inputType)) {
+ return createVectorExpression(CastLongToVarChar.class, childExpr, Mode.PROJECTION, null);
+ } else if (isDecimalFamily(inputType)) {
+ return createVectorExpression(CastDecimalToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+ } else if (isDateFamily(inputType)) {
+ return createVectorExpression(CastDateToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+ } else if (isStringFamily(inputType)) {
+ return createVectorExpression(CastStringGroupToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+ }
+
+ /*
+ * Timestamp, float, and double types are handled by the legacy code path. See isLegacyPathUDF.
+ */
+
+ throw new HiveException("Unhandled cast input type: " + inputType);
+ }
+
private VectorExpression getCastToDoubleExpression(Class<?> udf, List<ExprNodeDesc> childExpr,
TypeInfo returnType) throws HiveException {
ExprNodeDesc child = childExpr.get(0);
@@ -1273,9 +1427,9 @@ public class VectorizationContext {
// Return a constant vector expression
Object constantValue = ((ExprNodeConstantDesc) child).getValue();
Double doubleValue = castConstantToDouble(constantValue, child.getTypeInfo());
- return getConstantVectorExpression(doubleValue, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(doubleValue, returnType, Mode.PROJECTION);
} else if (child instanceof ExprNodeNullDesc) {
- return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
+ return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
}
if (isIntFamily(inputType)) {
return createVectorExpression(CastLongToDouble.class, childExpr, Mode.PROJECTION, returnType);
@@ -1286,7 +1440,7 @@ public class VectorizationContext {
// float types require no conversion, so use a no-op
return getIdentityExpression(childExpr);
- }
+ }
// The string type is deliberately omitted -- it's handled elsewhere. See isLegacyPathUDF.
return null;
@@ -1301,15 +1455,15 @@ public class VectorizationContext {
// Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
return null;
} else if (child instanceof ExprNodeNullDesc) {
- return getConstantVectorExpression(null, TypeInfoFactory.booleanTypeInfo, Mode.PROJECTION);
+ return getConstantVectorExpression(null, TypeInfoFactory.booleanTypeInfo, Mode.PROJECTION);
}
      // Long and double are handled using descriptors; string needs special handling.
- if (inputType.equals("string")) {
+ if (isStringFamily(inputType)) {
      // A string casts to false if it is 0 characters long, and to true otherwise.
VectorExpression lenExpr = createVectorExpression(StringLength.class, childExpr,
Mode.PROJECTION, null);
- int outputCol = ocm.allocateOutputColumn("integer");
+ int outputCol = ocm.allocateOutputColumn("Long");
VectorExpression lenToBoolExpr =
new CastLongToBooleanViaLongToLong(lenExpr.getOutputColumn(), outputCol);
lenToBoolExpr.setChildExpressions(new VectorExpression[] {lenExpr});
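The composite expression above evaluates in two steps: StringLength writes each value's length into a scratch long column, and CastLongToBooleanViaLongToLong then maps that length to a boolean. A worked trace with illustrative values (single-row batch assumed):

    // ""    -> StringLength -> 0 -> CastLongToBooleanViaLongToLong -> false
    // "abc" -> StringLength -> 3 -> CastLongToBooleanViaLongToLong -> true
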
@@ -1329,9 +1483,9 @@ public class VectorizationContext {
// Return a constant vector expression
Object constantValue = ((ExprNodeConstantDesc) child).getValue();
Long longValue = castConstantToLong(constantValue, child.getTypeInfo());
- return getConstantVectorExpression(longValue, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
+ return getConstantVectorExpression(longValue, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
} else if (child instanceof ExprNodeNullDesc) {
- return getConstantVectorExpression(null, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
+ return getConstantVectorExpression(null, TypeInfoFactory.longTypeInfo, Mode.PROJECTION);
}
      // The float family and timestamp are handled via descriptor-based lookup; the int family
      // needs special handling.
@@ -1411,6 +1565,14 @@ public class VectorizationContext {
cl = FilterStringColumnBetween.class;
} else if (colType.equals("string") && notKeywordPresent) {
cl = FilterStringColumnNotBetween.class;
+ } else if (varcharTypePattern.matcher(colType).matches() && !notKeywordPresent) {
+ cl = FilterVarCharColumnBetween.class;
+ } else if (varcharTypePattern.matcher(colType).matches() && notKeywordPresent) {
+ cl = FilterVarCharColumnNotBetween.class;
+ } else if (charTypePattern.matcher(colType).matches() && !notKeywordPresent) {
+ cl = FilterCharColumnBetween.class;
+ } else if (charTypePattern.matcher(colType).matches() && notKeywordPresent) {
+ cl = FilterCharColumnNotBetween.class;
} else if (colType.equals("timestamp")) {
// Get timestamp boundary values as longs instead of the expected strings
@@ -1483,13 +1645,13 @@ public class VectorizationContext {
      // Allocate the output column and get its column number.
int outputCol = -1;
- String resultType = expr.getTypeInfo().getTypeName();
- String resultColVectorType = getNormalizedTypeName(resultType);
+ String resultTypeName = expr.getTypeInfo().getTypeName();
- outputCol = ocm.allocateOutputColumn(resultColVectorType);
+ outputCol = ocm.allocateOutputColumn(resultTypeName);
// Make vectorized operator
- VectorExpression ve = new VectorUDFAdaptor(expr, outputCol, resultColVectorType, argDescs);
+ String normalizedName = getNormalizedName(resultTypeName);
+ VectorExpression ve = new VectorUDFAdaptor(expr, outputCol, normalizedName, argDescs);
// Set child expressions
VectorExpression[] childVEs = null;
@@ -1509,7 +1671,7 @@ public class VectorizationContext {
}
public static boolean isStringFamily(String resultType) {
- return resultType.equalsIgnoreCase("string");
+ return resultType.equalsIgnoreCase("string") || charVarcharTypePattern.matcher(resultType).matches();
}
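The charVarcharTypePattern referenced here is declared elsewhere in this commit; a hypothetical definition consistent with its use (the real regex may differ):

    // Hypothetical sketch only -- matches decorated names like char(10) and varchar(50).
    static final Pattern charVarcharTypePattern =
        Pattern.compile("char.*|varchar.*", Pattern.CASE_INSENSITIVE);
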
public static boolean isDatetimeFamily(String resultType) {
@@ -1519,11 +1681,11 @@ public class VectorizationContext {
public static boolean isTimestampFamily(String resultType) {
return resultType.equalsIgnoreCase("timestamp");
}
-
+
public static boolean isDateFamily(String resultType) {
return resultType.equalsIgnoreCase("date");
}
-
+
// return true if this is any kind of float
public static boolean isFloatFamily(String resultType) {
return resultType.equalsIgnoreCase("double")
@@ -1617,7 +1779,7 @@ public class VectorizationContext {
"Non-constant argument not supported for vectorization.");
}
ExprNodeConstantDesc constExpr = (ExprNodeConstantDesc) expr;
- if (constExpr.getTypeString().equals("string")) {
+ if (isStringFamily(constExpr.getTypeString())) {
// create expression tree with type cast from string to timestamp
ExprNodeGenericFuncDesc expr2 = new ExprNodeGenericFuncDesc();
@@ -1667,63 +1829,99 @@ public class VectorizationContext {
}
}
- static String getNormalizedTypeName(String colType){
- String normalizedType = null;
- if (colType.equalsIgnoreCase("Double") || colType.equalsIgnoreCase("Float")) {
- normalizedType = "Double";
- } else if (colType.equalsIgnoreCase("String")) {
- normalizedType = "String";
- } else if (decimalTypePattern.matcher(colType).matches()) {
+ static String getNormalizedName(String hiveTypeName) {
+ VectorExpressionDescriptor.ArgumentType argType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(hiveTypeName);
+ switch (argType) {
+ case INT_FAMILY:
+ return "Long";
+ case FLOAT_FAMILY:
+ return "Double";
+ case DECIMAL:
      // Return the decimal type as is; it includes scale and precision.
- normalizedType = colType;
- } else {
- normalizedType = "Long";
+ return hiveTypeName;
+ case STRING:
+ return "String";
+ case CHAR:
+ // Return the CHAR type as is; it includes the maximum length.
+ return hiveTypeName;
+ case VARCHAR:
+ // Return the VARCHAR type as is; it includes the maximum length.
+ return hiveTypeName;
+ case DATE:
+ return "Date";
+ case TIMESTAMP:
+ return "Timestamp";
+ default:
+ return "None";
}
- return normalizedType;
}
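Assuming ArgumentType.fromHiveTypeName buckets type names into the families named in the switch, getNormalizedName behaves roughly as follows (inputs are illustrative):

    getNormalizedName("int")           // -> "Long"
    getNormalizedName("float")         // -> "Double"
    getNormalizedName("decimal(10,2)") // -> "decimal(10,2)"  (precision and scale kept)
    getNormalizedName("varchar(50)")   // -> "varchar(50)"    (maximum length kept)
    getNormalizedName("string")        // -> "String"
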
-
- static Object[][] aggregatesDefinition = {
- {"min", "Long", VectorUDAFMinLong.class},
- {"min", "Double", VectorUDAFMinDouble.class},
- {"min", "String", VectorUDAFMinString.class},
- {"min", "Decimal",VectorUDAFMinDecimal.class},
- {"max", "Long", VectorUDAFMaxLong.class},
- {"max", "Double", VectorUDAFMaxDouble.class},
- {"max", "String", VectorUDAFMaxString.class},
- {"max", "Decimal",VectorUDAFMaxDecimal.class},
- {"count", null, VectorUDAFCountStar.class},
- {"count", "Long", VectorUDAFCount.class},
- {"count", "Double", VectorUDAFCount.class},
- {"count", "String", VectorUDAFCount.class},
- {"count", "Decimal",VectorUDAFCount.class},
- {"sum", "Long", VectorUDAFSumLong.class},
- {"sum", "Double", VectorUDAFSumDouble.class},
- {"sum", "Decimal",VectorUDAFSumDecimal.class},
- {"avg", "Long", VectorUDAFAvgLong.class},
- {"avg", "Double", VectorUDAFAvgDouble.class},
- {"avg", "Decimal",VectorUDAFAvgDecimal.class},
- {"variance", "Long", VectorUDAFVarPopLong.class},
- {"var_pop", "Long", VectorUDAFVarPopLong.class},
- {"variance", "Double", VectorUDAFVarPopDouble.class},
- {"var_pop", "Double", VectorUDAFVarPopDouble.class},
- {"variance", "Decimal",VectorUDAFVarPopDecimal.class},
- {"var_pop", "Decimal",VectorUDAFVarPopDecimal.class},
- {"var_samp", "Long", VectorUDAFVarSampLong.class},
- {"var_samp" , "Double", VectorUDAFVarSampDouble.class},
- {"var_samp" , "Decimal",VectorUDAFVarSampDecimal.class},
- {"std", "Long", VectorUDAFStdPopLong.class},
- {"stddev", "Long", VectorUDAFStdPopLong.class},
- {"stddev_pop","Long", VectorUDAFStdPopLong.class},
- {"std", "Double", VectorUDAFStdPopDouble.class},
- {"stddev", "Double", VectorUDAFStdPopDouble.class},
- {"stddev_pop","Double", VectorUDAFStdPopDouble.class},
- {"std", "Decimal",VectorUDAFStdPopDecimal.class},
- {"stddev", "Decimal",VectorUDAFStdPopDecimal.class},
- {"stddev_pop","Decimal",VectorUDAFStdPopDecimal.class},
- {"stddev_samp","Long", VectorUDAFStdSampLong.class},
- {"stddev_samp","Double",VectorUDAFStdSampDouble.class},
- {"stddev_samp","Decimal",VectorUDAFStdSampDecimal.class},
- };
+
+ static String getUndecoratedName(String hiveTypeName) {
+ VectorExpressionDescriptor.ArgumentType argType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(hiveTypeName);
+ switch (argType) {
+ case INT_FAMILY:
+ return "Long";
+ case FLOAT_FAMILY:
+ return "Double";
+ case DECIMAL:
+ return "Decimal";
+ case STRING:
+ return "String";
+ case CHAR:
+ return "Char";
+ case VARCHAR:
+ return "VarChar";
+ case DATE:
+ return "Date";
+ case TIMESTAMP:
+ return "Timestamp";
+ default:
+ return "None";
+ }
+ }
+
+ static ArrayList<AggregateDefinition> aggregatesDefinition = new ArrayList<AggregateDefinition>() {{
+ add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFMinLong.class));
+ add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFMinDouble.class));
+ add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, VectorUDAFMinString.class));
+ add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFMinDecimal.class));
+ add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFMaxLong.class));
+ add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFMaxDouble.class));
+ add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, VectorUDAFMaxString.class));
+ add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFMaxDecimal.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.NONE, VectorUDAFCountStar.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFCount.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFCount.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, VectorUDAFCount.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFCount.class));
+ add(new AggregateDefinition("sum", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFSumLong.class));
+ add(new AggregateDefinition("sum", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFSumDouble.class));
+ add(new AggregateDefinition("sum", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFSumDecimal.class));
+ add(new AggregateDefinition("avg", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFAvgLong.class));
+ add(new AggregateDefinition("avg", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFAvgDouble.class));
+ add(new AggregateDefinition("avg", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFAvgDecimal.class));
+ add(new AggregateDefinition("variance", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFVarPopLong.class));
+ add(new AggregateDefinition("var_pop", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFVarPopLong.class));
+ add(new AggregateDefinition("variance", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFVarPopDouble.class));
+ add(new AggregateDefinition("var_pop", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFVarPopDouble.class));
+ add(new AggregateDefinition("variance", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFVarPopDecimal.class));
+ add(new AggregateDefinition("var_pop", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFVarPopDecimal.class));
+ add(new AggregateDefinition("var_samp", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFVarSampLong.class));
+ add(new AggregateDefinition("var_samp" , VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFVarSampDouble.class));
+ add(new AggregateDefinition("var_samp" , VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFVarSampDecimal.class));
+ add(new AggregateDefinition("std", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFStdPopLong.class));
+ add(new AggregateDefinition("stddev", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFStdPopLong.class));
+ add(new AggregateDefinition("stddev_pop", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFStdPopLong.class));
+ add(new AggregateDefinition("std", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFStdPopDouble.class));
+ add(new AggregateDefinition("stddev", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFStdPopDouble.class));
+ add(new AggregateDefinition("stddev_pop", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFStdPopDouble.class));
+ add(new AggregateDefinition("std", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFStdPopDecimal.class));
+ add(new AggregateDefinition("stddev", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFStdPopDecimal.class));
+ add(new AggregateDefinition("stddev_pop", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFStdPopDecimal.class));
+ add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, VectorUDAFStdSampLong.class));
+ add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, VectorUDAFStdSampDouble.class));
+ add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.DECIMAL, VectorUDAFStdSampDecimal.class));
+ }};
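AggregateDefinition is a new holder class added elsewhere in this commit; a minimal sketch consistent with how it is constructed and read in this file:

    // Sketch only -- the real class ships in a separate file of this commit.
    static class AggregateDefinition {
      private final String name;
      private final VectorExpressionDescriptor.ArgumentType type;
      private final Class<? extends VectorAggregateExpression> aggClass;

      AggregateDefinition(String name, VectorExpressionDescriptor.ArgumentType type,
          Class<? extends VectorAggregateExpression> aggClass) {
        this.name = name;
        this.type = type;
        this.aggClass = aggClass;
      }

      String getName() { return name; }
      VectorExpressionDescriptor.ArgumentType getType() { return type; }
      Class<? extends VectorAggregateExpression> getAggClass() { return aggClass; }
    }
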
public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc)
throws HiveException {
@@ -1737,22 +1935,22 @@ public class VectorizationContext {
}
String aggregateName = desc.getGenericUDAFName();
- String inputType = null;
+ VectorExpressionDescriptor.ArgumentType inputType = VectorExpressionDescriptor.ArgumentType.NONE;
if (paramDescList.size() > 0) {
ExprNodeDesc inputExpr = paramDescList.get(0);
- inputType = getNormalizedTypeName(inputExpr.getTypeString());
- if (decimalTypePattern.matcher(inputType).matches()) {
- inputType = "Decimal";
+ inputType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(inputExpr.getTypeString());
+ if (inputType == VectorExpressionDescriptor.ArgumentType.NONE) {
+ throw new HiveException("No vector argument type for Hive type name " + inputExpr.getTypeString());
}
}
- for (Object[] aggDef : aggregatesDefinition) {
- if (aggregateName.equalsIgnoreCase((String) aggDef[0]) &&
- ((aggDef[1] == null && inputType == null) ||
- (aggDef[1] != null && aggDef[1].equals(inputType)))) {
- Class<? extends VectorAggregateExpression> aggClass =
- (Class<? extends VectorAggregateExpression>) (aggDef[2]);
+ for (AggregateDefinition aggDef : aggregatesDefinition) {
+ if (aggregateName.equalsIgnoreCase(aggDef.getName()) &&
+ ((aggDef.getType() == VectorExpressionDescriptor.ArgumentType.NONE &&
+ inputType == VectorExpressionDescriptor.ArgumentType.NONE) ||
+ (aggDef.getType().isSameTypeOrFamily(inputType)))) {
+ Class<? extends VectorAggregateExpression> aggClass = aggDef.getAggClass();
try
{
Constructor<? extends VectorAggregateExpression> ctor =
@@ -1769,7 +1967,7 @@ public class VectorizationContext {
}
throw new HiveException("Vector aggregate not implemented: \"" + aggregateName +
- "\" for type: \"" + inputType + "");
+ "\" for type: \"" + inputType.name() + "");
}
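The lookup above matches by name (case-insensitively) and by argument family via isSameTypeOrFamily. For example, assuming fromHiveTypeName maps "int" to INT_FAMILY, an AggregationDesc for sum over an int column resolves to the VectorUDAFSumLong entry and is then instantiated reflectively through the constructor obtained in the loop.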
public Map<Integer, String> getOutputColumnTypeMap() {
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java Mon Sep 8 04:38:17 2014
@@ -23,11 +23,16 @@ import java.sql.Timestamp;
import java.util.LinkedList;
import java.util.List;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -126,6 +131,8 @@ public class VectorizedBatchUtil {
break;
case BINARY:
case STRING:
+ case CHAR:
+ case VARCHAR:
cvList.add(new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE));
break;
case DECIMAL:
@@ -375,6 +382,51 @@ public class VectorizedBatchUtil {
}
}
break;
+ case CHAR: {
+ BytesColumnVector bcv = (BytesColumnVector) batch.cols[off + i];
+ if (writableCol != null) {
+ bcv.isNull[rowIndex] = false;
+ HiveChar colHiveChar = ((HiveCharWritable) writableCol).getHiveChar();
+ byte[] bytes = colHiveChar.getStrippedValue().getBytes();
+
+ // We assume the CHAR maximum length was enforced when the object was created.
+ int length = bytes.length;
+
+ int start = buffer.getLength();
+ try {
+ // In vector mode, we store CHAR as unpadded.
+ buffer.write(bytes, 0, length);
+ } catch (IOException ioe) {
+ throw new IllegalStateException("Failed to write CHAR bytes to the batch buffer", ioe);
+ }
+ bcv.setRef(rowIndex, buffer.getData(), start, length);
+ } else {
+ setNullColIsNullValue(bcv, rowIndex);
+ }
+ }
+ break;
+ case VARCHAR: {
+ BytesColumnVector bcv = (BytesColumnVector) batch.cols[off + i];
+ if (writableCol != null) {
+ bcv.isNull[rowIndex] = false;
+ HiveVarchar colHiveVarchar = ((HiveVarcharWritable) writableCol).getHiveVarchar();
+ byte[] bytes = colHiveVarchar.getValue().getBytes();
+
+ // We assume the VARCHAR maximum length was enforced when the object was created.
+ int length = bytes.length;
+
+ int start = buffer.getLength();
+ try {
+ buffer.write(bytes, 0, length);
+ } catch (IOException ioe) {
+ throw new IllegalStateException("Failed to write VARCHAR bytes to the batch buffer", ioe);
+ }
+ bcv.setRef(rowIndex, buffer.getData(), start, length);
+ } else {
+ setNullColIsNullValue(bcv, rowIndex);
+ }
+ }
+ break;
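The CHAR and VARCHAR cases above differ only in how the bytes are extracted: CHAR values are blank-padded to their declared length, so getStrippedValue() drops the trailing pad before storage, while VARCHAR carries no padding. A hedged illustration of the assumed HiveChar/HiveVarchar behavior:

    HiveChar c = new HiveChar("ab", 5);
    c.getValue();          // "ab   " -- blank-padded to the declared length of 5
    c.getStrippedValue();  // "ab"    -- what the vectorized batch stores
    HiveVarchar v = new HiveVarchar("ab", 5);
    v.getValue();          // "ab"    -- varchar values are never padded
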
case DECIMAL:
DecimalColumnVector dcv = (DecimalColumnVector) batch.cols[off + i];
if (writableCol != null) {
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java Mon Sep 8 04:38:17 2014
@@ -158,7 +158,10 @@ public class VectorizedColumnarSerDe ext
serializeVectorStream.write(bytes, 0, bytes.length);
}
break;
- case STRING: {
+ case STRING:
+ case CHAR:
+ case VARCHAR: {
+ // TODO: confirm whether CHAR and VARCHAR should be escaped like STRING.
BytesColumnVector bcv = (BytesColumnVector) batch.cols[k];
LazyUtils.writeEscaped(serializeVectorStream, bcv.vector[rowIndex],
bcv.start[rowIndex],
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java Mon Sep 8 04:38:17 2014
@@ -278,7 +278,7 @@ public class VectorizedRowBatchCtx {
case PRIMITIVE: {
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
// Vectorization currently only supports the following data types:
- // BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, BINARY, STRING, TIMESTAMP,
+ // BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, BINARY, STRING, CHAR, VARCHAR, TIMESTAMP,
// DATE and DECIMAL
switch (poi.getPrimitiveCategory()) {
case BOOLEAN:
@@ -296,6 +296,8 @@ public class VectorizedRowBatchCtx {
break;
case BINARY:
case STRING:
+ case CHAR:
+ case VARCHAR:
result.cols[j] = new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
break;
case DECIMAL:
@@ -544,7 +546,9 @@ public class VectorizedRowBatchCtx {
}
break;
- case STRING: {
+ case STRING:
+ case CHAR:
+ case VARCHAR: {
BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
String sVal = (String) value;
if (sVal == null) {
@@ -566,13 +570,17 @@ public class VectorizedRowBatchCtx {
}
}
- private void addScratchColumnsToBatch(VectorizedRowBatch vrb) {
+ private void addScratchColumnsToBatch(VectorizedRowBatch vrb) throws HiveException {
if (columnTypeMap != null && !columnTypeMap.isEmpty()) {
int origNumCols = vrb.numCols;
int newNumCols = vrb.cols.length+columnTypeMap.keySet().size();
vrb.cols = Arrays.copyOf(vrb.cols, newNumCols);
for (int i = origNumCols; i < newNumCols; i++) {
- vrb.cols[i] = allocateColumnVector(columnTypeMap.get(i),
+ String typeName = columnTypeMap.get(i);
+ if (typeName == null) {
+ throw new HiveException("No type found for column type entry " + i);
+ }
+ vrb.cols[i] = allocateColumnVector(typeName,
VectorizedRowBatch.DEFAULT_SIZE);
}
vrb.numCols = vrb.cols.length;
@@ -599,13 +607,17 @@ public class VectorizedRowBatchCtx {
private ColumnVector allocateColumnVector(String type, int defaultSize) {
if (type.equalsIgnoreCase("double")) {
return new DoubleColumnVector(defaultSize);
- } else if (type.equalsIgnoreCase("string")) {
+ } else if (VectorizationContext.isStringFamily(type)) {
return new BytesColumnVector(defaultSize);
} else if (VectorizationContext.decimalTypePattern.matcher(type).matches()){
int [] precisionScale = getScalePrecisionFromDecimalType(type);
return new DecimalColumnVector(defaultSize, precisionScale[0], precisionScale[1]);
- } else {
+ } else if (type.equalsIgnoreCase("long") ||
+ type.equalsIgnoreCase("date") ||
+ type.equalsIgnoreCase("timestamp")) {
return new LongColumnVector(defaultSize);
+ } else {
+ throw new Error("Cannot allocate vector column for " + type);
}
}
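After this change allocateColumnVector resolves every supported internal type name explicitly and fails fast on anything else. The resulting mapping, illustratively:

    allocateColumnVector("double", n)        // -> DoubleColumnVector
    allocateColumnVector("char(10)", n)      // -> BytesColumnVector (via isStringFamily)
    allocateColumnVector("decimal(10,2)", n) // -> DecimalColumnVector
    allocateColumnVector("timestamp", n)     // -> LongColumnVector
    allocateColumnVector("map<int,int>", n)  // -> throws Error
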
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java Mon Sep 8 04:38:17 2014
@@ -420,8 +420,8 @@ public abstract class AbstractFilterStri
VectorExpressionDescriptor.Mode.FILTER)
.setNumArguments(2)
.setArgumentTypes(
- VectorExpressionDescriptor.ArgumentType.getType("string"),
- VectorExpressionDescriptor.ArgumentType.getType("string"))
+ VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,
+ VectorExpressionDescriptor.ArgumentType.STRING_FAMILY)
.setInputExpressionTypes(
VectorExpressionDescriptor.InputExpressionType.COLUMN,
VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
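Note the design choice here: by widening the descriptor's argument types from the exact "string" type to STRING_FAMILY, the existing LIKE filter implementations are reused unchanged for CHAR and VARCHAR columns, since all three types share the BytesColumnVector representation shown earlier in this patch.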