You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ga...@apache.org on 2015/07/01 18:18:40 UTC
[1/4] hive git commit: HIVE-11055 HPL/SQL - Implementing Procedural
SQL in Hive (PL/HQL Contribution) (Dmitry Tolpeko via gates)
Repository: hive
Updated Branches:
refs/heads/master edb7b8893 -> 052643cb8
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
new file mode 100644
index 0000000..9895b5e
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
@@ -0,0 +1,709 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql.functions;
+
+import java.sql.ResultSet;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.StringUtils;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.apache.hive.hplsql.*;
+
+/**
+ * Callback for a built-in function invoked with a regular parameter list,
+ * e.g. NVL(a, b). Implementations read arguments from the parse context and
+ * push their result onto the interpreter stack.
+ */
+interface FuncCommand {
+ void run(HplsqlParser.Expr_func_paramsContext ctx);
+}
+
+/**
+ * Callback for a "special" function that has its own grammar rule
+ * (e.g. CAST, CURRENT_TIMESTAMP, MAX_PART_STRING) rather than a plain
+ * parenthesized argument list.
+ */
+interface FuncSpecCommand {
+ void run(HplsqlParser.Expr_spec_funcContext ctx);
+}
+
+/**
+ * HPL/SQL functions
+ */
+public class Function {
+ // Owning interpreter; provides scopes, the value stack, tracing and query execution
+ Exec exec;
+ // Built-in functions callable with a regular parameter list
+ HashMap<String, FuncCommand> map = new HashMap<String, FuncCommand>();
+ // "Special" functions evaluated directly by the interpreter
+ HashMap<String, FuncSpecCommand> specMap = new HashMap<String, FuncSpecCommand>();
+ // "Special" functions translated to SQL text when used inside an executable SQL statement
+ HashMap<String, FuncSpecCommand> specSqlMap = new HashMap<String, FuncSpecCommand>();
+ // User-defined functions and procedures, keyed by upper-cased name (see addUserFunction/addUserProcedure)
+ HashMap<String, HplsqlParser.Create_function_stmtContext> userMap = new HashMap<String, HplsqlParser.Create_function_stmtContext>();
+ HashMap<String, HplsqlParser.Create_procedure_stmtContext> procMap = new HashMap<String, HplsqlParser.Create_procedure_stmtContext>();
+ // Cached trace flag, initialized from Exec in the constructor
+ boolean trace = false;
+
+ /**
+ * @param e owning interpreter; must be non-null (dereferenced immediately for the trace flag)
+ */
+ public Function(Exec e) {
+ exec = e;
+ trace = exec.getTrace();
+ }
+
+ /**
+ * Register functions.
+ * No-op in the base class; subclasses (FunctionDatetime, FunctionMisc, FunctionOra)
+ * override this to populate the given Function's lookup maps.
+ */
+ public void register(Function f) {
+ }
+
+ /**
+ * Execute a function.
+ * User-defined functions take precedence over built-ins; an unknown name
+ * evaluates to NULL (pushed onto the stack) rather than raising an error.
+ */
+ public void exec(String name, HplsqlParser.Expr_func_paramsContext ctx) {
+ if (execUser(ctx, name)) {
+ return;
+ }
+ if (name.indexOf(".") != -1) { // Name can be qualified and spaces are allowed between parts
+ String[] parts = name.split("\\.");
+ StringBuilder str = new StringBuilder();
+ for (int i = 0; i < parts.length; i++) {
+ if (i > 0) {
+ str.append(".");
+ }
+ str.append(parts[i].trim());
+ }
+ name = str.toString();
+ }
+ // Trace only when the function call is itself a top-level expression statement
+ if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ FuncCommand func = map.get(name);
+ if (func != null) {
+ func.run(ctx);
+ }
+ else {
+ evalNull();
+ }
+ }
+
+ /**
+ * User-defined function in a SQL query.
+ * If the name is not a known HPL/SQL UDF, the call is passed through verbatim:
+ * the name and evaluated arguments are rebuilt as "name(arg1, arg2)" text and
+ * pushed onto the stack for inclusion in the generated SQL.
+ */
+ public void execSql(String name, HplsqlParser.Expr_func_paramsContext ctx) {
+ if (execUserSql(ctx, name)) {
+ return;
+ }
+ StringBuilder sql = new StringBuilder();
+ sql.append(name);
+ sql.append("(");
+ int cnt = ctx.expr().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(evalPop(ctx.expr(i)));
+ if (i + 1 < cnt) {
+ sql.append(", ");
+ }
+ }
+ sql.append(")");
+ exec.stackPush(sql);
+ }
+
+ /**
+ * Aggregate or window function in a SQL query.
+ * Always passed through to the database as-is (original source text).
+ */
+ public void execAggWindowSql(HplsqlParser.Expr_agg_window_funcContext ctx) {
+ exec.stackPush(exec.getFormattedText(ctx));
+ }
+
+ /**
+ * Execute a user-defined function.
+ * Runs the stored CREATE FUNCTION body in a new ROUTINE scope after binding
+ * the actual parameters; the function result is left on the stack by the body.
+ *
+ * @return false if no user function with this (case-insensitive) name exists
+ */
+ public boolean execUser(HplsqlParser.Expr_func_paramsContext ctx, String name) {
+ HplsqlParser.Create_function_stmtContext userCtx = userMap.get(name.toUpperCase());
+ if (userCtx == null) {
+ return false;
+ }
+ if (trace) {
+ trace(ctx, "EXEC FUNCTION " + name);
+ }
+ exec.enterScope(Scope.Type.ROUTINE);
+ setCallParameters(ctx, userCtx.create_routine_params(), null);
+ visit(userCtx.single_block_stmt());
+ exec.leaveScope();
+ return true;
+ }
+
+ /**
+ * Execute a HPL/SQL user-defined function in a query.
+ * Rewrites the call as an invocation of the generic "hplsql" Hive UDF:
+ * hplsql('name(:1, :2)', arg1, arg2) - placeholders :N in the quoted text
+ * are bound to the evaluated arguments appended after it - and registers
+ * the UDF with the session.
+ *
+ * @return false if no user function with this (case-insensitive) name exists
+ */
+ public boolean execUserSql(HplsqlParser.Expr_func_paramsContext ctx, String name) {
+ HplsqlParser.Create_function_stmtContext userCtx = userMap.get(name.toUpperCase());
+ if (userCtx == null) {
+ return false;
+ }
+ StringBuilder sql = new StringBuilder();
+ sql.append("hplsql('");
+ sql.append(name);
+ sql.append("(");
+ int cnt = ctx.expr().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(":" + (i + 1));
+ if (i + 1 < cnt) {
+ sql.append(", ");
+ }
+ }
+ sql.append(")'");
+ if (cnt > 0) {
+ sql.append(", ");
+ }
+ for (int i = 0; i < cnt; i++) {
+ sql.append(evalPop(ctx.expr(i)));
+ if (i + 1 < cnt) {
+ sql.append(", ");
+ }
+ }
+ sql.append(")");
+ exec.stackPush(sql);
+ exec.registerUdf();
+ return true;
+ }
+
+ /**
+ * Execute a stored procedure as the entry point of the script (defined by -main option).
+ * Parameter values are taken from already-defined variables of the same names
+ * (see setCallParameters(Create_routine_paramsContext)).
+ *
+ * @return false if the procedure is not defined
+ */
+ public boolean execProc(String name) {
+ if (trace) {
+ trace("EXEC PROCEDURE " + name);
+ }
+ HplsqlParser.Create_procedure_stmtContext procCtx = procMap.get(name.toUpperCase());
+ if (procCtx == null) {
+ trace("Procedure not found");
+ return false;
+ }
+ exec.enterScope(Scope.Type.ROUTINE);
+ setCallParameters(procCtx.create_routine_params());
+ visit(procCtx.single_block_stmt());
+ exec.leaveScope();
+ return true;
+ }
+
+ /**
+ * Execute a stored procedure using CALL or EXEC statement passing parameters.
+ * OUT/INOUT parameter values are collected while the routine scope is active
+ * and copied back to the caller's variables after the scope is left.
+ *
+ * @return false if the procedure is not defined
+ */
+ public boolean execProc(HplsqlParser.Expr_func_paramsContext ctx, String name) {
+ if (trace) {
+ trace(ctx, "EXEC PROCEDURE " + name);
+ }
+ HplsqlParser.Create_procedure_stmtContext procCtx = procMap.get(name.toUpperCase());
+ if (procCtx == null) {
+ trace(ctx, "Procedure not found");
+ return false;
+ }
+ HashMap<String, Var> out = new HashMap<String, Var>();
+ exec.enterScope(Scope.Type.ROUTINE);
+ setCallParameters(ctx, procCtx.create_routine_params(), out);
+ visit(procCtx.single_block_stmt());
+ exec.leaveScope();
+ for (Map.Entry<String, Var> i : out.entrySet()) { // Set OUT parameters
+ exec.setVariable(i.getKey(), i.getValue());
+ }
+ return true;
+ }
+
+ /**
+ * Set parameters for user-defined function call.
+ * Binds each actual argument to the corresponding formal parameter (by position),
+ * casting the value to the declared type. Extra actual arguments beyond the
+ * formal list are silently ignored; missing ones are simply not bound.
+ *
+ * @param out if non-null, collects OUT/INOUT bindings (actual variable name -> routine-scope Var)
+ */
+ void setCallParameters(HplsqlParser.Expr_func_paramsContext actual,
+ HplsqlParser.Create_routine_paramsContext formal,
+ HashMap<String, Var> out) {
+ int actualCnt = actual.expr().size();
+ int formalCnt = formal.create_routine_param_item().size();
+ for (int i = 0; i < actualCnt; i++) {
+ if (i >= formalCnt) {
+ break;
+ }
+ HplsqlParser.ExprContext a = actual.expr(i);
+ HplsqlParser.Create_routine_param_itemContext p = formal.create_routine_param_item(i);
+ String name = p.ident().getText();
+ String type = p.dtype().getText();
+ String len = null;
+ String scale = null;
+ // Optional length/scale, e.g. DECIMAL(10,2)
+ if (p.dtype_len() != null) {
+ len = p.dtype_len().L_INT(0).getText();
+ if (p.dtype_len().L_INT(1) != null) {
+ scale = p.dtype_len().L_INT(1).getText();
+ }
+ }
+ Var value = evalPop(a);
+ Var var = setCallParameter(name, type, len, scale, value);
+ if (trace) {
+ trace(actual, "SET PARAM " + name + " = " + var.toString());
+ }
+ // OUT/INOUT: only a plain identifier argument can receive a value back
+ if (out != null && a.expr_atom() != null && a.expr_atom().ident() != null &&
+ (p.T_OUT() != null || p.T_INOUT() != null)) {
+ String actualName = a.expr_atom().ident().getText();
+ if (actualName != null) {
+ out.put(actualName, var);
+ }
+ }
+ }
+ }
+
+ /**
+ * Set parameters for entry-point call (Main procedure defined by -main option).
+ * Each formal parameter takes its value from an existing variable with the
+ * same name (typically set via command-line options).
+ */
+ void setCallParameters(HplsqlParser.Create_routine_paramsContext ctx) {
+ int cnt = ctx.create_routine_param_item().size();
+ for (int i = 0; i < cnt; i++) {
+ HplsqlParser.Create_routine_param_itemContext p = ctx.create_routine_param_item(i);
+ String name = p.ident().getText();
+ String type = p.dtype().getText();
+ String len = null;
+ String scale = null;
+ if (p.dtype_len() != null) {
+ len = p.dtype_len().L_INT(0).getText();
+ if (p.dtype_len().L_INT(1) != null) {
+ scale = p.dtype_len().L_INT(1).getText();
+ }
+ }
+ Var value = exec.findVariable(name);
+ Var var = setCallParameter(name, type, len, scale, value);
+ if (trace) {
+ trace(ctx, "SET PARAM " + name + " = " + var.toString());
+ }
+ }
+ }
+
+ /**
+ * Create a function or procedure parameter and set its value.
+ * The new Var is typed from the declaration, cast from the supplied value,
+ * and registered in the current (routine) scope.
+ *
+ * @return the newly created variable
+ */
+ Var setCallParameter(String name, String type, String len, String scale, Var value) {
+ Var var = new Var(name, type, len, scale, null);
+ var.cast(value);
+ exec.addVariable(var);
+ return var;
+ }
+
+ /**
+ * Add a user-defined function.
+ * Stored by upper-cased name; a later definition with the same name replaces
+ * the earlier one.
+ */
+ public void addUserFunction(HplsqlParser.Create_function_stmtContext ctx) {
+ String name = ctx.ident().getText();
+ if (trace) {
+ trace(ctx, "CREATE FUNCTION " + name);
+ }
+ userMap.put(name.toUpperCase(), ctx);
+ }
+
+ /**
+ * Add a user-defined procedure.
+ * ident(0) is the procedure name (the grammar allows a second identifier;
+ * it is not used here). Stored by upper-cased name, replacing any previous definition.
+ */
+ public void addUserProcedure(HplsqlParser.Create_procedure_stmtContext ctx) {
+ String name = ctx.ident(0).getText();
+ if (trace) {
+ trace(ctx, "CREATE PROCEDURE " + name);
+ }
+ procMap.put(name.toUpperCase(), ctx);
+ }
+
+ /**
+ * Execute a special function.
+ * Dispatch order: first the registered specMap handler (keyed by the first
+ * token's upper-cased text), then a hard-coded chain for the partition
+ * helper functions; anything else evaluates to NULL.
+ */
+ public void specExec(HplsqlParser.Expr_spec_funcContext ctx) {
+ String name = ctx.start.getText().toUpperCase();
+ if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ FuncSpecCommand func = specMap.get(name);
+ if (func != null) {
+ func.run(ctx);
+ }
+ else if(ctx.T_MAX_PART_STRING() != null) {
+ execMaxPartString(ctx);
+ } else if(ctx.T_MIN_PART_STRING() != null) {
+ execMinPartString(ctx);
+ } else if(ctx.T_MAX_PART_INT() != null) {
+ execMaxPartInt(ctx);
+ } else if(ctx.T_MIN_PART_INT() != null) {
+ execMinPartInt(ctx);
+ } else if(ctx.T_MAX_PART_DATE() != null) {
+ execMaxPartDate(ctx);
+ } else if(ctx.T_MIN_PART_DATE() != null) {
+ execMinPartDate(ctx);
+ } else if(ctx.T_PART_LOC() != null) {
+ execPartLoc(ctx);
+ } else if(ctx.T_SYSDATE() != null) {
+ // SYSDATE is an alias for CURRENT_TIMESTAMP with default precision 0
+ execCurrentTimestamp(ctx, 0);
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * Execute a special function in executable SQL statement.
+ * If no SQL-specific handler is registered, the function's original source
+ * text is passed through to the generated SQL unchanged.
+ */
+ public void specExecSql(HplsqlParser.Expr_spec_funcContext ctx) {
+ String name = ctx.start.getText().toUpperCase();
+ if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ FuncSpecCommand func = specSqlMap.get(name);
+ if (func != null) {
+ func.run(ctx);
+ }
+ else {
+ exec.stackPush(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * Get the current date.
+ * Pushes a DATE Var for the local current day ("yyyy-MM-dd").
+ */
+ public void execCurrentDate(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "CURRENT_DATE");
+ }
+ SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+ String s = f.format(Calendar.getInstance().getTime());
+ exec.stackPush(new Var(Var.Type.DATE, Utils.toDate(s)));
+ }
+
+ /**
+ * Get the current date and time.
+ * Fractional seconds are included only for precision 1..3 (SimpleDateFormat
+ * supports milliseconds at most); an absent precision argument falls back to
+ * defPrecision. trace() below checks the trace flag itself, so no guard is needed.
+ *
+ * @param defPrecision precision to use when no argument was supplied
+ */
+ public void execCurrentTimestamp(HplsqlParser.Expr_spec_funcContext ctx, int defPrecision) {
+ trace(ctx, "CURRENT_TIMESTAMP");
+ int precision = evalPop(ctx.expr(0), defPrecision).intValue();
+ String format = "yyyy-MM-dd HH:mm:ss";
+ if(precision > 0 && precision <= 3) {
+ format += "." + StringUtils.repeat("S", precision);
+ }
+ SimpleDateFormat f = new SimpleDateFormat(format);
+ String s = f.format(Calendar.getInstance(TimeZone.getDefault()).getTime());
+ exec.stackPush(new Var(Utils.toTimestamp(s), precision));
+ }
+
+ /**
+ * Execute MAX_PART_STRING function.
+ * Thin wrapper: delegates to execMinMaxPart with STRING comparison, max semantics.
+ */
+ public void execMaxPartString(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MAX_PART_STRING");
+ }
+ execMinMaxPart(ctx, Var.Type.STRING, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_STRING function (STRING comparison, min semantics).
+ */
+ public void execMinPartString(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MIN_PART_STRING");
+ }
+ execMinMaxPart(ctx, Var.Type.STRING, false /*max*/);
+ }
+
+ /**
+ * Execute MAX_PART_INT function (BIGINT comparison, max semantics).
+ */
+ public void execMaxPartInt(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MAX_PART_INT");
+ }
+ execMinMaxPart(ctx, Var.Type.BIGINT, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_INT function (BIGINT comparison, min semantics).
+ */
+ public void execMinPartInt(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MIN_PART_INT");
+ }
+ execMinMaxPart(ctx, Var.Type.BIGINT, false /*max*/);
+ }
+
+ /**
+ * Execute MAX_PART_DATE function (DATE comparison, max semantics).
+ */
+ public void execMaxPartDate(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MAX_PART_DATE");
+ }
+ execMinMaxPart(ctx, Var.Type.DATE, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_DATE function (DATE comparison, min semantics).
+ */
+ public void execMinPartDate(HplsqlParser.Expr_spec_funcContext ctx) {
+ if(trace) {
+ trace(ctx, "MIN_PART_DATE");
+ }
+ execMinMaxPart(ctx, Var.Type.DATE, false /*max*/);
+ }
+
+ /**
+ * Execute MIN or MAX partition function.
+ * Runs "SHOW PARTITIONS <table> [PARTITION (col=val, ...)]" and scans the
+ * result for the smallest/largest value of one partition column.
+ * Arguments: expr(0) = table name, expr(1) = column name (optional - defaults
+ * to the first partition column), expr(2..) = filter column/value pairs.
+ * Pushes NULL when offline, on query error, or when nothing matched.
+ * NOTE(review): the SQL is built by string concatenation from script-supplied
+ * values; acceptable here since input comes from the HPL/SQL script itself.
+ */
+ public void execMinMaxPart(HplsqlParser.Expr_spec_funcContext ctx, Var.Type type, boolean max) {
+ String tabname = evalPop(ctx.expr(0)).toString();
+ String sql = "SHOW PARTITIONS " + tabname;
+ String colname = null;
+ int colnum = -1;
+ int exprnum = ctx.expr().size();
+ // Column name
+ if (ctx.expr(1) != null) {
+ colname = evalPop(ctx.expr(1)).toString();
+ } else {
+ colnum = 0;
+ }
+ // Partition filter
+ if (exprnum >= 4) {
+ sql += " PARTITION (";
+ int i = 2;
+ while (i + 1 < exprnum) {
+ String fcol = evalPop(ctx.expr(i)).toString();
+ String fval = evalPop(ctx.expr(i+1)).toSqlString();
+ if (i > 2) {
+ sql += ", ";
+ }
+ sql += fcol + "=" + fval;
+ i += 2;
+ }
+ sql += ")";
+ }
+ if (trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ if (exec.getOffline()) {
+ evalNull();
+ return;
+ }
+ Query query = exec.executeQuery(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ evalNullClose(query, exec.conf.defaultConnection);
+ return;
+ }
+ ResultSet rs = query.getResultSet();
+ try {
+ String resultString = null;
+ Long resultInt = null;
+ Date resultDate = null;
+ // Each row is a partition spec like "year=2015/month=07"
+ while (rs.next()) {
+ String[] parts = rs.getString(1).split("/");
+ // Find partition column by name (resolved once, on the first row)
+ if (colnum == -1) {
+ for (int i = 0; i < parts.length; i++) {
+ String[] name = parts[i].split("=");
+ if (name[0].equalsIgnoreCase(colname)) {
+ colnum = i;
+ break;
+ }
+ }
+ // No partition column with the specified name exists
+ if (colnum == -1) {
+ evalNullClose(query, exec.conf.defaultConnection);
+ return;
+ }
+ }
+ String[] pair = parts[colnum].split("=");
+ if (type == Var.Type.STRING) {
+ resultString = Utils.minMaxString(resultString, pair[1], max);
+ }
+ else if (type == Var.Type.BIGINT) {
+ resultInt = Utils.minMaxInt(resultInt, pair[1], max);
+ }
+ else if (type == Var.Type.DATE) {
+ resultDate = Utils.minMaxDate(resultDate, pair[1], max);
+ }
+ }
+ if (resultString != null) {
+ evalString(resultString);
+ }
+ else if (resultInt != null) {
+ evalInt(resultInt);
+ }
+ else if (resultDate != null) {
+ evalDate(resultDate);
+ }
+ else {
+ evalNull();
+ }
+ } catch (SQLException e) {} // NOTE(review): swallowed; a mid-scan failure can leave nothing on the stack - consider evalNull() + trace here
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ }
+
+ /**
+ * Execute PART_LOC function.
+ * Runs "DESCRIBE EXTENDED <table> [PARTITION (...)]" and extracts the
+ * partition location from the "Detailed Partition Information" row.
+ * Arguments: expr(0) = table name, then filter column/value pairs; an even
+ * total argument count means the last argument is a keep-hostname flag (1 = keep).
+ * NOTE(review): with exactly two arguments (table + flag) this emits an empty
+ * "PARTITION ()" clause - verify against the grammar whether that call shape is reachable.
+ */
+ public void execPartLoc(HplsqlParser.Expr_spec_funcContext ctx) {
+ String tabname = evalPop(ctx.expr(0)).toString();
+ String sql = "DESCRIBE EXTENDED " + tabname;
+ int exprnum = ctx.expr().size();
+ boolean hostname = false;
+ // Partition filter
+ if (exprnum > 1) {
+ sql += " PARTITION (";
+ int i = 1;
+ while (i + 1 < exprnum) {
+ String col = evalPop(ctx.expr(i)).toString();
+ String val = evalPop(ctx.expr(i+1)).toSqlString();
+ if (i > 2) { // i is always odd here, so only the first pair (i == 1) skips the comma
+ sql += ", ";
+ }
+ sql += col + "=" + val;
+ i += 2;
+ }
+ sql += ")";
+ }
+ // With host name
+ // NOTE(review): this re-evaluates expr(exprnum - 1); if that expression has side effects they run twice
+ if (exprnum % 2 == 0 && evalPop(ctx.expr(exprnum - 1)).intValue() == 1) {
+ hostname = true;
+ }
+ if (trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ if (exec.getOffline()) {
+ evalNull();
+ return;
+ }
+ Query query = exec.executeQuery(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ evalNullClose(query, exec.conf.defaultConnection);
+ return;
+ }
+ String result = null;
+ ResultSet rs = query.getResultSet();
+ try {
+ while (rs.next()) {
+ if (rs.getString(1).startsWith("Detailed Partition Information")) {
+ // Pull "location:<path>," out of the metadata dump in column 2
+ Matcher m = Pattern.compile(".*, location:(.*?),.*").matcher(rs.getString(2));
+ if (m.find()) {
+ result = m.group(1);
+ }
+ }
+ }
+ } catch (SQLException e) {} // NOTE(review): swallowed; falls through to the NULL/result push below
+ if (result != null) {
+ // Remove the host name
+ if (!hostname) {
+ // Strip "scheme://host" prefix, keeping the path starting at "/"
+ Matcher m = Pattern.compile(".*://.*?(/.*)").matcher(result);
+ if (m.find()) {
+ result = m.group(1);
+ }
+ }
+ evalString(result);
+ }
+ else {
+ evalNull();
+ }
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ }
+
+ /**
+ * Evaluate the expression and push the value to the stack.
+ */
+ void eval(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ }
+
+ /**
+ * Evaluate the expression to the specified variable (push it as the result).
+ */
+ void evalVar(Var var) {
+ exec.stackPush(var);
+ }
+
+ /**
+ * Evaluate the expression to NULL (push the shared NULL singleton).
+ */
+ void evalNull() {
+ exec.stackPush(Var.Null);
+ }
+
+ /**
+ * Evaluate the expression to specified String value.
+ */
+ void evalString(String string) {
+ exec.stackPush(new Var(string));
+ }
+
+ // Convenience overload for results accumulated in a StringBuilder
+ void evalString(StringBuilder string) {
+ evalString(string.toString());
+ }
+
+ /**
+ * Evaluate the expression to specified Int value.
+ */
+ void evalInt(Long i) {
+ exec.stackPush(new Var(i));
+ }
+
+ /**
+ * Evaluate the expression to specified Date value.
+ */
+ void evalDate(Date date) {
+ exec.stackPush(new Var(Var.Type.DATE, date));
+ }
+
+ /**
+ * Evaluate the expression to NULL and close the query (error path helper).
+ */
+ void evalNullClose(Query query, String conn) {
+ exec.stackPush(Var.Null);
+ exec.closeQuery(query, conn);
+ if(trace) {
+ query.printStackTrace();
+ }
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack.
+ */
+ Var evalPop(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+
+ /**
+ * Evaluate the expression, or fall back to the given default when the
+ * optional expression is absent (ctx == null).
+ */
+ Var evalPop(ParserRuleContext ctx, int value) {
+ if (ctx != null) {
+ return evalPop(ctx);
+ }
+ return new Long(value) boxing is deprecated in later JDKs - kept as-is in this archived diff
+ return new Var(new Long(value));
+ }
+
+ /**
+ * Execute rules (delegate a parse subtree to the interpreter).
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Execute children rules.
+ */
+ Integer visitChildren(ParserRuleContext ctx) {
+ return exec.visitChildren(ctx);
+ }
+
+ /**
+ * Trace information.
+ * Checks the trace flag itself, so callers may invoke it unconditionally.
+ */
+ public void trace(ParserRuleContext ctx, String message) {
+ if (trace) {
+ exec.trace(ctx, message);
+ }
+ }
+
+ // Trace without a parse context (e.g. entry-point messages)
+ public void trace(String message) {
+ trace(null, message);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java
new file mode 100644
index 0000000..926eeeb
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql.functions;
+
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.TimeZone;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hive.hplsql.*;
+
+/**
+ * Date/time built-in functions (DATE, TIMESTAMP_ISO, TO_TIMESTAMP,
+ * CURRENT_DATE, CURRENT_TIMESTAMP). register() installs the handlers
+ * into the shared Function lookup maps.
+ */
+public class FunctionDatetime extends Function {
+ public FunctionDatetime(Exec e) {
+ super(e);
+ }
+
+ /**
+ * Register functions
+ */
+ @Override
+ public void register(Function f) {
+ f.map.put("DATE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { date(ctx); }});
+ f.map.put("TIMESTAMP_ISO", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { timestampIso(ctx); }});
+ f.map.put("TO_TIMESTAMP", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { toTimestamp(ctx); }});
+
+ f.specMap.put("CURRENT_DATE", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentDate(ctx); }});
+ f.specMap.put("CURRENT_TIMESTAMP", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentTimestamp(ctx); }});
+
+ // SQL variants: emit text for the generated query instead of evaluating locally
+ f.specSqlMap.put("CURRENT_DATE", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentDateSql(ctx); }});
+ f.specSqlMap.put("CURRENT_TIMESTAMP", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentTimestampSql(ctx); }});
+ }
+
+ /**
+ * CURRENT_DATE
+ */
+ public void currentDate(HplsqlParser.Expr_spec_funcContext ctx) {
+ evalVar(currentDate());
+ }
+
+ /**
+ * @return a DATE Var for the local current day ("yyyy-MM-dd")
+ */
+ public static Var currentDate() {
+ SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+ String s = f.format(Calendar.getInstance().getTime());
+ return new Var(Var.Type.DATE, Utils.toDate(s));
+ }
+
+ /**
+ * CURRENT_DATE in executable SQL statement.
+ * Hive has no CURRENT_DATE function at this point, so it is rewritten;
+ * other databases get the original source text.
+ */
+ public void currentDateSql(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))");
+ }
+ else {
+ evalString(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * CURRENT_TIMESTAMP (default precision 3 when no argument is given)
+ */
+ public void currentTimestamp(HplsqlParser.Expr_spec_funcContext ctx) {
+ int precision = evalPop(ctx.expr(0), 3).intValue();
+ evalVar(currentTimestamp(precision));
+ }
+
+ /**
+ * @param precision fractional-second digits; honored only for 1..3
+ * (SimpleDateFormat supports milliseconds at most)
+ */
+ public static Var currentTimestamp(int precision) {
+ String format = "yyyy-MM-dd HH:mm:ss";
+ if (precision > 0 && precision <= 3) {
+ format += "." + StringUtils.repeat("S", precision);
+ }
+ SimpleDateFormat f = new SimpleDateFormat(format);
+ String s = f.format(Calendar.getInstance(TimeZone.getDefault()).getTime());
+ return new Var(Utils.toTimestamp(s), precision);
+ }
+
+ /**
+ * CURRENT_TIMESTAMP in executable SQL statement (rewritten for Hive,
+ * passed through verbatim for other connections)
+ */
+ public void currentTimestampSql(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())");
+ }
+ else {
+ evalString(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * DATE function - cast the single argument to DATE; wrong arity yields NULL
+ */
+ void date(HplsqlParser.Expr_func_paramsContext ctx) {
+ if (ctx.expr().size() != 1) {
+ evalNull();
+ return;
+ }
+ Var var = new Var(Var.Type.DATE);
+ var.cast(evalPop(ctx.expr(0)));
+ evalVar(var);
+ }
+
+ /**
+ * TIMESTAMP_ISO function - cast the single argument to TIMESTAMP; wrong arity yields NULL
+ */
+ void timestampIso(HplsqlParser.Expr_func_paramsContext ctx) {
+ if (ctx.expr().size() != 1) {
+ evalNull();
+ return;
+ }
+ Var var = new Var(Var.Type.TIMESTAMP);
+ var.cast(evalPop(ctx.expr(0)));
+ evalVar(var);
+ }
+
+ /**
+ * TO_TIMESTAMP function.
+ * expr(0) = value, expr(1) = SQL-style format (e.g. 'YYYY-MM-DD'), converted
+ * to a SimpleDateFormat pattern. Parse failure signals the error and yields NULL.
+ */
+ void toTimestamp(HplsqlParser.Expr_func_paramsContext ctx) {
+ if (ctx.expr().size() != 2) {
+ evalNull();
+ return;
+ }
+ String value = evalPop(ctx.expr(0)).toString();
+ String sqlFormat = evalPop(ctx.expr(1)).toString();
+ String format = Utils.convertSqlDatetimeFormat(sqlFormat);
+ try {
+ long timeInMs = new SimpleDateFormat(format).parse(value).getTime();
+ evalVar(new Var(Var.Type.TIMESTAMP, new Timestamp(timeInMs)));
+ }
+ catch (Exception e) {
+ exec.signal(e);
+ evalNull();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
new file mode 100644
index 0000000..8d2456c
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql.functions;
+
+import org.apache.hive.hplsql.*;
+
+/**
+ * Miscellaneous built-in functions (NVL/COALESCE, NVL2, DECODE, CAST,
+ * CURRENT, CURRENT_USER/USER, ACTIVITY_COUNT). register() installs the
+ * handlers into the shared Function lookup maps.
+ */
+public class FunctionMisc extends Function {
+ public FunctionMisc(Exec e) {
+ super(e);
+ }
+
+ /**
+ * Register functions
+ */
+ @Override
+ public void register(Function f) {
+ // COALESCE shares the NVL implementation: both return the first non-NULL argument
+ f.map.put("COALESCE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }});
+ f.map.put("DECODE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { decode(ctx); }});
+ f.map.put("NVL", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }});
+ f.map.put("NVL2", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl2(ctx); }});
+
+ f.specMap.put("ACTIVITY_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { activityCount(ctx); }});
+ f.specMap.put("CAST", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { cast(ctx); }});
+ f.specMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { current(ctx); }});
+ f.specMap.put("CURRENT_USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
+ f.specMap.put("USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
+
+ f.specSqlMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentSql(ctx); }});
+ }
+
+ /**
+ * ACTIVITY_COUNT function (built-in variable) - row count of the last SQL statement
+ */
+ void activityCount(HplsqlParser.Expr_spec_funcContext ctx) {
+ evalInt(new Long(exec.getRowCount()));
+ }
+
+ /**
+ * CAST function - cast expr(0) to the declared type, with optional length/scale
+ */
+ void cast(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (ctx.expr().size() != 1) {
+ evalNull();
+ return;
+ }
+ String type = ctx.dtype().getText();
+ String len = null;
+ String scale = null;
+ if (ctx.dtype_len() != null) {
+ len = ctx.dtype_len().L_INT(0).getText();
+ if (ctx.dtype_len().L_INT(1) != null) {
+ scale = ctx.dtype_len().L_INT(1).getText();
+ }
+ }
+ Var var = new Var(null, type, len, scale, null);
+ var.cast(evalPop(ctx.expr(0)));
+ evalVar(var);
+ }
+
+ /**
+ * CURRENT <VALUE> function - dispatches on the keyword after CURRENT
+ * (DATE, TIMESTAMP with optional precision, USER); anything else is NULL
+ */
+ void current(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (ctx.T_DATE() != null) {
+ evalVar(FunctionDatetime.currentDate());
+ }
+ else if (ctx.T_TIMESTAMP() != null) {
+ int precision = evalPop(ctx.expr(0), 3).intValue();
+ evalVar(FunctionDatetime.currentTimestamp(precision));
+ }
+ else if (ctx.T_USER() != null) {
+ evalVar(FunctionMisc.currentUser());
+ }
+ else {
+ evalNull();
+ }
+ }
+
+ /**
+ * CURRENT <VALUE> function in executable SQL statement
+ * (Hive-specific rewrites for DATE/TIMESTAMP, verbatim pass-through otherwise)
+ */
+ void currentSql(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (ctx.T_DATE() != null) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))");
+ }
+ else {
+ evalString("CURRENT_DATE");
+ }
+ }
+ else if (ctx.T_TIMESTAMP() != null) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())");
+ }
+ else {
+ evalString("CURRENT_TIMESTAMP");
+ }
+ }
+ else {
+ evalString(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * CURRENT_USER function
+ */
+ void currentUser(HplsqlParser.Expr_spec_funcContext ctx) {
+ evalVar(currentUser());
+ }
+
+ // OS-level user running the interpreter, not a database session user
+ public static Var currentUser() {
+ return new Var(System.getProperty("user.name"));
+ }
+
+ /**
+ * DECODE function.
+ * DECODE(value, when1, then1, ..., else) - compares value to each "when" in
+ * turn and evaluates the matching "then"; a trailing unpaired argument is the
+ * ELSE result. Note: two NULLs compare equal here (Oracle DECODE semantics).
+ */
+ void decode(HplsqlParser.Expr_func_paramsContext ctx) {
+ int cnt = ctx.expr().size();
+ if (cnt < 3) {
+ evalNull();
+ return;
+ }
+ Var value = evalPop(ctx.expr(0));
+ int i = 1;
+ while (i + 1 < cnt) {
+ Var when = evalPop(ctx.expr(i));
+ if ((value.isNull() && when.isNull()) || value.equals(when)) {
+ eval(ctx.expr(i + 1));
+ return;
+ }
+ i += 2;
+ }
+ if (i < cnt) { // ELSE expression
+ eval(ctx.expr(i));
+ }
+ else {
+ evalNull();
+ }
+ }
+
+ /**
+ * NVL function - Return first non-NULL expression.
+ * Arguments are evaluated lazily left to right; evaluation stops at the first non-NULL.
+ */
+ void nvl(HplsqlParser.Expr_func_paramsContext ctx) {
+ for (int i=0; i < ctx.expr().size(); i++) {
+ Var v = evalPop(ctx.expr(i));
+ if (v.type != Var.Type.NULL) {
+ exec.stackPush(v);
+ return;
+ }
+ }
+ evalNull();
+ }
+
+ /**
+ * NVL2 function - If expr1 is not NULL return expr2, otherwise expr3.
+ * Only the selected branch is evaluated; wrong arity yields NULL.
+ */
+ void nvl2(HplsqlParser.Expr_func_paramsContext ctx) {
+ if (ctx.expr().size() == 3) {
+ if (!evalPop(ctx.expr(0)).isNull()) {
+ eval(ctx.expr(1));
+ }
+ else {
+ eval(ctx.expr(2));
+ }
+ }
+ else {
+ evalNull();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionOra.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionOra.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionOra.java
new file mode 100644
index 0000000..fec891a
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionOra.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql.functions;
+
+import java.io.IOException;
+import java.io.EOFException;
+
+import org.apache.hive.hplsql.*;
+
+public class FunctionOra extends Function {
+  public FunctionOra(Exec e) {
+    super(e);
+  }
+
+  /**
+   * Register Oracle-compatible built-in packages (DBMS_OUTPUT, UTL_FILE)
+   * in the function map of the specified resolver.
+   */
+  @Override
+  public void register(Function f) {
+    f.map.put("DBMS_OUTPUT.PUT_LINE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execDbmsOutputPutLine(ctx); }});
+    f.map.put("UTL_FILE.FOPEN", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execUtlFileFopen(ctx); }});
+    f.map.put("UTL_FILE.GET_LINE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execUtlFileGetLine(ctx); }});
+    f.map.put("UTL_FILE.PUT_LINE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execUtlFilePutLine(ctx); }});
+    f.map.put("UTL_FILE.PUT", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execUtlFilePut(ctx); }});
+    f.map.put("UTL_FILE.FCLOSE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) {
+      execUtlFileFclose(ctx); }});
+  }
+
+  /**
+   * DBMS_OUTPUT.PUT_LINE - print a text message to standard output.
+   * Only the first argument is evaluated; without arguments the call is a no-op.
+   */
+  void execDbmsOutputPutLine(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() > 0) {
+      visit(ctx.expr(0));
+      System.out.println(exec.stackPop().toString());
+    }
+  }
+
+  /**
+   * UTL_FILE.FOPEN(location, filename, open_mode) - open a file and push
+   * a FILE-typed handle on the expression stack.
+   * Mode "r" opens for reading, "w" for writing with truncation; any other
+   * mode (or a missing 3rd argument) opens for writing without truncation.
+   */
+  public void execUtlFileFopen(HplsqlParser.Expr_func_paramsContext ctx) {
+    String dir = "";
+    String name = "";
+    boolean write = true;
+    boolean overwrite = false;
+    int cnt = ctx.expr().size();
+    // Directory (1st argument)
+    if (cnt > 0) {
+      dir = evalPop(ctx.expr(0)).toString();
+    }
+    // File name (2nd argument)
+    if (cnt > 1) {
+      name = evalPop(ctx.expr(1)).toString();
+    }
+    // Open mode (3rd argument). BUG FIX: the original condition was
+    // (cnt >= 2) which evaluated ctx.expr(2) even when only 2 arguments
+    // were passed, dereferencing a null parse-tree node.
+    if (cnt > 2) {
+      String mode = evalPop(ctx.expr(2)).toString();
+      if (mode.equalsIgnoreCase("r")) {
+        write = false;
+      }
+      else if (mode.equalsIgnoreCase("w")) {
+        write = true;
+        overwrite = true;
+      }
+    }
+    File file = new File();
+    if (write) {
+      file.create(dir, name, overwrite);
+    }
+    else {
+      file.open(dir, name);
+    }
+    // Push the file handle so it can be assigned to a variable
+    exec.stackPush(new Var(Var.Type.FILE, file));
+  }
+
+  /**
+   * UTL_FILE.GET_LINE(file, str) - read a text line (without the line
+   * terminator) from an open file into the 2nd argument variable.
+   */
+  void execUtlFileGetLine(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.expr().size();
+    Var file = null;
+    Var str = null;
+    StringBuilder out = new StringBuilder();
+    // File handle (1st argument)
+    if (cnt > 0) {
+      visit(ctx.expr(0));
+      file = exec.stackPop();
+    }
+    // Output string variable (2nd argument)
+    if (cnt > 1) {
+      visit(ctx.expr(1));
+      str = exec.stackPop();
+    }
+    if (file != null && file.type == Var.Type.FILE) {
+      File f = (File)file.value;
+      if (trace) {
+        trace(ctx, "File: " + f.toString());
+      }
+      try {
+        // Accumulate characters up to, and not including, the newline
+        while (true) {
+          char c = f.readChar();
+          if (c == '\n') {
+            break;
+          }
+          out.append(c);
+        }
+      } catch (EOFException e) {
+        // End of file: keep whatever was read before EOF (possibly nothing)
+      } catch (IOException e) {
+        // Read error: discard any partial content
+        out.setLength(0);
+      }
+      // Assign the line to the output variable
+      if (str != null) {
+        str.setValue(out.toString());
+        if (trace) {
+          trace(ctx, "OUT " + str.getName() + " = " + str.toString());
+        }
+      }
+    }
+    else if (trace) {
+      trace(ctx, "Variable of FILE type not found");
+    }
+  }
+
+  /**
+   * UTL_FILE.PUT_LINE - write a string followed by a newline to the file
+   */
+  public void execUtlFilePutLine(HplsqlParser.Expr_func_paramsContext ctx) {
+    execUtlFilePut(ctx, true /*newline*/);
+  }
+
+  /**
+   * UTL_FILE.PUT - write a string to the file without a line terminator
+   */
+  public void execUtlFilePut(HplsqlParser.Expr_func_paramsContext ctx) {
+    execUtlFilePut(ctx, false /*newline*/);
+  }
+
+  /**
+   * Write the 2nd argument string to the file referenced by the 1st
+   * argument, optionally appending a newline.
+   */
+  void execUtlFilePut(HplsqlParser.Expr_func_paramsContext ctx, boolean newline) {
+    int cnt = ctx.expr().size();
+    Var file = null;
+    String str = "";
+    // File handle (1st argument)
+    if (cnt > 0) {
+      visit(ctx.expr(0));
+      file = exec.stackPop();
+    }
+    // Text to write (2nd argument); empty string when omitted
+    if (cnt > 1) {
+      visit(ctx.expr(1));
+      str = exec.stackPop().toString();
+    }
+    if (file != null && file.type == Var.Type.FILE) {
+      File f = (File)file.value;
+      if (trace) {
+        trace(ctx, "File: " + f.toString());
+      }
+      f.writeString(str);
+      if (newline) {
+        f.writeString("\n");
+      }
+    }
+    else if (trace) {
+      trace(ctx, "Variable of FILE type not found");
+    }
+  }
+
+  /**
+   * UTL_FILE.FCLOSE - close the file and clear the handle variable
+   */
+  void execUtlFileFclose(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.expr().size();
+    Var file = null;
+    // File handle (1st argument)
+    if (cnt > 0) {
+      visit(ctx.expr(0));
+      file = exec.stackPop();
+    }
+    if (file != null && file.type == Var.Type.FILE) {
+      File f = (File)file.value;
+      if (trace) {
+        trace(ctx, "File: " + f.toString());
+      }
+      f.close();
+      // Invalidate the handle so later UTL_FILE calls see a closed file
+      file.removeValue();
+    }
+    else if (trace) {
+      trace(ctx, "Variable of FILE type not found");
+    }
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java
new file mode 100644
index 0000000..610ff60
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java
@@ -0,0 +1,276 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql.functions;
+
+import org.apache.hive.hplsql.*;
+
+public class FunctionString extends Function {
+  public FunctionString(Exec e) {
+    super(e);
+  }
+
+  /**
+   * Register string functions in the function map of the specified resolver
+   */
+  @Override
+  public void register(Function f) {
+    f.map.put("CONCAT", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { concat(ctx); }});
+    f.map.put("CHAR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { char_(ctx); }});
+    f.map.put("INSTR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { instr(ctx); }});
+    f.map.put("LEN", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { len(ctx); }});
+    f.map.put("LENGTH", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { length(ctx); }});
+    f.map.put("LOWER", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { lower(ctx); }});
+    f.map.put("SUBSTR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { substr(ctx); }});
+    f.map.put("SUBSTRING", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { substr(ctx); }});
+    f.map.put("TO_CHAR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { toChar(ctx); }});
+    f.map.put("UPPER", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { upper(ctx); }});
+
+    f.specMap.put("SUBSTRING", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { substring(ctx); }});
+    f.specMap.put("TRIM", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { trim(ctx); }});
+  }
+
+  /**
+   * CONCAT function - concatenate all non-NULL arguments.
+   * Returns NULL only when every argument is NULL.
+   */
+  void concat(HplsqlParser.Expr_func_paramsContext ctx) {
+    StringBuilder val = new StringBuilder();
+    int cnt = ctx.expr().size();
+    boolean nulls = true;
+    for (int i = 0; i < cnt; i++) {
+      Var c = evalPop(ctx.expr(i));
+      if (!c.isNull()) {
+        val.append(c.toString());
+        nulls = false;
+      }
+    }
+    if (nulls) {
+      evalNull();
+    }
+    else {
+      evalString(val);
+    }
+  }
+
+  /**
+   * CHAR function - convert the single argument to a string
+   */
+  void char_(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    evalString(str);
+  }
+
+  /**
+   * INSTR(str, substr [, pos [, occurrence]]) - 1-based position of the
+   * n-th occurrence of substr in str, or 0 when not found.
+   * A negative pos searches backwards from the end, Oracle-style.
+   */
+  void instr(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.expr().size();
+    if (cnt < 2) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    if (str == null) {
+      evalNull();
+      return;
+    }
+    else if (str.isEmpty()) {
+      evalInt(Long.valueOf(0));
+      return;
+    }
+    String substr = evalPop(ctx.expr(1)).toString();
+    int pos = 1;
+    int occur = 1;
+    int idx = 0;
+    // Start position (3rd argument); 0 is treated as 1
+    if (cnt >= 3) {
+      pos = evalPop(ctx.expr(2)).intValue();
+      if (pos == 0) {
+        pos = 1;
+      }
+    }
+    // Occurrence number (4th argument)
+    if (cnt >= 4) {
+      occur = evalPop(ctx.expr(3)).intValue();
+      if (occur < 0) {
+        occur = 1;
+      }
+    }
+    for (int i = occur; i > 0; i--) {
+      if (pos > 0) {
+        idx = str.indexOf(substr, pos - 1);
+      }
+      else {
+        // Negative position: ignore the last |pos| characters and search
+        // from the right (str.length() + pos == str.length() - |pos|)
+        str = str.substring(0, str.length() + pos);
+        idx = str.lastIndexOf(substr);
+      }
+      if (idx == -1) {
+        idx = 0;
+        break;
+      }
+      else {
+        idx++;   // convert to a 1-based position
+      }
+      if (i > 1) {
+        // Move the search window past the match just found
+        if (pos > 0) {
+          pos = idx + 1;
+        }
+        else {
+          pos = (str.length() - idx + 1) * (-1);
+        }
+      }
+    }
+    evalInt(Long.valueOf(idx));
+  }
+
+  /**
+   * LEN function - length of the string excluding trailing spaces.
+   * BUG FIX: the original used trim() which also removed leading spaces.
+   */
+  void len(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    int end = str.length();
+    while (end > 0 && str.charAt(end - 1) == ' ') {
+      end--;
+    }
+    evalInt(Long.valueOf(end));
+  }
+
+  /**
+   * LENGTH function - length of the string including all spaces
+   */
+  void length(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() != 1) {
+      evalNull();
+      return;
+    }
+    int len = evalPop(ctx.expr(0)).toString().length();
+    evalInt(Long.valueOf(len));
+  }
+
+  /**
+   * LOWER function - convert the string to lower case
+   */
+  void lower(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString().toLowerCase();
+    evalString(str);
+  }
+
+  /**
+   * SUBSTR and SUBSTRING(str, start [, len]) function
+   */
+  void substr(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.expr().size();
+    if (cnt < 2) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    int start = evalPop(ctx.expr(1)).intValue();
+    int len = -1;      // -1 means "to the end of the string"
+    if (start == 0) {
+      start = 1;
+    }
+    if (cnt > 2) {
+      len = evalPop(ctx.expr(2)).intValue();
+    }
+    substr(str, start, len);
+  }
+
+  /**
+   * Shared SUBSTR implementation; len == -1 means "to the end".
+   * BUG FIX: the original pushed nothing for a negative start (leaving the
+   * expression stack unbalanced) and threw StringIndexOutOfBoundsException
+   * for out-of-range starts. A negative start now counts from the end of
+   * the string (Oracle semantics) and out-of-range positions yield NULL.
+   */
+  void substr(String str, int start, int len) {
+    if (str == null) {
+      evalNull();
+      return;
+    }
+    else if (str.isEmpty()) {
+      evalString(str);
+      return;
+    }
+    if (start == 0) {
+      start = 1;
+    }
+    else if (start < 0) {
+      // Count from the end: -1 is the last character
+      start = str.length() + start + 1;
+    }
+    if (start < 1 || start > str.length()) {
+      evalNull();
+      return;
+    }
+    if (len == -1) {
+      evalString(str.substring(start - 1));
+    }
+    else {
+      // Clamp the end index so a long len does not run past the string
+      evalString(str.substring(start - 1, Math.min(start - 1 + len, str.length())));
+    }
+  }
+
+  /**
+   * SUBSTRING(str FROM start [FOR len]) function
+   */
+  void substring(HplsqlParser.Expr_spec_funcContext ctx) {
+    String str = evalPop(ctx.expr(0)).toString();
+    int start = evalPop(ctx.expr(1)).intValue();
+    int len = -1;
+    if (start == 0) {
+      start = 1;
+    }
+    if (ctx.T_FOR() != null) {
+      len = evalPop(ctx.expr(2)).intValue();
+    }
+    substr(str, start, len);
+  }
+
+  /**
+   * TRIM function - remove leading and trailing whitespace
+   */
+  void trim(HplsqlParser.Expr_spec_funcContext ctx) {
+    int cnt = ctx.expr().size();
+    if (cnt != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    evalString(str.trim());
+  }
+
+  /**
+   * TO_CHAR function - convert the single argument to a string
+   */
+  void toChar(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.expr().size();
+    if (cnt != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString();
+    evalString(str);
+  }
+
+  /**
+   * UPPER function - convert the string to upper case
+   */
+  void upper(HplsqlParser.Expr_func_paramsContext ctx) {
+    if (ctx.expr().size() != 1) {
+      evalNull();
+      return;
+    }
+    String str = evalPop(ctx.expr(0)).toString().toUpperCase();
+    evalString(str);
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 1c9b02f..f84f3e9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -39,6 +39,7 @@
<module>contrib</module>
<module>hbase-handler</module>
<module>hcatalog</module>
+ <module>hplsql</module>
<module>hwi</module>
<module>jdbc</module>
<module>metastore</module>
[2/4] hive git commit: HIVE-11055 HPL/SQL - Implementing Procedural
SQL in Hive (PL/HQL Contribution) (Dmitry Tolpeko via gates)
Posted by ga...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Select.java b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
new file mode 100644
index 0000000..e0f4098
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
@@ -0,0 +1,411 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.Stack;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.misc.Interval;
+
+// Builds the SQL text of SELECT statements and executes them,
+// handling SELECT INTO, standalone SELECT and scalar subqueries.
+public class Select {
+
+ Exec exec = null;
+ Stack<Var> stack = null;
+ Conf conf;
+
+ boolean trace = false;
+
+ Select(Exec e) {
+ exec = e;
+ stack = exec.getStack();
+ conf = exec.getConf();
+ trace = exec.getTrace();
+ }
+
+ /**
+ * Executing or building SELECT statement
+ */
+ public Integer select(HplsqlParser.Select_stmtContext ctx) {
+ if (ctx.parent instanceof HplsqlParser.StmtContext) {
+ exec.stmtConnList.clear();
+ trace(ctx, "SELECT");
+ }
+ // First build the full SQL text of the statement (buildSql mode on)
+ boolean oldBuildSql = exec.buildSql;
+ exec.buildSql = true;
+ StringBuilder sql = new StringBuilder();
+ if (ctx.cte_select_stmt() != null) {
+ sql.append(evalPop(ctx.cte_select_stmt()).toString());
+ sql.append("\n");
+ }
+ sql.append(evalPop(ctx.fullselect_stmt()).toString());
+ exec.buildSql = oldBuildSql;
+ if (!(ctx.parent instanceof HplsqlParser.StmtContext)) { // No need to execute at this stage
+ exec.stackPush(sql);
+ return 0;
+ }
+ if (trace && ctx.parent instanceof HplsqlParser.StmtContext) {
+ trace(ctx, sql.toString());
+ }
+ if (exec.getOffline()) {
+ trace(ctx, "Not executed - offline mode set");
+ return 0;
+ }
+ // Execute on the connection assigned to this statement
+ String conn = exec.getStatementConnection();
+ Query query = exec.executeQuery(ctx, sql.toString(), conn);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ trace(ctx, "SELECT completed successfully");
+ exec.setSqlSuccess();
+ try {
+ ResultSet rs = query.getResultSet();
+ ResultSetMetaData rm = null;
+ if (rs != null) {
+ rm = rs.getMetaData();
+ }
+ // SELECT INTO: fetch the first row into the INTO-list variables
+ HplsqlParser.Into_clauseContext into = getIntoClause(ctx);
+ if (into != null) {
+ trace(ctx, "SELECT INTO statement executed");
+ int cols = into.ident().size();
+ if (rs.next()) {
+ for (int i = 1; i <= cols; i++) {
+ Var var = exec.findVariable(into.ident(i-1).getText());
+ if (var != null) {
+ var.setValue(rs, rm, i);
+ if (trace) {
+ trace(ctx, "COLUMN: " + rm.getColumnName(i) + ", " + rm.getColumnTypeName(i));
+ trace(ctx, "SET " + var.getName() + " = " + var.toString());
+ }
+ }
+ else if(trace) {
+ trace(ctx, "Variable not found: " + into.ident(i-1).getText());
+ }
+ }
+ exec.incRowCount();
+ exec.setSqlSuccess();
+ }
+ else {
+ // No row fetched: SQLCODE 100 and NOT FOUND condition
+ exec.setSqlCode(100);
+ exec.signal(Signal.Type.NOTFOUND);
+ }
+ }
+ // Print all results for standalone SELECT statement
+ else if (ctx.parent instanceof HplsqlParser.StmtContext) {
+ int cols = rm.getColumnCount();
+ if (trace) {
+ trace(ctx, "Standalone SELECT executed: " + cols + " columns in the result set");
+ }
+ while (rs.next()) {
+ for (int i = 1; i <= cols; i++) {
+ if (i > 1) {
+ System.out.print("\t");
+ }
+ System.out.print(rs.getString(i));
+ }
+ System.out.println("");
+ exec.incRowCount();
+ }
+ }
+ // Scalar subquery
+ else {
+ trace(ctx, "Scalar subquery executed, first row and first column fetched only");
+ if(rs.next()) {
+ exec.stackPush(new Var().setValue(rs, rm, 1));
+ exec.setSqlSuccess();
+ }
+ else {
+ evalNull();
+ exec.setSqlCode(100);
+ }
+ }
+ }
+ catch (SQLException e) {
+ exec.signal(query);
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 1;
+ }
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * Common table expression (WITH clause)
+ */
+ public Integer cte(HplsqlParser.Cte_select_stmtContext ctx) {
+ int cnt = ctx.cte_select_stmt_item().size();
+ StringBuilder sql = new StringBuilder();
+ sql.append("WITH ");
+ for (int i = 0; i < cnt; i++) {
+ HplsqlParser.Cte_select_stmt_itemContext c = ctx.cte_select_stmt_item(i);
+ sql.append(c.ident().getText());
+ if (c.cte_select_cols() != null) {
+ sql.append(" " + exec.getFormattedText(c.cte_select_cols()));
+ }
+ sql.append(" AS (");
+ sql.append(evalPop(ctx.cte_select_stmt_item(i).fullselect_stmt()).toString());
+ sql.append(")");
+ if (i + 1 != cnt) {
+ sql.append(",\n");
+ }
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * Part of SELECT
+ */
+ public Integer fullselect(HplsqlParser.Fullselect_stmtContext ctx) {
+ int cnt = ctx.fullselect_stmt_item().size();
+ StringBuilder sql = new StringBuilder();
+ for (int i = 0; i < cnt; i++) {
+ String part = evalPop(ctx.fullselect_stmt_item(i)).toString();
+ sql.append(part);
+ if (i + 1 != cnt) {
+ // Join subselects with the set operator (UNION etc.) between them
+ sql.append("\n" + getText(ctx.fullselect_set_clause(i)) + "\n");
+ }
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ // Build the SQL text of a single subselect from its clauses
+ public Integer subselect(HplsqlParser.Subselect_stmtContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ if (ctx.T_SELECT() != null) {
+ sql.append(ctx.T_SELECT().getText());
+ }
+ sql.append(" " + evalPop(ctx.select_list()));
+ if (ctx.from_clause() != null) {
+ sql.append(" " + evalPop(ctx.from_clause()));
+ } else {
+ // No FROM clause specified - use the configured dual table
+ sql.append(" FROM " + conf.dualTable);
+ }
+ if (ctx.where_clause() != null) {
+ sql.append(" " + evalPop(ctx.where_clause()));
+ }
+ if (ctx.group_by_clause() != null) {
+ sql.append(" " + getText(ctx.group_by_clause()));
+ }
+ if (ctx.having_clause() != null) {
+ sql.append(" " + getText(ctx.having_clause()));
+ }
+ if (ctx.order_by_clause() != null) {
+ sql.append(" " + getText(ctx.order_by_clause()));
+ }
+ if (ctx.select_options() != null) {
+ sql.append(" " + evalPop(ctx.select_options()));
+ }
+ if (ctx.select_list().select_list_limit() != null) {
+ sql.append(" LIMIT " + evalPop(ctx.select_list().select_list_limit().expr()));
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * SELECT list
+ */
+ public Integer selectList(HplsqlParser.Select_listContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ if (ctx.select_list_set() != null) {
+ sql.append(exec.getText(ctx.select_list_set())).append(" ");
+ }
+ int cnt = ctx.select_list_item().size();
+ for (int i = 0; i < cnt; i++) {
+ if (ctx.select_list_item(i).select_list_asterisk() == null) {
+ sql.append(evalPop(ctx.select_list_item(i)));
+ if (ctx.select_list_item(i).select_list_alias() != null) {
+ sql.append(" " + exec.getText(ctx.select_list_item(i).select_list_alias()));
+ }
+ }
+ else {
+ sql.append(exec.getText(ctx.select_list_item(i).select_list_asterisk()));
+ }
+ if (i + 1 < cnt) {
+ sql.append(", ");
+ }
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * FROM clause
+ */
+ public Integer from(HplsqlParser.From_clauseContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append(ctx.T_FROM().getText()).append(" ");
+ sql.append(evalPop(ctx.from_table_clause()));
+ int cnt = ctx.from_join_clause().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(evalPop(ctx.from_join_clause(i)));
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * Single table name in FROM
+ */
+ public Integer fromTable(HplsqlParser.From_table_name_clauseContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append(evalPop(ctx.table_name()));
+ if (ctx.from_alias_clause() != null) {
+ sql.append(" ").append(exec.getText(ctx.from_alias_clause()));
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * JOIN clause in FROM
+ */
+ public Integer fromJoin(HplsqlParser.From_join_clauseContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ if (ctx.T_COMMA() != null) {
+ // Comma join: "FROM t1, t2"
+ sql.append(", ");
+ sql.append(evalPop(ctx.from_table_clause()));
+ }
+ else if (ctx.from_join_type_clause() != null) {
+ // Explicit JOIN ... ON ... clause
+ sql.append(" ");
+ sql.append(exec.getText(ctx.from_join_type_clause()));
+ sql.append(" ");
+ sql.append(evalPop(ctx.from_table_clause()));
+ sql.append(" ");
+ sql.append(exec.getText(ctx, ctx.T_ON().getSymbol(), ctx.bool_expr().getStop()));
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * FROM TABLE (VALUES ...) clause
+ */
+ public Integer fromTableValues(HplsqlParser.From_table_values_clauseContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ int rows = ctx.from_table_values_row().size();
+ // Emulate VALUES rows as "SELECT ... FROM dual" joined by UNION ALL
+ sql.append("(");
+ for (int i = 0; i < rows; i++) {
+ int cols = ctx.from_table_values_row(i).expr().size();
+ int cols_as = ctx.from_alias_clause().L_ID().size();
+ sql.append("SELECT ");
+ for (int j = 0; j < cols; j++) {
+ sql.append(evalPop(ctx.from_table_values_row(i).expr(j)));
+ if (j < cols_as) {
+ sql.append(" AS ");
+ sql.append(ctx.from_alias_clause().L_ID(j));
+ }
+ if (j + 1 < cols) {
+ sql.append(", ");
+ }
+ }
+ sql.append(" FROM " + conf.dualTable);
+ if (i + 1 < rows) {
+ sql.append("\nUNION ALL\n");
+ }
+ }
+ sql.append(") ");
+ if (ctx.from_alias_clause() != null) {
+ sql.append(ctx.from_alias_clause().ident().getText());
+ }
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * WHERE clause
+ */
+ public Integer where(HplsqlParser.Where_clauseContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append(ctx.T_WHERE().getText());
+ sql.append(" " + evalPop(ctx.bool_expr()));
+ exec.stackPush(sql);
+ return 0;
+ }
+
+ /**
+ * Get INTO clause
+ */
+ HplsqlParser.Into_clauseContext getIntoClause(HplsqlParser.Select_stmtContext ctx) {
+ if (ctx.fullselect_stmt().fullselect_stmt_item(0).subselect_stmt() != null) {
+ return ctx.fullselect_stmt().fullselect_stmt_item(0).subselect_stmt().into_clause();
+ }
+ return null;
+ }
+
+ /**
+ * SELECT statement options - LIMIT n, WITH UR i.e
+ */
+ public Integer option(HplsqlParser.Select_options_itemContext ctx) {
+ if (ctx.T_LIMIT() != null) {
+ exec.stackPush("LIMIT " + evalPop(ctx.expr()));
+ }
+ return 0;
+ }
+
+ /**
+ * Evaluate the expression to NULL
+ */
+ void evalNull() {
+ exec.stackPush(Var.Null);
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ Var evalPop(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return Var.Empty;
+ }
+
+ /**
+ * Get node text including spaces
+ */
+ String getText(ParserRuleContext ctx) {
+ return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ }
+
+ /**
+ * Execute rules
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Execute children rules
+ */
+ Integer visitChildren(ParserRuleContext ctx) {
+ return exec.visitChildren(ctx);
+ }
+
+ /**
+ * Trace information
+ */
+ void trace(ParserRuleContext ctx, String message) {
+ exec.trace(ctx, message);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java b/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java
new file mode 100644
index 0000000..6330ae3
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+/**
+ * Signals and exceptions
+ */
+public class Signal {
+  public enum Type { LEAVE_LOOP, LEAVE_ROUTINE, SQLEXCEPTION, NOTFOUND, USERDEFINED };
+  Type type;
+  String value = "";
+  Exception exception = null;
+
+  // Signal without an associated exception
+  Signal(Type type, String value) {
+    this(type, value, null);
+  }
+
+  // Signal carrying the exception that caused it (may be null)
+  Signal(Type type, String value, Exception exception) {
+    this.type = type;
+    this.value = value;
+    this.exception = exception;
+  }
+
+  /**
+   * Get the signal value (message text)
+   */
+  public String getValue() {
+    return value;
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
new file mode 100644
index 0000000..acc4907
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
@@ -0,0 +1,1021 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.Stack;
+import java.util.UUID;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.apache.hive.hplsql.Var.Type;
+import org.apache.hive.hplsql.HplsqlParser.Create_table_columns_itemContext;
+import org.apache.hive.hplsql.HplsqlParser.Create_table_columnsContext;
+
+/**
+ * HPL/SQL statements execution
+ */
+public class Stmt {
+
+ Exec exec = null;
+ Stack<Var> stack = null;
+ Conf conf;
+
+ boolean trace = false;
+
+ Stmt(Exec e) {
+ exec = e;
+ stack = exec.getStack();
+ conf = exec.getConf();
+ trace = exec.getTrace();
+ }
+
  /**
   * DECLARE cursor statement.
   * Registers a CURSOR-typed variable in the current scope; the SQL text is
   * not evaluated here but lazily when the cursor is opened (see open()).
   */
  public Integer declareCursor(HplsqlParser.Declare_cursor_itemContext ctx) {
    String name = ctx.ident().getText();
    if (trace) {
      trace(ctx, "DECLARE CURSOR " + name);
    }
    // A cursor may be declared over a dynamic SQL expression or a SELECT
    // statement; store whichever parse-tree context is present.
    Query query = new Query();
    if (ctx.expr() != null) {
      query.setExprCtx(ctx.expr());
    }
    else if (ctx.select_stmt() != null) {
      query.setSelectCtx(ctx.select_stmt());
    }
    exec.addVariable(new Var(name, Type.CURSOR, query));
    return 0;
  }
+
+ /**
+ * CREATE TABLE statement
+ */
+ public Integer createTable(HplsqlParser.Create_table_stmtContext ctx) {
+ trace(ctx, "CREATE TABLE");
+ StringBuilder sql = new StringBuilder();
+ sql.append(exec.getText(ctx, ctx.T_CREATE().getSymbol(), ctx.T_OPEN_P().getSymbol()));
+ int cnt = ctx.create_table_columns().create_table_columns_item().size();
+ int cols = 0;
+ for (int i = 0; i < cnt; i++) {
+ Create_table_columns_itemContext col = ctx.create_table_columns().create_table_columns_item(i);
+ if (col.create_table_column_cons() != null) {
+ continue;
+ }
+ if (cols > 0) {
+ sql.append(",\n");
+ }
+ sql.append(col.ident().getText());
+ sql.append(" ");
+ sql.append(exec.evalPop(col.dtype(), col.dtype_len()));
+ cols++;
+ }
+ sql.append("\n)");
+ if (ctx.create_table_options() != null) {
+ sql.append(" " + evalPop(ctx.create_table_options()).toString());
+ }
+ trace(ctx, sql.toString());
+ Query query = exec.executeSql(ctx, sql.toString(), exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * CREATE TABLE options for Hive
+ */
+ public Integer createTableHiveOptions(HplsqlParser.Create_table_options_hive_itemContext ctx) {
+ if (ctx.create_table_hive_row_format() != null) {
+ createTableHiveRowFormat(ctx.create_table_hive_row_format());
+ }
+ return 0;
+ }
+
+ public Integer createTableHiveRowFormat(HplsqlParser.Create_table_hive_row_formatContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append("ROW FORMAT DELIMITED");
+ int cnt = ctx.create_table_hive_row_format_fields().size();
+ for (int i = 0; i < cnt; i++) {
+ HplsqlParser.Create_table_hive_row_format_fieldsContext c = ctx.create_table_hive_row_format_fields(i);
+ if (c.T_FIELDS() != null) {
+ sql.append(" FIELDS TERMINATED BY " + evalPop(c.expr(0)).toSqlString());
+ }
+ else if (c.T_LINES() != null) {
+ sql.append(" LINES TERMINATED BY " + evalPop(c.expr(0)).toSqlString());
+ }
+ }
+ evalString(sql);
+ return 0;
+ }
+
+ /**
+ * DECLARE TEMPORARY TABLE statement
+ */
+ public Integer declareTemporaryTable(HplsqlParser.Declare_temporary_table_itemContext ctx) {
+ String name = ctx.ident().getText();
+ if (trace) {
+ trace(ctx, "DECLARE TEMPORARY TABLE " + name);
+ }
+ return createTemporaryTable(ctx, ctx.create_table_columns(), name);
+ }
+
+ /**
+ * CREATE LOCAL TEMPORARY | VOLATILE TABLE statement
+ */
+ public Integer createLocalTemporaryTable(HplsqlParser.Create_local_temp_table_stmtContext ctx) {
+ String name = ctx.ident().getText();
+ if (trace) {
+ trace(ctx, "CREATE LOCAL TEMPORARY TABLE " + name);
+ }
+ return createTemporaryTable(ctx, ctx.create_table_columns(), name);
+ }
+
+ /**
+ * Create a temporary table statement
+ */
+ public Integer createTemporaryTable(ParserRuleContext ctx, Create_table_columnsContext colCtx, String name) {
+ String managedName = null;
+ String sql = null;
+ String columns = exec.getFormattedText(colCtx);
+ if (conf.tempTables == Conf.TempTables.NATIVE) {
+ sql = "CREATE TEMPORARY TABLE " + name + "\n(" + columns + "\n)";
+ } else if (conf.tempTables == Conf.TempTables.MANAGED) {
+ managedName = name + "_" + UUID.randomUUID().toString().replace("-","");
+ if (!conf.tempTablesSchema.isEmpty()) {
+ managedName = conf.tempTablesSchema + "." + managedName;
+ }
+ sql = "CREATE TABLE " + managedName + "\n(" + columns + "\n)";
+ if (!conf.tempTablesLocation.isEmpty()) {
+ sql += "\nLOCATION '" + conf.tempTablesLocation + "/" + managedName + "'";
+ }
+ if (trace) {
+ trace(ctx, "Managed table name: " + managedName);
+ }
+ }
+ if (sql != null) {
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ if (managedName != null) {
+ exec.addManagedTable(name, managedName);
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ }
+ return 0;
+ }
+
+ /**
+ * DROP statement
+ */
+ public Integer drop(HplsqlParser.Drop_stmtContext ctx) {
+ trace(ctx, "DROP");
+ String sql = null;
+ if (ctx.T_TABLE() != null) {
+ sql = "DROP TABLE ";
+ if (ctx.T_EXISTS() != null) {
+ sql += "IF NOT EXISTS ";
+ }
+ sql += evalPop(ctx.table_name()).toString();
+ }
+ if (sql != null) {
+ trace(ctx, sql);
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ }
+ return 0;
+ }
+
  /**
   * OPEN cursor statement.
   * Handles both OPEN ... FOR <dynamic sql> (creates and registers a new
   * cursor variable) and opening a previously DECLAREd cursor (resolves
   * its deferred SQL text now). Returns 0 on success, 1 on error.
   */
  public Integer open(HplsqlParser.Open_stmtContext ctx) {
    trace(ctx, "OPEN");
    Query query = null;
    Var var = null;
    String cursor = ctx.L_ID().toString();
    String sql = null;
    // Dynamic SQL: OPEN cur FOR 'SELECT ...' — build the query from the
    // evaluated expression and register the cursor variable on the fly
    if (ctx.T_FOR() != null) {
      sql = evalPop(ctx.expr()).toString();
      if (trace) {
        trace(ctx, cursor + ": " + sql);
      }
      query = new Query(sql);
      var = new Var(cursor, Type.CURSOR, query);
      exec.addVariable(var);
    }
    // Declared cursor: look it up and evaluate the SQL text that was
    // stored as a parse-tree context at DECLARE time
    else {
      var = exec.findVariable(cursor);
      if (var != null && var.type == Type.CURSOR) {
        query = (Query)var.value;
        if (query.sqlExpr != null) {
          sql = evalPop(query.sqlExpr).toString();
          query.setSql(sql);
        }
        else if (query.sqlSelect != null) {
          sql = evalPop(query.sqlSelect).toString();
          query.setSql(sql);
        }
        if (trace) {
          trace(ctx, cursor + ": " + sql);
        }
      }
    }
    // Open cursor now
    if (query != null) {
      exec.executeQuery(ctx, query, exec.conf.defaultConnection);
      if (query.error()) {
        exec.signal(query);
        return 1;
      }
      // In offline mode nothing was executed, so leave SQLCODE untouched
      else if (!exec.getOffline()) {
        exec.setSqlCode(0);
      }
    }
    else {
      // Unknown cursor name: raise an SQL exception signal
      trace(ctx, "Cursor not found: " + cursor);
      exec.setSqlCode(-1);
      exec.signal(Signal.Type.SQLEXCEPTION);
      return 1;
    }
    return 0;
  }
+
  /**
   * FETCH cursor statement.
   * Advances the cursor one row and assigns each column to the
   * corresponding local variable (L_ID(0) is the cursor name, the rest
   * are target variables). Sets SQLCODE 100 / NOTFOUND when exhausted.
   */
  public Integer fetch(HplsqlParser.Fetch_stmtContext ctx) {
    trace(ctx, "FETCH");
    String name = ctx.L_ID(0).toString();
    Var cursor = exec.findVariable(name);
    if (cursor == null || cursor.type != Type.CURSOR) {
      trace(ctx, "Cursor not found: " + name);
      exec.setSqlCode(-1);
      exec.signal(Signal.Type.SQLEXCEPTION);
      return 1;
    }
    else if (exec.getOffline()) {
      // Offline mode: no result set exists; report NOT FOUND immediately
      exec.setSqlCode(100);
      exec.signal(Signal.Type.NOTFOUND);
      return 0;
    }
    // Assign values from the row to local variables
    try {
      Query query = (Query)cursor.value;
      ResultSet rs = query.getResultSet();
      ResultSetMetaData rsm = null;
      if(rs != null) {
        rsm = rs.getMetaData();
      }
      if(rs != null && rsm != null) {
        // First L_ID is the cursor itself, the remaining are targets
        int cols = ctx.L_ID().size() - 1;
        if(rs.next()) {
          for(int i=1; i <= cols; i++) {
            Var var = exec.findVariable(ctx.L_ID(i).getText());
            if(var != null) {
              var.setValue(rs, rsm, i);
              if(trace) {
                trace(ctx, "COLUMN: " + rsm.getColumnName(i) + ", " + rsm.getColumnTypeName(i));
                trace(ctx, "SET " + var.getName() + " = " + var.toString());
              }
            }
            else if(trace) {
              trace(ctx, "Variable not found: " + ctx.L_ID(i).getText());
            }
          }
          exec.incRowCount();
          exec.setSqlSuccess();
        }
        else {
          // No more rows
          exec.setSqlCode(100);
          exec.signal(Signal.Type.NOTFOUND);
        }
      }
    }
    catch (SQLException e) {
      exec.setSqlCode(e);
      exec.signal(Signal.Type.SQLEXCEPTION, e.getMessage(), e);
    }
    return 0;
  }
+
  /**
   * CLOSE cursor statement.
   * Releases the cursor's underlying query on the default connection;
   * an unknown cursor name is only reported in trace mode.
   */
  public Integer close(HplsqlParser.Close_stmtContext ctx) {
    trace(ctx, "CLOSE");
    String name = ctx.L_ID().toString();
    Var var = exec.findVariable(name);
    if(var != null && var.type == Type.CURSOR) {
      exec.closeQuery((Query)var.value, exec.conf.defaultConnection);
      exec.setSqlCode(0);
    }
    else if(trace) {
      trace(ctx, "Cursor not found: " + name);
    }
    return 0;
  }
+
+ /**
+ * INCLUDE statement
+ */
+ public Integer include(HplsqlParser.Include_stmtContext ctx) {
+ String file = ctx.file_name().getText();
+ trace(ctx, "INCLUDE " + file);
+ exec.includeFile(file);
+ return 0;
+ }
+
+ /**
+ * IF statement (PL/SQL syntax)
+ */
+ public Integer ifPlsql(HplsqlParser.If_plsql_stmtContext ctx) {
+ boolean trueExecuted = false;
+ trace(ctx, "IF");
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ trace(ctx, "IF TRUE executed");
+ visit(ctx.block());
+ trueExecuted = true;
+ }
+ else if (ctx.elseif_block() != null) {
+ int cnt = ctx.elseif_block().size();
+ for (int i = 0; i < cnt; i++) {
+ if (evalPop(ctx.elseif_block(i).bool_expr()).isTrue()) {
+ trace(ctx, "ELSE IF executed");
+ visit(ctx.elseif_block(i).block());
+ trueExecuted = true;
+ break;
+ }
+ }
+ }
+ if (!trueExecuted && ctx.else_block() != null) {
+ trace(ctx, "ELSE executed");
+ visit(ctx.else_block());
+ }
+ return 0;
+ }
+
+ /**
+ * IF statement (Transact-SQL syntax)
+ */
+ public Integer ifTsql(HplsqlParser.If_tsql_stmtContext ctx) {
+ trace(ctx, "IF");
+ visit(ctx.bool_expr());
+ if(exec.stackPop().isTrue()) {
+ trace(ctx, "IF TRUE executed");
+ visit(ctx.single_block_stmt(0));
+ }
+ else if(ctx.T_ELSE() != null) {
+ trace(ctx, "ELSE executed");
+ visit(ctx.single_block_stmt(1));
+ }
+ return 0;
+ }
+
+ /**
+ * Assignment from SELECT statement
+ */
+ public Integer assignFromSelect(HplsqlParser.Assignment_stmt_select_itemContext ctx) {
+ String sql = evalPop(ctx.select_stmt()).toString();
+ if (trace) {
+ trace(ctx, sql.toString());
+ }
+ String conn = exec.getStatementConnection();
+ Query query = exec.executeQuery(ctx, sql.toString(), conn);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ try {
+ ResultSet rs = query.getResultSet();
+ ResultSetMetaData rm = null;
+ if (rs != null) {
+ rm = rs.getMetaData();
+ int cnt = ctx.ident().size();
+ if (rs.next()) {
+ for (int i = 1; i <= cnt; i++) {
+ Var var = exec.findVariable(ctx.ident(i-1).getText());
+ if (var != null) {
+ var.setValue(rs, rm, i);
+ if (trace) {
+ trace(ctx, "COLUMN: " + rm.getColumnName(i) + ", " + rm.getColumnTypeName(i));
+ trace(ctx, "SET " + var.getName() + " = " + var.toString());
+ }
+ }
+ else if(trace) {
+ trace(ctx, "Variable not found: " + ctx.ident(i-1).getText());
+ }
+ }
+ exec.incRowCount();
+ exec.setSqlSuccess();
+ }
+ else {
+ exec.setSqlCode(100);
+ exec.signal(Signal.Type.NOTFOUND);
+ }
+ }
+ }
+ catch (SQLException e) {
+ exec.signal(query);
+ return 1;
+ }
+ finally {
+ exec.closeQuery(query, conn);
+ }
+ return 0;
+ }
+
+ /**
+ * SQL INSERT statement
+ */
+ public Integer insert(HplsqlParser.Insert_stmtContext ctx) {
+ exec.stmtConnList.clear();
+ if (ctx.select_stmt() != null) {
+ return insertSelect(ctx);
+ }
+ return insertValues(ctx);
+ }
+
+ /**
+ * SQL INSERT SELECT statement
+ */
+ public Integer insertSelect(HplsqlParser.Insert_stmtContext ctx) {
+ trace(ctx, "INSERT SELECT");
+ String table = evalPop(ctx.table_name()).toString();
+ String select = evalPop(ctx.select_stmt()).toString();
+ String sql = "INSERT INTO TABLE " + table + " " + select;
+ trace(ctx, sql);
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * SQL INSERT VALUES statement
+ */
+ public Integer insertValues(HplsqlParser.Insert_stmtContext ctx) {
+ trace(ctx, "INSERT VALUES");
+ String table = evalPop(ctx.table_name()).toString();
+ String conn = exec.getObjectConnection(ctx.table_name().getText());
+ Conn.Type type = exec.getConnectionType(conn);
+ StringBuilder sql = new StringBuilder();
+ if (type == Conn.Type.HIVE) {
+ sql.append("INSERT INTO TABLE " + table + " ");
+ if (conf.insertValues == Conf.InsertValues.NATIVE) {
+ sql.append("VALUES\n(");
+ }
+ }
+ else {
+ sql.append("INSERT INTO " + table);
+ if (ctx.insert_stmt_cols() != null) {
+ sql.append(" " + exec.getFormattedText(ctx.insert_stmt_cols()));
+ }
+ sql.append(" VALUES\n(");
+ }
+ int rows = ctx.insert_stmt_rows().insert_stmt_row().size();
+ for (int i = 0; i < rows; i++) {
+ HplsqlParser.Insert_stmt_rowContext row =ctx.insert_stmt_rows().insert_stmt_row(i);
+ int cols = row.expr().size();
+ for (int j = 0; j < cols; j++) {
+ String value = evalPop(row.expr(j)).toSqlString();
+ if (j == 0 && type == Conn.Type.HIVE && conf.insertValues == Conf.InsertValues.SELECT ) {
+ sql.append("SELECT ");
+ }
+ sql.append(value);
+ if (j + 1 != cols) {
+ sql.append(", ");
+ }
+ }
+ if (type != Conn.Type.HIVE || conf.insertValues == Conf.InsertValues.NATIVE) {
+ if (i + 1 == rows) {
+ sql.append(")");
+ } else {
+ sql.append("),\n(");
+ }
+ }
+ else if (type == Conn.Type.HIVE && conf.insertValues == Conf.InsertValues.SELECT) {
+ sql.append(" FROM " + conf.dualTable);
+ if (i + 1 < rows) {
+ sql.append("\nUNION ALL\n");
+ }
+ }
+ }
+ if (trace) {
+ trace(ctx, sql.toString());
+ }
+ Query query = exec.executeSql(ctx, sql.toString(), conn);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS EXCEPTION statement
+ */
+ public Integer getDiagnosticsException(HplsqlParser.Get_diag_stmt_exception_itemContext ctx) {
+ trace(ctx, "GET DIAGNOSTICS EXCEPTION");
+ Signal signal = exec.signalPeek();
+ if (signal == null || (signal != null && signal.type != Signal.Type.SQLEXCEPTION)) {
+ signal = exec.currentSignal;
+ }
+ if (signal != null) {
+ exec.setVariable(ctx.ident().getText(), signal.getValue());
+ }
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS ROW_COUNT statement
+ */
+ public Integer getDiagnosticsRowCount(HplsqlParser.Get_diag_stmt_rowcount_itemContext ctx) {
+ trace(ctx, "GET DIAGNOSTICS ROW_COUNT");
+ exec.setVariable(ctx.ident().getText(), exec.getRowCount());
+ return 0;
+ }
+
+ /**
+ * USE statement
+ */
+ public Integer use(HplsqlParser.Use_stmtContext ctx) {
+ if(trace) {
+ trace(ctx, "USE");
+ }
+ String sql = ctx.T_USE().toString() + " " + evalPop(ctx.expr()).toString();
+ if(trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if(query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlCode(0);
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * VALUES statement
+ */
+ public Integer values(HplsqlParser.Values_into_stmtContext ctx) {
+ trace(ctx, "VALUES statement");
+ int cnt = ctx.ident().size(); // Number of variables and assignment expressions
+ int ecnt = ctx.expr().size();
+ for (int i = 0; i < cnt; i++) {
+ String name = ctx.ident(i).getText();
+ if (i < ecnt) {
+ visit(ctx.expr(i));
+ Var var = exec.setVariable(name);
+ if (trace) {
+ trace(ctx, "SET " + name + " = " + var.toString());
+ }
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * WHILE statement
+ */
+ public Integer while_(HplsqlParser.While_stmtContext ctx) {
+ trace(ctx, "WHILE - ENTERED");
+ String label = exec.labelPop();
+ while (true) {
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ exec.enterScope(Scope.Type.LOOP);
+ visit(ctx.block());
+ exec.leaveScope();
+ if (canContinue(label)) {
+ continue;
+ }
+ }
+ break;
+ }
+ trace(ctx, "WHILE - LEFT");
+ return 0;
+ }
+
+ /**
+ * FOR cursor statement
+ */
+ public Integer forCursor(HplsqlParser.For_cursor_stmtContext ctx) {
+ trace(ctx, "FOR CURSOR - ENTERED");
+ exec.enterScope(Scope.Type.LOOP);
+ String cursor = ctx.L_ID().getText();
+ String sql = evalPop(ctx.select_stmt()).toString();
+ trace(ctx, sql);
+ Query query = exec.executeQuery(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ trace(ctx, "SELECT completed successfully");
+ exec.setSqlSuccess();
+ try {
+ ResultSet rs = query.getResultSet();
+ if (rs != null) {
+ ResultSetMetaData rm = rs.getMetaData();
+ int cols = rm.getColumnCount();
+ Var[] vars = new Var[cols];
+ for (int i = 0; i < cols; i++) {
+ vars[i] = new Var();
+ vars[i].setName(cursor + "." + rm.getColumnName(i + 1));
+ vars[i].setType(rm.getColumnType(i + 1));
+ exec.addVariable(vars[i]);
+ if (trace) {
+ trace(ctx, "Column: " + vars[i].getName() + " " + rm.getColumnTypeName(i + 1));
+ }
+ }
+ while (rs.next()) {
+ for (int i = 0; i < cols; i++) {
+ vars[i].setValue(rs, rm, i + 1);
+ }
+ visit(ctx.block());
+ exec.incRowCount();
+ }
+ }
+ }
+ catch (SQLException e) {
+ exec.signal(e);
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ exec.leaveScope();
+ trace(ctx, "FOR CURSOR - LEFT");
+ return 0;
+ }
+
+ /**
+ * FOR (integer range) statement
+ */
+ public Integer forRange(HplsqlParser.For_range_stmtContext ctx) {
+ trace(ctx, "FOR RANGE - ENTERED");
+ int start = evalPop(ctx.expr(0)).intValue();
+ int end = evalPop(ctx.expr(1)).intValue();
+ int step = evalPop(ctx.expr(2), 1L).intValue();
+ exec.enterScope(Scope.Type.LOOP);
+ Var index = new Var(ctx.L_ID().getText(), new Long(start));
+ exec.addVariable(index);
+ if (ctx.T_REVERSE() == null) {
+ for (int i = start; i <= end; i += step) {
+ visit(ctx.block());
+ index.increment(new Long(step));
+ }
+ } else {
+ for (int i = start; i >= end; i -= step) {
+ visit(ctx.block());
+ index.decrement(new Long(step));
+ }
+ }
+ exec.leaveScope();
+ trace(ctx, "FOR RANGE - LEFT");
+ return 0;
+ }
+
+ /**
+ * EXEC, EXECUTE and EXECUTE IMMEDIATE statement to execute dynamic SQL
+ */
+ public Integer exec(HplsqlParser.Exec_stmtContext ctx) {
+ if(trace) {
+ trace(ctx, "EXECUTE");
+ }
+ Var vsql = evalPop(ctx.expr());
+ String sql = vsql.toString();
+ if(trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if(query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ ResultSet rs = query.getResultSet();
+ if(rs != null) {
+ try {
+ ResultSetMetaData rsm = rs.getMetaData();
+ // Assign to variables
+ if(ctx.T_INTO() != null) {
+ int cols = ctx.L_ID().size();
+ if(rs.next()) {
+ for(int i=0; i < cols; i++) {
+ Var var = exec.findVariable(ctx.L_ID(i).getText());
+ if(var != null) {
+ var.setValue(rs, rsm, i+1);
+ if(trace) {
+ trace(ctx, "COLUMN: " + rsm.getColumnName(i+1) + ", " + rsm.getColumnTypeName(i+1));
+ trace(ctx, "SET " + var.getName() + " = " + var.toString());
+ }
+ }
+ else if(trace) {
+ trace(ctx, "Variable not found: " + ctx.L_ID(i).getText());
+ }
+ }
+ exec.setSqlCode(0);
+ }
+ }
+ // Print the results
+ else {
+ int cols = rsm.getColumnCount();
+ while(rs.next()) {
+ for(int i = 1; i <= cols; i++) {
+ if(i > 1) {
+ System.out.print("\t");
+ }
+ System.out.print(rs.getString(i));
+ }
+ System.out.println("");
+ }
+ }
+ }
+ catch(SQLException e) {
+ exec.setSqlCode(e);
+ }
+ }
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * EXIT statement (leave the specified loop with a condition)
+ */
+ public Integer exit(HplsqlParser.Exit_stmtContext ctx) {
+ trace(ctx, "EXIT");
+ String label = "";
+ if (ctx.L_ID() != null) {
+ label = ctx.L_ID().toString();
+ }
+ if (ctx.T_WHEN() != null) {
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ leaveLoop(label);
+ }
+ } else {
+ leaveLoop(label);
+ }
+ return 0;
+ }
+
+ /**
+ * BREAK statement (leave the innermost loop unconditionally)
+ */
+ public Integer break_(HplsqlParser.Break_stmtContext ctx) {
+ trace(ctx, "BREAK");
+ leaveLoop("");
+ return 0;
+ }
+
+ /**
+ * LEAVE statement (leave the specified loop unconditionally)
+ */
+ public Integer leave(HplsqlParser.Leave_stmtContext ctx) {
+ trace(ctx, "LEAVE");
+ String label = "";
+ if (ctx.L_ID() != null) {
+ label = ctx.L_ID().toString();
+ }
+ leaveLoop(label);
+ return 0;
+ }
+
+ /**
+ * Leave the specified or innermost loop unconditionally
+ */
+ public void leaveLoop(String value) {
+ exec.signal(Signal.Type.LEAVE_LOOP, value);
+ }
+
+ /**
+ * UPDATE statement
+ */
+ public Integer update(HplsqlParser.Update_stmtContext ctx) {
+ trace(ctx, "UPDATE");
+ String sql = exec.getFormattedText(ctx);
+ trace(ctx, sql);
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * DELETE statement
+ */
+ public Integer delete(HplsqlParser.Delete_stmtContext ctx) {
+ trace(ctx, "DELETE");
+ String table = evalPop(ctx.table_name()).toString();
+ StringBuilder sql = new StringBuilder();
+ sql.append("DELETE FROM ");
+ sql.append(table);
+ if (ctx.where_clause() != null) {
+ boolean oldBuildSql = exec.buildSql;
+ exec.buildSql = true;
+ sql.append(" " + evalPop(ctx.where_clause()).toString());
+ exec.buildSql = oldBuildSql;
+ }
+ trace(ctx, sql.toString());
+ Query query = exec.executeSql(ctx, sql.toString(), exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * MERGE statement
+ */
+ public Integer merge(HplsqlParser.Merge_stmtContext ctx) {
+ trace(ctx, "MERGE");
+ String sql = exec.getFormattedText(ctx);
+ trace(ctx, sql);
+ Query query = exec.executeSql(ctx, sql, exec.conf.defaultConnection);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ exec.closeQuery(query, exec.conf.defaultConnection);
+ return 0;
+ }
+
+ /**
+ * PRINT Statement
+ */
+ public Integer print(HplsqlParser.Print_stmtContext ctx) {
+ trace(ctx, "PRINT");
+ if (ctx.expr() != null) {
+ visit(ctx.expr());
+ System.out.println(stack.pop().toString());
+ }
+ return 0;
+ }
+
+ /**
+ * SIGNAL statement
+ */
+ public Integer signal(HplsqlParser.Signal_stmtContext ctx) {
+ trace(ctx, "SIGNAL");
+ Signal signal = new Signal(Signal.Type.USERDEFINED, ctx.ident().getText());
+ exec.signal(signal);
+ return 0;
+ }
+
+ /**
+ * RESIGNAL statement
+ */
+ public Integer resignal(HplsqlParser.Resignal_stmtContext ctx) {
+ trace(ctx, "RESIGNAL");
+ if (ctx.T_SQLSTATE() != null) {
+ String sqlstate = evalPop(ctx.expr(0)).toString();
+ String text = "";
+ if (ctx.T_MESSAGE_TEXT() != null) {
+ text = evalPop(ctx.expr(1)).toString();
+ }
+ SQLException exception = new SQLException(text, sqlstate, -1);
+ Signal signal = new Signal(Signal.Type.SQLEXCEPTION, text, exception);
+ exec.setSqlCode(exception);
+ exec.resignal(signal);
+ }
+ else {
+ exec.resignal();
+ }
+ return 0;
+ }
+
+ /**
+ * RETURN statement
+ */
+ public Integer return_(HplsqlParser.Return_stmtContext ctx) {
+ trace(ctx, "RETURN");
+ if (ctx.expr() != null) {
+ eval(ctx.expr());
+ }
+ exec.signal(Signal.Type.LEAVE_ROUTINE);
+ return 0;
+ }
+
+ /**
+ * Check if an exception is raised or EXIT executed, and we should leave the block
+ */
+ boolean canContinue(String label) {
+ Signal signal = exec.signalPeek();
+ if (signal != null && signal.type == Signal.Type.SQLEXCEPTION) {
+ return false;
+ }
+ signal = exec.signalPeek();
+ if (signal != null && signal.type == Signal.Type.LEAVE_LOOP) {
+ if (signal.value == null || signal.value.isEmpty() ||
+ (label != null && label.equalsIgnoreCase(signal.value))) {
+ exec.signalPop();
+ }
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Evaluate the expression and push the value to the stack
+ */
+ void eval(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ }
+
+ /**
+ * Evaluate the expression to specified String value
+ */
+ void evalString(String string) {
+ exec.stackPush(new Var(string));
+ }
+
+ void evalString(StringBuilder string) {
+ evalString(string.toString());
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ Var evalPop(ParserRuleContext ctx) {
+ visit(ctx);
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return Var.Empty;
+ }
+
+ Var evalPop(ParserRuleContext ctx, long def) {
+ if (ctx != null) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+ return new Var(def);
+ }
+
+ /**
+ * Execute rules
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Execute children rules
+ */
+ Integer visitChildren(ParserRuleContext ctx) {
+ return exec.visitChildren(ctx);
+ }
+
+ /**
+ * Trace information
+ */
+ void trace(ParserRuleContext ctx, String message) {
+ exec.trace(ctx, message);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java b/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java
new file mode 100644
index 0000000..d5a7cc4
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+import java.io.IOException;
+
/**
 * Read a stream from an external process and echo it line by line to
 * standard output until EOF.
 */
public class StreamGobbler extends Thread {
  InputStream is;

  StreamGobbler(InputStream is) {
    this.is = is;
  }

  @Override
  public void run() {
    // try-with-resources closes the reader (and the wrapped stream) even
    // if readLine throws; the original leaked the reader on all paths.
    try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
      String line;
      while ((line = br.readLine()) != null) {
        System.out.println(line);
      }
    } catch (IOException ioe) {
      ioe.printStackTrace();
    }
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java b/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java
new file mode 100644
index 0000000..9330eb4
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
/**
 * Simple wall-clock stopwatch: start(), stop(), and a human-readable
 * rendering of the elapsed interval.
 */
public class Timer {
  long start = 0;     // timestamp captured by start(), ms since epoch
  long stop = 0;      // timestamp captured by stop(), ms since epoch
  long elapsed = 0;   // stop - start, in milliseconds

  /** Start the timer and return the start timestamp in milliseconds. */
  public long start() {
    start = System.currentTimeMillis();
    return start;
  }

  /** Current wall-clock time in milliseconds; timer state is untouched. */
  public long current() {
    return System.currentTimeMillis();
  }

  /** Stop the timer, recording and returning the elapsed milliseconds. */
  public long stop() {
    stop = System.currentTimeMillis();
    elapsed = stop - start;
    return elapsed;
  }

  /** Render the elapsed time: "N ms" under one second, "X.XX sec" otherwise. */
  public String format() {
    return (elapsed < 1000)
        ? elapsed + " ms"
        : String.format("%.2f", ((float) elapsed) / 1000) + " sec";
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java b/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java
new file mode 100644
index 0000000..9c29eeb
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+
+@Description(name = "hplsql", value = "_FUNC_('query' [, :1, :2, ...n]) - Execute HPL/SQL query", extended = "Example:\n" + " > SELECT _FUNC_('CURRENT_DATE') FROM src LIMIT 1;\n")
+@UDFType(deterministic = false)
+public class Udf extends GenericUDF {
+
+ Exec exec;
+ StringObjectInspector queryOI;
+ ObjectInspector[] argumentsOI;
+
+ /**
+ * Initialize UDF
+ */
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length == 0) {
+ throw new UDFArgumentLengthException("At least one argument must be specified");
+ }
+ if (!(arguments[0] instanceof StringObjectInspector)) {
+ throw new UDFArgumentException("First argument must be a string");
+ }
+ queryOI = (StringObjectInspector)arguments[0];
+ argumentsOI = arguments;
+ return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
+ }
+
+ /**
+ * Execute UDF
+ */
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ if (exec == null) {
+ exec = new Exec();
+ String query = queryOI.getPrimitiveJavaObject(arguments[0].get());
+ String[] args = { "-e", query, "-trace" };
+ try {
+ exec.setUdfRun(true);
+ exec.init(args);
+ } catch (Exception e) {
+ throw new HiveException(e.getMessage());
+ }
+ }
+ if (arguments.length > 1) {
+ setParameters(arguments);
+ }
+ Var result = exec.run();
+ if (result != null) {
+ return result.toString();
+ }
+ return null;
+ }
+
+ /**
+ * Set parameters for the current call
+ */
+ void setParameters(DeferredObject[] arguments) throws HiveException {
+ for (int i = 1; i < arguments.length; i++) {
+ String name = ":" + i;
+ if (argumentsOI[i] instanceof StringObjectInspector) {
+ String value = ((StringObjectInspector)argumentsOI[i]).getPrimitiveJavaObject(arguments[i].get());
+ if (value != null) {
+ exec.setVariable(name, value);
+ }
+ }
+ else if (argumentsOI[i] instanceof IntObjectInspector) {
+ Integer value = (Integer)((IntObjectInspector)argumentsOI[i]).getPrimitiveJavaObject(arguments[i].get());
+ if (value != null) {
+ exec.setVariable(name, new Var(new Long(value)));
+ }
+ }
+ else if (argumentsOI[i] instanceof LongObjectInspector) {
+ Long value = (Long)((LongObjectInspector)argumentsOI[i]).getPrimitiveJavaObject(arguments[i].get());
+ if (value != null) {
+ exec.setVariable(name, new Var(value));
+ }
+ }
+ else {
+ exec.setVariableToNull(name);
+ }
+ }
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ return "hplsql";
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java b/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
new file mode 100644
index 0000000..da0d878
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
@@ -0,0 +1,289 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+
+public class Utils {
+
+ /**
+ * Unquote string and remove escape characters inside the script
+ */
+ public static String unquoteString(String s) {
+ if(s == null) {
+ return null;
+ }
+
+ int len = s.length();
+ StringBuffer s2 = new StringBuffer(len);
+
+ for (int i = 0; i < len; i++) {
+ char ch = s.charAt(i);
+ char ch2 = (i < len - 1) ? s.charAt(i+1) : 0;
+
+ if((i == 0 || i == len -1) && (ch == '\'' || ch == '"'))
+ continue;
+ else
+ // \' and '' escape sequences
+ if((ch == '\\' && ch2 == '\'') || (ch == '\'' && ch2 == '\''))
+ continue;
+
+ s2.append(ch);
+ }
+
+ return s2.toString();
+ }
+
+ /**
+ * Quote string and escape characters - ab'c -> 'ab''c'
+ */
+ public static String quoteString(String s) {
+ if(s == null) {
+ return null;
+ }
+ int len = s.length();
+ StringBuilder s2 = new StringBuilder(len + 2).append('\'');
+
+ for (int i = 0; i < len; i++) {
+ char ch = s.charAt(i);
+ s2.append(ch);
+ if(ch == '\'') {
+ s2.append(ch);
+ }
+ }
+ s2.append('\'');
+ return s2.toString();
+ }
+
+ /**
+ * Merge quoted strings: 'a' 'b' -> 'ab'; 'a''b' 'c' -> 'a''bc'
+ */
+ public static String mergeQuotedStrings(String s1, String s2) {
+ if(s1 == null || s2 == null) {
+ return null;
+ }
+
+ int len1 = s1.length();
+ int len2 = s2.length();
+
+ if(len1 == 0 || len2 == 0) {
+ return s1;
+ }
+
+ return s1.substring(0, len1 - 1) + s2.substring(1);
+ }
+
+ /**
+ * Convert String to Date
+ */
+ public static Date toDate(String s) {
+ int len = s.length();
+ if(len >= 10) {
+ int c4 = s.charAt(4);
+ int c7 = s.charAt(7);
+ // YYYY-MM-DD
+ if(c4 == '-' && c7 == '-') {
+ return Date.valueOf(s.substring(0, 10));
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Convert String to Timestamp
+ */
+ public static Timestamp toTimestamp(String s) {
+ int len = s.length();
+ if(len >= 10) {
+ int c4 = s.charAt(4);
+ int c7 = s.charAt(7);
+ // YYYY-MM-DD
+ if(c4 == '-' && c7 == '-') {
+ // Convert DB2 syntax: YYYY-MM-DD-HH.MI.SS.FFF
+ if(len > 19) {
+ if(s.charAt(10) == '-') {
+ String s2 = s.substring(0, 10) + ' ' + s.substring(11, 13) + ':' + s.substring(14, 16) + ':' +
+ s.substring(17);
+ return Timestamp.valueOf(s2);
+ }
+ }
+ else if(len == 10) {
+ s += " 00:00:00.000";
+ }
+ return Timestamp.valueOf(s);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Compare two String values and return min or max
+ */
+ public static String minMaxString(String s1, String s2, boolean max) {
+ if(s1 == null) {
+ return s2;
+ }
+ else if(s2 == null) {
+ return s1;
+ }
+ int cmp = s1.compareTo(s2);
+ if((max && cmp < 0) || (!max && cmp > 0)) {
+ return s2;
+ }
+ return s1;
+ }
+
+ /**
+ * Compare two Int values and return min or max
+ */
+ public static Long minMaxInt(Long i1, String s, boolean max) {
+ Long i2 = null;
+ try {
+ i2 = Long.parseLong(s);
+ }
+ catch(NumberFormatException e) {}
+ if(i1 == null) {
+ return i2;
+ }
+ else if(i2 == null) {
+ return i1;
+ }
+ if((max && i1.longValue() < i2.longValue()) || (!max && i1.longValue() > i2.longValue())) {
+ return i2;
+ }
+ return i1;
+ }
+
+ /**
+ * Compare two Date values and return min or max
+ */
+ public static Date minMaxDate(Date d1, String s, boolean max) {
+ Date d2 = Utils.toDate(s);
+ if(d1 == null) {
+ return d2;
+ } else if(d2 == null) {
+ return d1;
+ }
+ if((max && d1.before(d2)) || (!max && d1.after(d2))) {
+ return d2;
+ }
+ return d1;
+ }
+
+ /**
+ * Convert String array to a string with the specified delimiter
+ */
+ public static String toString(String[] a, char del) {
+ StringBuilder s = new StringBuilder();
+ for(int i=0; i < a.length; i++) {
+ if(i > 0) {
+ s.append(del);
+ }
+ s.append(a[i]);
+ }
+ return s.toString();
+ }
+
+ /**
+ * Convert SQL datetime format string to Java SimpleDateFormat
+ */
+ public static String convertSqlDatetimeFormat(String in) {
+ StringBuilder out = new StringBuilder();
+ int len = in.length();
+ int i = 0;
+ while (i < len) {
+ if (i + 4 <= len && in.substring(i, i + 4).compareTo("YYYY") == 0) {
+ out.append("yyyy");
+ i += 4;
+ }
+ else if (i + 2 <= len && in.substring(i, i + 2).compareTo("mm") == 0) {
+ out.append("MM");
+ i += 2;
+ }
+ else if (i + 2 <= len && in.substring(i, i + 2).compareTo("DD") == 0) {
+ out.append("dd");
+ i += 2;
+ }
+ else if (i + 4 <= len && in.substring(i, i + 4).compareToIgnoreCase("HH24") == 0) {
+ out.append("HH");
+ i += 4;
+ }
+ else if (i + 2 <= len && in.substring(i, i + 2).compareToIgnoreCase("MI") == 0) {
+ out.append("mm");
+ i += 2;
+ }
+ else if (i + 2 <= len && in.substring(i, i + 2).compareTo("SS") == 0) {
+ out.append("ss");
+ i += 2;
+ }
+ else {
+ out.append(in.charAt(i));
+ i++;
+ }
+ }
+ return out.toString();
+ }
+
+ /**
+ * Get the executable directory
+ */
+ public static String getExecDir() {
+ String dir = Hplsql.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+ if (dir.endsWith(".jar")) {
+ dir = dir.substring(0, dir.lastIndexOf("/") + 1);
+ }
+ return dir;
+ }
+
+ /**
+ * Format size value specified in bytes
+ */
+ public static String formatSizeInBytes(long bytes, String postfix) {
+ String out;
+ if (bytes < 1024) {
+ out = bytes + " bytes";
+ }
+ else if (bytes < 1024 * 1024) {
+ out = String.format("%.1f", ((float)bytes)/1024) + " KB";
+ }
+ else if (bytes < 1024 * 1024 * 1024) {
+ out = String.format("%.1f", ((float)bytes)/(1024 * 1024)) + " MB";
+ }
+ else {
+ out = String.format("%.1f", ((float)bytes)/(1024 * 1024 * 1024)) + " GB";
+ }
+ if (postfix != null && !postfix.isEmpty()) {
+ out += postfix;
+ }
+ return out;
+ }
+
+ public static String formatSizeInBytes(long bytes) {
+ return Utils.formatSizeInBytes(bytes, null);
+ }
+
+ /**
+ * Format bytes per second rate
+ */
+ public static String formatBytesPerSec(long bytes, long msElapsed) {
+ float bytesPerSec = ((float)bytes)/msElapsed*1000;
+ return Utils.formatSizeInBytes((long)bytesPerSec, "/sec");
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
new file mode 100644
index 0000000..0a4ead2
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
@@ -0,0 +1,430 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Date;
+import java.sql.Timestamp;
+
+/**
+ * Variable or the result of expression
+ */
+public class Var {
+
+ // Data types
+ public enum Type {BOOL, CURSOR, DATE, DEC, FILE, IDENT, BIGINT, INTERVAL, STRING, STRINGLIST, TIMESTAMP, NULL};
+ public static Var Empty = new Var();
+ public static Var Null = new Var(Type.NULL);
+
+ public String name;
+ public Type type;
+ public Object value;
+
+ int len;
+ int scale;
+
+ public Var() {
+ type = Type.NULL;
+ }
+
+ public Var(Var var) {
+ name = var.name;
+ type = var.type;
+ value = var.value;
+ len = var.len;
+ scale = var.scale;
+ }
+
+ public Var(Long value) {
+ this.type = Type.BIGINT;
+ this.value = value;
+ }
+
+ public Var(BigDecimal value) {
+ this.type = Type.DEC;
+ this.value = value;
+ }
+
+ public Var(String name, Long value) {
+ this.type = Type.BIGINT;
+ this.name = name;
+ this.value = value;
+ }
+
+ public Var(String value) {
+ this.type = Type.STRING;
+ this.value = value;
+ }
+
+ public Var(Date value) {
+ this.type = Type.DATE;
+ this.value = value;
+ }
+
+ public Var(Timestamp value, int scale) {
+ this.type = Type.TIMESTAMP;
+ this.value = value;
+ this.scale = scale;
+ }
+
+ public Var(Interval value) {
+ this.type = Type.INTERVAL;
+ this.value = value;
+ }
+
+ public Var(ArrayList<String> value) {
+ this.type = Type.STRINGLIST;
+ this.value = value;
+ }
+
+ public Var(Boolean b) {
+ type = Type.BOOL;
+ value = b;
+ }
+
+ public Var(Type type, String name) {
+ this.type = type;
+ this.name = name;
+ }
+
+ public Var(Type type, Object value) {
+ this.type = type;
+ this.value = value;
+ }
+
+ public Var(String name, Type type, Object value) {
+ this.name = name;
+ this.type = type;
+ this.value = value;
+ }
+
+ public Var(Type type) {
+ this.type = type;
+ }
+
+ public Var(String name, String type, String len, String scale, Var def) {
+ this.name = name;
+ setType(type);
+ if (len != null) {
+ this.len = Integer.parseInt(len);
+ }
+ if (scale != null) {
+ this.scale = Integer.parseInt(scale);
+ }
+ if (def != null) {
+ cast(def);
+ }
+ }
+
+ /**
+ * Cast a new value to the variable
+ */
+ public Var cast(Var val) {
+ if (val == null || val.value == null) {
+ value = null;
+ }
+ else if (type == val.type && type == Type.STRING) {
+ cast((String)val.value);
+ }
+ else if (type == val.type) {
+ value = val.value;
+ }
+ else if (type == Type.STRING) {
+ cast(val.toString());
+ }
+ else if (type == Type.DATE) {
+ value = Utils.toDate(val.toString());
+ }
+ else if (type == Type.TIMESTAMP) {
+ value = Utils.toTimestamp(val.toString());
+ }
+ return this;
+ }
+
+ /**
+ * Cast a new string value to the variable
+ */
+ public Var cast(String val) {
+ if (type == Type.STRING) {
+ if (len != 0 ) {
+ int l = val.length();
+ if (l > len) {
+ value = val.substring(0, len);
+ return this;
+ }
+ }
+ value = val;
+ }
+ return this;
+ }
+
+ /**
+ * Set the new value
+ */
+ public void setValue(String str) {
+ if(type == Type.STRING) {
+ value = str;
+ }
+ }
+
+ public Var setValue(Long val) {
+ if (type == Type.BIGINT) {
+ value = val;
+ }
+ return this;
+ }
+
+ /**
+ * Set the new value from a result set
+ */
+ public Var setValue(ResultSet rs, ResultSetMetaData rsm, int idx) throws SQLException {
+ int type = rsm.getColumnType(idx);
+ if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) {
+ cast(new Var(rs.getString(idx)));
+ }
+ else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT) {
+ cast(new Var(new Long(rs.getLong(idx))));
+ }
+ return this;
+ }
+
+ /**
+ * Set the data type from string representation
+ */
+ void setType(String type) {
+ this.type = defineType(type);
+ }
+
+ /**
+ * Set the data type from JDBC type code
+ */
+ void setType(int type) {
+ this.type = defineType(type);
+ }
+
+ /**
+ * Define the data type from string representation
+ */
+ public static Type defineType(String type) {
+ if (type == null) {
+ return Type.NULL;
+ }
+ else if (type.equalsIgnoreCase("INT") || type.equalsIgnoreCase("INTEGER")) {
+ return Type.BIGINT;
+ }
+ else if (type.equalsIgnoreCase("CHAR") || type.equalsIgnoreCase("VARCHAR") || type.equalsIgnoreCase("STRING")) {
+ return Type.STRING;
+ }
+ else if (type.equalsIgnoreCase("DATE")) {
+ return Type.DATE;
+ }
+ else if (type.equalsIgnoreCase("TIMESTAMP")) {
+ return Type.TIMESTAMP;
+ }
+ else if (type.equalsIgnoreCase("UTL_FILE.FILE_TYPE")) {
+ return Type.FILE;
+ }
+ return Type.NULL;
+ }
+
+ /**
+ * Define the data type from JDBC type code
+ */
+ public static Type defineType(int type) {
+ if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) {
+ return Type.STRING;
+ }
+ else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT) {
+ return Type.BIGINT;
+ }
+ return Type.NULL;
+ }
+
+ /**
+ * Remove value
+ */
+ public void removeValue() {
+ type = Type.NULL;
+ name = null;
+ value = null;
+ len = 0;
+ scale = 0;
+ }
+
+ /*
+ * Compare values
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ else if (obj == null || this.value == null) {
+ return false;
+ }
+ else if (getClass() != obj.getClass()) {
+ return false;
+ }
+
+ Var var = (Var)obj;
+ if (type == Type.BIGINT && var.type == Type.BIGINT &&
+ ((Long)value).longValue() == ((Long)var.value).longValue()) {
+ return true;
+ }
+ else if (type == Type.STRING && var.type == Type.STRING &&
+ ((String)value).equals((String)var.value)) {
+ return true;
+ }
+ return false;
+ }
+
+ /*
+ * Compare values
+ */
+ public int compareTo(Var v) {
+ if (this == v) {
+ return 0;
+ }
+ else if (v == null) {
+ return -1;
+ }
+ else if (type == Type.BIGINT && v.type == Type.BIGINT) {
+ return ((Long)value).compareTo((Long)v.value);
+ }
+ else if (type == Type.STRING && v.type == Type.STRING) {
+ return ((String)value).compareTo((String)v.value);
+ }
+ return -1;
+ }
+
+ /**
+ * Increment an integer value
+ */
+ public Var increment(Long i) {
+ if (type == Type.BIGINT) {
+ value = new Long(((Long)value).longValue() + i);
+ }
+ return this;
+ }
+
+ /**
+ * Decrement an integer value
+ */
+ public Var decrement(Long i) {
+ if (type == Type.BIGINT) {
+ value = new Long(((Long)value).longValue() - i);
+ }
+ return this;
+ }
+
+ /**
+ * Return an integer value
+ */
+ public int intValue() {
+ if (type == Type.BIGINT) {
+ return ((Long)value).intValue();
+ }
+ return -1;
+ }
+
+ /**
+ * Return true/false for BOOL type
+ */
+ public boolean isTrue() {
+ if(type == Type.BOOL && value != null) {
+ return ((Boolean)value).booleanValue();
+ }
+ return false;
+ }
+
+ /**
+ * Check if the variable contains NULL
+ */
+ public boolean isNull() {
+ if (type == Type.NULL || value == null) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Convert value to String
+ */
+ @Override
+ public String toString() {
+ if (type == Type.IDENT) {
+ return name;
+ }
+ else if (value == null) {
+ return null;
+ }
+ else if (type == Type.BIGINT) {
+ return ((Long)value).toString();
+ }
+ else if (type == Type.STRING) {
+ return (String)value;
+ }
+ else if (type == Type.DATE) {
+ return ((Date)value).toString();
+ }
+ else if (type == Type.TIMESTAMP) {
+ int len = 19;
+ String t = ((Timestamp)value).toString(); // .0 returned if the fractional part not set
+ if (scale > 0) {
+ len += scale + 1;
+ }
+ if (t.length() > len) {
+ t = t.substring(0, len);
+ }
+ return t;
+ }
+ return value.toString();
+ }
+
+ /**
+ * Convert value to SQL string - string literals are quoted and escaped, ab'c -> 'ab''c'
+ */
+ public String toSqlString() {
+ if (value == null) {
+ return "NULL";
+ }
+ else if (type == Type.STRING) {
+ return Utils.quoteString((String)value);
+ }
+ return toString();
+ }
+
+ /**
+ * Set variable name
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * Get variable name
+ */
+ public String getName() {
+ return name;
+ }
+}
[3/4] hive git commit: HIVE-11055 HPL/SQL - Implementing Procedural
SQL in Hive (PL/HQL Contribution) (Dmitry Tolpeko via gates)
Posted by ga...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
new file mode 100644
index 0000000..9ec8959
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -0,0 +1,1950 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.math.BigDecimal;
+import java.io.ByteArrayInputStream;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Stack;
+import java.util.Iterator;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import org.antlr.v4.runtime.ANTLRInputStream;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.misc.NotNull;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.apache.commons.io.FileUtils;
+import org.apache.hive.hplsql.functions.*;
+
+/**
+ * HPL/SQL script executor
+ *
+ */
+public class Exec extends HplsqlBaseVisitor<Integer> {
+
+ public static final String VERSION = "HPL/SQL 0.3.11";
+ public static final String SQLCODE = "SQLCODE";
+ public static final String SQLSTATE = "SQLSTATE";
+ public static final String HOSTCODE = "HOSTCODE";
+
+ Exec exec = null;
+ ParseTree tree = null;
+
+ public enum OnError {EXCEPTION, SETERROR, STOP};
+
+ // Scopes of execution (code blocks) with own local variables, parameters and exception handlers
+ Stack<Scope> scopes = new Stack<Scope>();
+ Scope currentScope;
+
+ Stack<Var> stack = new Stack<Var>();
+ Stack<String> labels = new Stack<String>();
+
+ Stack<Signal> signals = new Stack<Signal>();
+ Signal currentSignal;
+ Scope currentHandlerScope;
+ boolean resignal = false;
+
+ HashMap<String, String> managedTables = new HashMap<String, String>();
+ HashMap<String, String> objectMap = new HashMap<String, String>();
+ HashMap<String, String> objectConnMap = new HashMap<String, String>();
+
+ public ArrayList<String> stmtConnList = new ArrayList<String>();
+
+ Arguments arguments = new Arguments();
+ public Conf conf;
+ Expression expr;
+ Function function;
+ Converter converter;
+ Select select;
+ Stmt stmt;
+ Conn conn;
+
+ int rowCount = 0;
+
+ String execString;
+ String execFile;
+ String execMain;
+ StringBuilder localUdf = new StringBuilder();
+ boolean initRoutines = false;
+ public boolean buildSql = false;
+ boolean udfRegistered = false;
+ boolean udfRun = false;
+
+ boolean dotHplsqlrcExists = false;
+ boolean hplsqlrcExists = false;
+
+ boolean trace = false;
+ boolean info = true;
+ boolean offline = false;
+
+ Exec() {
+ exec = this;
+ }
+
+ Exec(Exec exec) {
+ this.exec = exec;
+ }
+
+ /**
+ * Set a variable using a value from the parameter or the stack.
+ * When value is null or Var.Empty, the value is popped from the evaluation
+ * stack instead (Var.Empty is returned if the stack is empty).
+ * Names with the "hplsql." prefix are routed to configuration options
+ * rather than script variables.
+ */
+ public Var setVariable(String name, Var value) {
+ if (value == null || value == Var.Empty) {
+ if (exec.stack.empty()) {
+ return Var.Empty;
+ }
+ value = exec.stack.pop();
+ }
+ // Configuration option, not a script variable
+ if (name.startsWith("hplsql.")) {
+ exec.conf.setOption(name, value.toString());
+ return Var.Empty;
+ }
+ // Update an existing variable, or declare a new one in the current scope
+ Var var = findVariable(name);
+ if (var != null) {
+ var.cast(value);
+ }
+ else {
+ var = new Var(value);
+ var.setName(name);
+ exec.currentScope.addVariable(var);
+ }
+ return var;
+ }
+
+ public Var setVariable(String name) {
+ return setVariable(name, Var.Empty);
+ }
+
+ public Var setVariable(String name, String value) {
+ return setVariable(name, new Var(value));
+ }
+
+ public Var setVariable(String name, int value) {
+ return setVariable(name, new Var(new Long(value)));
+ }
+
+ /**
+ * Set variable to NULL
+ */
+ public Var setVariableToNull(String name) {
+ Var var = findVariable(name);
+ if (var != null) {
+ var.removeValue();
+ }
+ else {
+ var = new Var();
+ var.setName(name);
+ exec.currentScope.addVariable(var);
+ }
+ return var;
+ }
+
+ /**
+ * Add a local variable to the current scope
+ */
+ public void addVariable(Var var) {
+ if (exec.currentScope != null) {
+ exec.currentScope.addVariable(var);
+ }
+ }
+
+ /**
+ * Add a condition handler to the current scope
+ */
+ public void addHandler(Handler handler) {
+ if (exec.currentScope != null) {
+ exec.currentScope.addHandler(handler);
+ }
+ }
+
+ /**
+ * Push a value to the stack
+ */
+ public void stackPush(Var var) {
+ exec.stack.push(var);
+ }
+
+ /**
+ * Push a string value to the stack
+ */
+ public void stackPush(String val) {
+ exec.stack.push(new Var(val));
+ }
+
+ public void stackPush(StringBuilder val) {
+ stackPush(val.toString());
+ }
+
+ /**
+ * Push a boolean value to the stack
+ */
+ public void stackPush(boolean val) {
+ exec.stack.push(new Var(val));
+ }
+
+ /**
+ * Select a value from the stack, but not remove
+ */
+ public Var stackPeek() {
+ return exec.stack.peek();
+ }
+
+ /**
+ * Pop a value from the stack
+ */
+ public Var stackPop() {
+ if (!exec.stack.isEmpty()) {
+ return exec.stack.pop();
+ }
+ return null;
+ }
+
+ /**
+ * Find an existing variable by name
+ */
+ public Var findVariable(String name) {
+ Scope cur = exec.currentScope;
+ String name2 = null;
+ if (name.startsWith(":")) {
+ name2 = name.substring(1);
+ }
+ while (cur != null) {
+ for (Var v : cur.vars) {
+ if (name.equalsIgnoreCase(v.getName()) ||
+ (name2 != null && name2.equalsIgnoreCase(v.getName()))) {
+ return v;
+ }
+ }
+ cur = cur.parent;
+ }
+ return null;
+ }
+
+ public Var findVariable(Var name) {
+ return findVariable(name.getName());
+ }
+
+ /**
+ * Enter a new scope
+ */
+ public void enterScope(Scope.Type type) {
+ exec.currentScope = new Scope(exec.currentScope, type);
+ exec.scopes.push(exec.currentScope);
+ }
+
+ /**
+ * Leave the current scope
+ */
+ public void leaveScope() {
+ if (!exec.signals.empty()) {
+ Scope scope = exec.scopes.peek();
+ Signal signal = exec.signals.peek();
+ if (exec.conf.onError != OnError.SETERROR) {
+ runExitHandler();
+ }
+ if (signal.type == Signal.Type.LEAVE_ROUTINE && scope.type == Scope.Type.ROUTINE) {
+ exec.signals.pop();
+ }
+ }
+ exec.currentScope = exec.scopes.pop().getParent();
+ }
+
+ /**
+ * Send a signal
+ */
+ public void signal(Signal signal) {
+ exec.signals.push(signal);
+ }
+
+ public void signal(Signal.Type type, String value, Exception exception) {
+ signal(new Signal(type, value, exception));
+ }
+
+ public void signal(Signal.Type type, String value) {
+ signal(type, value, null);
+ }
+
+ public void signal(Signal.Type type) {
+ signal(type, null, null);
+ }
+
+ public void signal(Query query) {
+ setSqlCode(query.getException());
+ signal(Signal.Type.SQLEXCEPTION, query.errorText(), query.getException());
+ }
+
+ public void signal(Exception exception) {
+ setSqlCode(exception);
+ signal(Signal.Type.SQLEXCEPTION, exception.getMessage(), exception);
+ }
+
+ /**
+ * Resignal the condition
+ */
+ public void resignal() {
+ resignal(exec.currentSignal);
+ }
+
+ public void resignal(Signal signal) {
+ if (signal != null) {
+ exec.resignal = true;
+ signal(signal);
+ }
+ }
+
+ /**
+ * Run CONTINUE handlers
+ */
+ boolean runContinueHandler() {
+ Scope cur = exec.currentScope;
+ exec.currentSignal = exec.signals.pop();
+ while (cur != null) {
+ for (Handler h : cur.handlers) {
+ if (h.execType != Handler.ExecType.CONTINUE) {
+ continue;
+ }
+ if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type) ||
+ (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type &&
+ h.value.equalsIgnoreCase(exec.currentSignal.value))) {
+ trace(h.ctx, "CONTINUE HANDLER");
+ enterScope(Scope.Type.HANDLER);
+ exec.currentHandlerScope = h.scope;
+ visit(h.ctx.single_block_stmt());
+ leaveScope();
+ exec.currentSignal = null;
+ return true;
+ }
+ }
+ cur = cur.parent;
+ }
+ exec.signals.push(exec.currentSignal);
+ exec.currentSignal = null;
+ return false;
+ }
+
+ /**
+ * Run EXIT handler defined for the current scope
+ */
+ boolean runExitHandler() {
+ exec.currentSignal = exec.signals.pop();
+ for (Handler h : currentScope.handlers) {
+ if (h.execType != Handler.ExecType.EXIT) {
+ continue;
+ }
+ if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type) ||
+ (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type &&
+ h.value.equalsIgnoreCase(currentSignal.value))) {
+ trace(h.ctx, "EXIT HANDLER");
+ enterScope(Scope.Type.HANDLER);
+ exec.currentHandlerScope = h.scope;
+ visit(h.ctx.single_block_stmt());
+ leaveScope();
+ exec.currentSignal = null;
+ return true;
+ }
+ }
+ exec.signals.push(exec.currentSignal);
+ exec.currentSignal = null;
+ return false;
+ }
+
+ /**
+ * Pop the last signal
+ */
+ public Signal signalPop() {
+ if (!exec.signals.empty()) {
+ return exec.signals.pop();
+ }
+ return null;
+ }
+
+ /**
+ * Peek the last signal
+ */
+ public Signal signalPeek() {
+ if (!exec.signals.empty()) {
+ return exec.signals.peek();
+ }
+ return null;
+ }
+
+ /**
+ * Pop the current label
+ */
+ public String labelPop() {
+ if(!exec.labels.empty()) {
+ return exec.labels.pop();
+ }
+ return "";
+ }
+
+ /**
+ * Execute a SQL query (SELECT)
+ */
+ public Query executeQuery(ParserRuleContext ctx, Query query, String connProfile) {
+ if (!exec.offline) {
+ exec.rowCount = 0;
+ exec.conn.executeQuery(query, connProfile);
+ return query;
+ }
+ setSqlNoData();
+ trace(ctx, "Not executed - offline mode set");
+ return query;
+ }
+
+ public Query executeQuery(ParserRuleContext ctx, String sql, String connProfile) {
+ return executeQuery(ctx, new Query(sql), connProfile);
+ }
+
+ /**
+ * Execute a SQL statement
+ */
+ public Query executeSql(ParserRuleContext ctx, String sql, String connProfile) {
+ if (!exec.offline) {
+ exec.rowCount = 0;
+ Query query = conn.executeSql(sql, connProfile);
+ exec.rowCount = query.getRowCount();
+ return query;
+ }
+ trace(ctx, "Not executed - offline mode set");
+ return new Query("");
+ }
+
+ /**
+ * Close the query object
+ */
+ public void closeQuery(Query query, String conn) {
+ if(!exec.offline) {
+ exec.conn.closeQuery(query, conn);
+ }
+ }
+
  /**
   * Register JARs, FILEs and CREATE TEMPORARY FUNCTION for the hplsql UDF.
   * Queues the ADD JAR / ADD FILE / CREATE TEMPORARY FUNCTION statements as
   * pre-SQL on the default connection. Runs at most once per Exec instance
   * (guarded by udfRegistered).
   */
  public void registerUdf() {
    if (udfRegistered) {
      return;
    }
    ArrayList<String> sql = new ArrayList<String>();
    String dir = Utils.getExecDir();
    sql.add("ADD JAR " + dir + "hplsql.jar");
    sql.add("ADD JAR " + dir + "antlr-runtime-4.5.jar");
    sql.add("ADD FILE " + dir + Conf.SITE_XML);
    // Ship whichever rc file was found at startup (see includeRcFile)
    if (dotHplsqlrcExists) {
      sql.add("ADD FILE " + dir + Conf.DOT_HPLSQLRC);
    }
    if (hplsqlrcExists) {
      sql.add("ADD FILE " + dir + Conf.HPLSQLRC);
    }
    // Functions/procedures defined in the current script, if any
    String lu = createLocalUdf();
    if (lu != null) {
      sql.add("ADD FILE " + lu);
    }
    sql.add("CREATE TEMPORARY FUNCTION hplsql AS 'org.apache.hive.hplsql.Udf'");
    exec.conn.addPreSql(exec.conf.defaultConnection, sql);
    udfRegistered = true;
  }
+
+ /**
+ * Initialize options
+ */
+ void initOptions() {
+ Iterator<Map.Entry<String,String>> i = exec.conf.iterator();
+ while (i.hasNext()) {
+ Entry<String,String> item = (Entry<String,String>)i.next();
+ String key = (String)item.getKey();
+ String value = (String)item.getValue();
+ if (key == null || value == null) {
+ continue;
+ }
+ else if (key.compareToIgnoreCase(Conf.CONN_DEFAULT) == 0) {
+ exec.conf.defaultConnection = value;
+ }
+ else if (key.startsWith("hplsql.conn.init.")) {
+ exec.conn.addConnectionInit(key.substring(16), value);
+ }
+ else if (key.startsWith(Conf.CONN_CONVERT)) {
+ exec.conf.setConnectionConvert(key.substring(19), value);
+ }
+ else if (key.startsWith("hplsql.conn.")) {
+ exec.conn.addConnection(key.substring(11), value);
+ }
+ else if (key.startsWith("hplsql.")) {
+ exec.conf.setOption(key, value);
+ }
+ }
+ }
+
+ /**
+ * Set SQLCODE
+ */
+ public void setSqlCode(int sqlcode) {
+ Var var = findVariable(SQLCODE);
+ if (var != null) {
+ var.setValue(new Long(sqlcode));
+ }
+ }
+
  /**
   * Set SQLCODE/SQLSTATE from an exception. SQLExceptions provide their own
   * vendor error code and SQLSTATE; any other exception maps to -1 / "02000".
   */
  public void setSqlCode(Exception exception) {
    if (exception instanceof SQLException) {
      setSqlCode(((SQLException)exception).getErrorCode());
      setSqlState(((SQLException)exception).getSQLState());
    }
    else {
      setSqlCode(-1);
      setSqlState("02000");
    }
  }
+
+ /**
+ * Set SQLSTATE
+ */
+ public void setSqlState(String sqlstate) {
+ Var var = findVariable(SQLSTATE);
+ if (var != null) {
+ var.setValue(sqlstate);
+ }
+ }
+
+ /**
+ * Set HOSTCODE
+ */
+ public void setHostCode(int code) {
+ Var var = findVariable(HOSTCODE);
+ if (var != null) {
+ var.setValue(new Long(code));
+ }
+ }
+
  /**
   * Set successful execution for SQL: SQLCODE 0, SQLSTATE "00000".
   */
  public void setSqlSuccess() {
    setSqlCode(0);
    setSqlState("00000");
  }
+
  /**
   * Set SQL_NO_DATA as the result of SQL execution: SQLCODE 100,
   * SQLSTATE "01000".
   */
  public void setSqlNoData() {
    setSqlCode(100);
    setSqlState("01000");
  }
+
  /**
   * Compile and run a PL/HQL script from command-line arguments.
   * Prints the final stack value if any, drops managed temporary tables,
   * reports unhandled signals, and derives the process return code.
   *
   * @return 1 when initialization/argument parsing fails, otherwise the
   *         program return code (see getProgramReturnCode)
   */
  public Integer run(String[] args) throws Exception {
    if (init(args) != 0) {
      return 1;
    }
    Var result = run();
    if (result != null) {
      System.out.println(result.toString());
    }
    cleanup();
    printExceptions();
    return getProgramReturnCode();
  }
+
  /**
   * Run the already compiled PL/HQL script (also used from the Hive UDF).
   * When a main routine was requested (-main), a first pass over the tree
   * only registers procedures/functions (initRoutines flag), then the
   * requested routine is invoked; otherwise the whole tree is executed.
   *
   * @return the value left on top of the stack, or null when no tree exists
   */
  public Var run() {
    if (tree == null) {
      return null;
    }
    if (execMain != null) {
      initRoutines = true;
      visit(tree);
      initRoutines = false;
      exec.function.execProc(execMain);
    }
    else {
      visit(tree);
    }
    return stackPop();
  }
+
  /**
   * Initialize PL/HQL: parse arguments, load configuration, create the
   * connection manager and sub-executors, register built-in functions,
   * declare the predefined status variables, and parse the script source
   * (from -e text or -f file) into a parse tree.
   *
   * @return 0 on success, 1 when argument parsing fails
   */
  Integer init(String[] args) throws Exception {
    if (!parseArguments(args)) {
      return 1;
    }
    conf = new Conf();
    conf.init();
    conn = new Conn(this);
    initOptions();

    expr = new Expression(this);
    select = new Select(this);
    stmt = new Stmt(this);
    converter = new Converter(this);

    // Built-in function libraries register themselves with the dispatcher
    function = new Function(this);
    new FunctionDatetime(this).register(function);
    new FunctionMisc(this).register(function);
    new FunctionString(this).register(function);
    new FunctionOra(this).register(function);

    enterScope(Scope.Type.FILE);
    // Predefined status variables available to every script
    addVariable(new Var(SQLCODE, Var.Type.BIGINT, 0L));
    addVariable(new Var(SQLSTATE, Var.Type.STRING, "00000"));
    addVariable(new Var(HOSTCODE, Var.Type.BIGINT, 0L));

    // Variables passed on the command line
    for (Map.Entry<String, String> v : arguments.getVars().entrySet()) {
      addVariable(new Var(v.getKey(), Var.Type.STRING, v.getValue()));
    }
    InputStream input = null;
    if (execString != null) {
      input = new ByteArrayInputStream(execString.getBytes("UTF-8"));
    }
    else {
      input = new FileInputStream(execFile);
    }
    HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    HplsqlParser parser = new HplsqlParser(tokens);
    tree = parser.program();
    if (trace) {
      System.err.println("Configuration file: " + conf.getLocation());
      System.err.println("Parser tree: " + tree.toStringTree(parser));
    }
    includeRcFile();
    return 0;
  }
+
  /**
   * Parse command line arguments.
   * Side effects: sets execString/execFile/execMain and the trace/offline
   * flags from the parsed options.
   *
   * @return false (after printing version, help, or an error) when execution
   *         should not proceed: parse failure, -version/-help requested,
   *         no script source given, or both -e and -f specified
   */
  boolean parseArguments(String[] args) {
    boolean parsed = arguments.parse(args);
    if (parsed && arguments.hasVersionOption()) {
      System.err.println(VERSION);
      return false;
    }
    if (!parsed || arguments.hasHelpOption() ||
      (arguments.getExecString() == null && arguments.getFileName() == null)) {
      arguments.printHelp();
      return false;
    }
    execString = arguments.getExecString();
    execFile = arguments.getFileName();
    execMain = arguments.getMain();
    if (arguments.hasTraceOption()) {
      trace = true;
    }
    if (arguments.hasOfflineOption()) {
      offline = true;
    }
    // -e and -f are mutually exclusive script sources
    if (execString != null && execFile != null) {
      System.err.println("The '-e' and '-f' options cannot be specified simultaneously.");
      return false;
    }
    return true;
  }
+
+ /**
+ * Include statements from .hplsqlrc and hplsql rc files
+ */
+ void includeRcFile() {
+ if (includeFile(Conf.DOT_HPLSQLRC)) {
+ dotHplsqlrcExists = true;
+ }
+ else {
+ if (includeFile(Conf.HPLSQLRC)) {
+ hplsqlrcExists = true;
+ }
+ }
+ if (udfRun) {
+ includeFile(Conf.HPLSQL_LOCALS_SQL);
+ }
+ }
+
+ /**
+ * Include statements from a file
+ */
+ boolean includeFile(String file) {
+ try {
+ String content = FileUtils.readFileToString(new java.io.File(file), "UTF-8");
+ if (content != null && !content.isEmpty()) {
+ if (trace) {
+ trace(null, "INLCUDE CONTENT " + file + " (non-empty)");
+ }
+ new Exec(this).include(content);
+ return true;
+ }
+ }
+ catch (Exception e) {}
+ return false;
+ }
+
  /**
   * Execute statements from an include file's content: parse the text as a
   * standalone program and visit the resulting tree.
   */
  void include(String content) throws Exception {
    InputStream input = new ByteArrayInputStream(content.getBytes("UTF-8"));
    HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    HplsqlParser parser = new HplsqlParser(tokens);
    ParseTree tree = parser.program();
    visit(tree);
  }
+
  /**
   * Start executing the PL/HQL script. The whole program runs inside a
   * FILE scope.
   */
  @Override
  public Integer visitProgram(HplsqlParser.ProgramContext ctx) {
    enterScope(Scope.Type.FILE);
    Integer rc = visitChildren(ctx);
    leaveScope();
    return rc;
  }
+
  /**
   * Enter a BEGIN-END block: its statements execute in a dedicated
   * BEGIN_END scope that is left when the block completes.
   */
  @Override
  public Integer visitBegin_end_block(HplsqlParser.Begin_end_blockContext ctx) {
    enterScope(Scope.Type.BEGIN_END);
    Integer rc = visitChildren(ctx);
    leaveScope();
    return rc;
  }
+
  /**
   * Free resources before exit: drop every managed temporary table created
   * during the run (DROP TABLE IF EXISTS on the default connection).
   */
  void cleanup() {
    for (Map.Entry<String, String> i : managedTables.entrySet()) {
      String sql = "DROP TABLE IF EXISTS " + i.getValue();
      Query query = executeSql(null, sql, exec.conf.defaultConnection);
      closeQuery(query, exec.conf.defaultConnection);
      if (trace) {
        trace(null, sql);
      }
    }
  }
+
+ /**
+ * Output information about unhandled exceptions
+ */
+ void printExceptions() {
+ while (!signals.empty()) {
+ Signal sig = signals.pop();
+ if (sig.type == Signal.Type.SQLEXCEPTION) {
+ System.err.println("Unhandled exception in PL/HQL");
+ }
+ if (sig.exception != null) {
+ sig.exception.printStackTrace();
+ }
+ else if (sig.value != null) {
+ System.err.println(sig.value);
+ }
+ }
+ }
+
+ /**
+ * Get the program return code
+ */
+ Integer getProgramReturnCode() {
+ Integer rc = 0;
+ if(!signals.empty()) {
+ Signal sig = signals.pop();
+ if(sig.type == Signal.Type.LEAVE_ROUTINE && sig.value != null) {
+ try {
+ rc = Integer.parseInt(sig.value);
+ }
+ catch(NumberFormatException e) {
+ rc = 1;
+ }
+ }
+ }
+ return rc;
+ }
+
  /**
   * Executing a statement. Several guards can suppress execution:
   * bare semicolons are no-ops; during the routine-registration pass only
   * CREATE PROCEDURE/FUNCTION statements run; while RESIGNAL is unwinding,
   * statements are skipped until control returns to the handler's parent
   * scope; and a pending signal (unless ON ERROR SETERROR) skips statements
   * unless a CONTINUE handler consumes it. Any value left on the stack by
   * the previous statement is printed before executing the current one.
   */
  @Override
  public Integer visitStmt(HplsqlParser.StmtContext ctx) {
    if (ctx.semicolon_stmt() != null) {
      return 0;
    }
    if (initRoutines && ctx.create_procedure_stmt() == null && ctx.create_function_stmt() == null) {
      return 0;
    }
    if (exec.resignal) {
      if (exec.currentScope != exec.currentHandlerScope.parent) {
        return 0;
      }
      exec.resignal = false;
    }
    if (!exec.signals.empty() && exec.conf.onError != OnError.SETERROR) {
      if (!runContinueHandler()) {
        return 0;
      }
    }
    Var prevResult = stackPop();
    if (prevResult != null) {
      System.out.println(prevResult.toString());
    }
    return visitChildren(ctx);
  }
+
+ /**
+ * Executing or building SELECT statement
+ */
+ @Override
+ public Integer visitSelect_stmt(HplsqlParser.Select_stmtContext ctx) {
+ return exec.select.select(ctx);
+ }
+
+ @Override
+ public Integer visitCte_select_stmt(HplsqlParser.Cte_select_stmtContext ctx) {
+ return exec.select.cte(ctx);
+ }
+
+ @Override
+ public Integer visitFullselect_stmt(HplsqlParser.Fullselect_stmtContext ctx) {
+ return exec.select.fullselect(ctx);
+ }
+
+ @Override
+ public Integer visitSubselect_stmt(HplsqlParser.Subselect_stmtContext ctx) {
+ return exec.select.subselect(ctx);
+ }
+
+ @Override
+ public Integer visitSelect_list(HplsqlParser.Select_listContext ctx) {
+ return exec.select.selectList(ctx);
+ }
+
+ @Override
+ public Integer visitFrom_clause(HplsqlParser.From_clauseContext ctx) {
+ return exec.select.from(ctx);
+ }
+
+ @Override
+ public Integer visitFrom_table_name_clause(HplsqlParser.From_table_name_clauseContext ctx) {
+ return exec.select.fromTable(ctx);
+ }
+
+ @Override
+ public Integer visitFrom_join_clause(HplsqlParser.From_join_clauseContext ctx) {
+ return exec.select.fromJoin(ctx);
+ }
+
+ @Override
+ public Integer visitFrom_table_values_clause(HplsqlParser.From_table_values_clauseContext ctx) {
+ return exec.select.fromTableValues(ctx);
+ }
+
+ @Override
+ public Integer visitWhere_clause(HplsqlParser.Where_clauseContext ctx) {
+ return exec.select.where(ctx);
+ }
+
+ @Override
+ public Integer visitSelect_options_item(HplsqlParser.Select_options_itemContext ctx) {
+ return exec.select.option(ctx);
+ }
+
  /**
   * Resolve a table name. Records the connection the object maps to (used
   * later by getStatementConnection), then pushes, in order of precedence:
   * the managed temporary-table name, the MAP OBJECT target name, or the
   * original text.
   */
  @Override
  public Integer visitTable_name(HplsqlParser.Table_nameContext ctx) {
    String name = ctx.getText().toUpperCase();
    String actualName = exec.managedTables.get(name);
    String conn = exec.objectConnMap.get(name);
    if (conn == null) {
      conn = conf.defaultConnection;
    }
    stmtConnList.add(conn);
    if (actualName != null) {
      stackPush(actualName);
      return 0;
    }
    actualName = exec.objectMap.get(name);
    if (actualName != null) {
      stackPush(actualName);
      return 0;
    }
    stackPush(ctx.getText());
    return 0;
  }
+
+ /**
+ * SQL INSERT statement
+ */
+ @Override
+ public Integer visitInsert_stmt(HplsqlParser.Insert_stmtContext ctx) {
+ return exec.stmt.insert(ctx);
+ }
+
  /**
   * EXCEPTION block item (PL/SQL-style handler).
   * Runs only when a signal is pending. Under ON ERROR SETERROR/STOP the
   * signal is consumed without executing the handler. Only the WHEN OTHERS
   * handler is executed here; named conditions neither pop the signal nor
   * run their block.
   */
  @Override
  public Integer visitException_block_item(HplsqlParser.Exception_block_itemContext ctx) {
    if (exec.signals.empty()) {
      return 0;
    }
    if (exec.conf.onError == OnError.SETERROR || exec.conf.onError == OnError.STOP) {
      exec.signals.pop();
      return 0;
    }
    if (ctx.L_ID().toString().equalsIgnoreCase("OTHERS")) {
      trace(ctx, "EXCEPTION HANDLER");
      exec.signals.pop();
      enterScope(Scope.Type.HANDLER);
      visit(ctx.block());
      leaveScope();
    }
    return 0;
  }
+
  /**
   * DECLARE variable statement.
   * Parses the data type with optional length/scale and an optional DEFAULT
   * expression, then declares each identifier in the list with that same
   * type and default value.
   */
  @Override
  public Integer visitDeclare_var_item(HplsqlParser.Declare_var_itemContext ctx) {
    String type = ctx.dtype().getText();
    String len = null;
    String scale = null;
    Var default_ = null;
    if (ctx.dtype_len() != null) {
      // First L_INT is the length/precision, optional second is the scale
      len = ctx.dtype_len().L_INT(0).getText();
      if (ctx.dtype_len().L_INT(1) != null) {
        scale = ctx.dtype_len().L_INT(1).getText();
      }
    }
    if (ctx.dtype_default() != null) {
      default_ = evalPop(ctx.dtype_default());
    }
    int cnt = ctx.ident().size(); // Number of variables declared with the same data type and default
    for (int i = 0; i < cnt; i++) {
      String name = ctx.ident(i).getText();
      Var var = new Var(name, type, len, scale, default_);
      addVariable(var);
      if (trace) {
        if (default_ != null) {
          trace(ctx, "DECLARE " + name + " " + type + " = " + var.toSqlString());
        }
        else {
          trace(ctx, "DECLARE " + name + " " + type);
        }
      }
    }
    return 0;
  }
+
+ /**
+ * DECLARE cursor statement
+ */
+ @Override
+ public Integer visitDeclare_cursor_item(HplsqlParser.Declare_cursor_itemContext ctx) {
+ return exec.stmt.declareCursor(ctx);
+ }
+
+ /**
+ * DROP statement
+ */
+ @Override
+ public Integer visitDrop_stmt(HplsqlParser.Drop_stmtContext ctx) {
+ return exec.stmt.drop(ctx);
+ }
+
+ /**
+ * OPEN cursor statement
+ */
+ @Override
+ public Integer visitOpen_stmt(HplsqlParser.Open_stmtContext ctx) {
+ return exec.stmt.open(ctx);
+ }
+
+ /**
+ * FETCH cursor statement
+ */
+ @Override
+ public Integer visitFetch_stmt(HplsqlParser.Fetch_stmtContext ctx) {
+ return exec.stmt.fetch(ctx);
+ }
+
+ /**
+ * CLOSE cursor statement
+ */
+ @Override
+ public Integer visitClose_stmt(HplsqlParser.Close_stmtContext ctx) {
+ return exec.stmt.close(ctx);
+ }
+
+ /**
+ * COPY statement
+ */
+ @Override
+ public Integer visitCopy_stmt(HplsqlParser.Copy_stmtContext ctx) {
+ return new Copy(exec).run(ctx);
+ }
+
+ /**
+ * COPY FROM LOCAL statement
+ */
+ @Override
+ public Integer visitCopy_from_local_stmt(HplsqlParser.Copy_from_local_stmtContext ctx) {
+ return new Copy(exec).runFromLocal(ctx);
+ }
+
  /**
   * DECLARE HANDLER statement.
   * Defaults to an EXIT handler for SQLEXCEPTION; CONTINUE switches the
   * execution type, a bare identifier makes it a user-defined condition
   * handler, and NOT FOUND maps to the NOTFOUND signal type. The handler is
   * registered against the current scope.
   */
  @Override
  public Integer visitDeclare_handler_item(HplsqlParser.Declare_handler_itemContext ctx) {
    trace(ctx, "DECLARE HANDLER");
    Handler.ExecType execType = Handler.ExecType.EXIT;
    Signal.Type type = Signal.Type.SQLEXCEPTION;
    String value = null;
    if (ctx.T_CONTINUE() != null) {
      execType = Handler.ExecType.CONTINUE;
    }
    if (ctx.ident() != null) {
      type = Signal.Type.USERDEFINED;
      value = ctx.ident().getText();
    }
    else if (ctx.T_NOT() != null && ctx.T_FOUND() != null) {
      type = Signal.Type.NOTFOUND;
    }
    addHandler(new Handler(execType, type, value, exec.currentScope, ctx));
    return 0;
  }
+
+ /**
+ * DECLARE CONDITION
+ */
+ @Override
+ public Integer visitDeclare_condition_item(HplsqlParser.Declare_condition_itemContext ctx) {
+ return 0;
+ }
+
+ /**
+ * DECLARE TEMPORARY TABLE statement
+ */
+ @Override
+ public Integer visitDeclare_temporary_table_item(HplsqlParser.Declare_temporary_table_itemContext ctx) {
+ return exec.stmt.declareTemporaryTable(ctx);
+ }
+
+ /**
+ * CREATE TABLE statement
+ */
+ @Override
+ public Integer visitCreate_table_stmt(HplsqlParser.Create_table_stmtContext ctx) {
+ return exec.stmt.createTable(ctx);
+ }
+
+ @Override
+ public Integer visitCreate_table_options_hive_item(HplsqlParser.Create_table_options_hive_itemContext ctx) {
+ return exec.stmt.createTableHiveOptions(ctx);
+ }
+
+ /**
+ * CREATE LOCAL TEMPORARY | VOLATILE TABLE statement
+ */
+ @Override
+ public Integer visitCreate_local_temp_table_stmt(HplsqlParser.Create_local_temp_table_stmtContext ctx) {
+ return exec.stmt.createLocalTemporaryTable(ctx);
+ }
+
+ /**
+ * CREATE FUNCTION statement
+ */
+ @Override
+ public Integer visitCreate_function_stmt(HplsqlParser.Create_function_stmtContext ctx) {
+ exec.function.addUserFunction(ctx);
+ addLocalUdf(ctx);
+ return 0;
+ }
+
+ /**
+ * CREATE PROCEDURE statement
+ */
+ @Override
+ public Integer visitCreate_procedure_stmt(HplsqlParser.Create_procedure_stmtContext ctx) {
+ exec.function.addUserProcedure(ctx);
+ addLocalUdf(ctx); // Add procedures as they can be invoked by functions
+ return 0;
+ }
+
+ /**
+ * CREATE INDEX statement
+ */
+ @Override
+ public Integer visitCreate_index_stmt(HplsqlParser.Create_index_stmtContext ctx) {
+ return 0;
+ }
+
  /**
   * Accumulate the source text of functions and procedures defined in the
   * current script so they can be saved and shipped with the UDF (see
   * createLocalUdf). Only the top-level Exec collects text; nested Exec
   * instances (exec != this) skip it.
   */
  void addLocalUdf(ParserRuleContext ctx) {
    if (exec == this) {
      localUdf.append(exec.getFormattedText(ctx));
      localUdf.append("\n");
    }
  }
+
+ /**
+ * Save local functions and procedures to a file (will be added to the distributed cache)
+ */
+ String createLocalUdf() {
+ if(localUdf.length() == 0) {
+ return null;
+ }
+ try {
+ String file = System.getProperty("user.dir") + "/" + Conf.HPLSQL_LOCALS_SQL;
+ PrintWriter writer = new PrintWriter(file, "UTF-8");
+ writer.print(localUdf);
+ writer.close();
+ return file;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
+ /**
+ * Assignment statement for single value
+ */
+ @Override
+ public Integer visitAssignment_stmt_single_item(HplsqlParser.Assignment_stmt_single_itemContext ctx) {
+ String name = ctx.ident().getText();
+ visit(ctx.expr());
+ Var var = setVariable(name);
+ if (trace) {
+ trace(ctx, "SET " + name + " = " + var.toSqlString());
+ }
+ return 0;
+ }
+
  /**
   * Assignment statement for multiple values: SET a, b = expr1, expr2.
   * Each identifier is paired with the expression at the same position;
   * identifiers beyond the number of expressions are silently skipped.
   */
  @Override
  public Integer visitAssignment_stmt_multiple_item(HplsqlParser.Assignment_stmt_multiple_itemContext ctx) {
    int cnt = ctx.ident().size();
    int ecnt = ctx.expr().size();
    for (int i = 0; i < cnt; i++) {
      String name = ctx.ident(i).getText();
      if (i < ecnt) {
        visit(ctx.expr(i));
        Var var = setVariable(name);
        if (trace) {
          trace(ctx, "SET " + name + " = " + var.toString());
        }
      }
    }
    return 0;
  }
+
+ /**
+ * Assignment from SELECT statement
+ */
+ @Override
+ public Integer visitAssignment_stmt_select_item(HplsqlParser.Assignment_stmt_select_itemContext ctx) {
+ return stmt.assignFromSelect(ctx);
+ }
+
+ /**
+ * Evaluate an expression
+ */
+ @Override
+ public Integer visitExpr(HplsqlParser.ExprContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execSql(ctx);
+ }
+ else {
+ exec.expr.exec(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * Evaluate a boolean expression
+ */
+ @Override
+ public Integer visitBool_expr(HplsqlParser.Bool_exprContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execBoolSql(ctx);
+ }
+ else {
+ exec.expr.execBool(ctx);
+ }
+ return 0;
+ }
+
+ @Override
+ public Integer visitBool_expr_binary(HplsqlParser.Bool_expr_binaryContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execBoolBinarySql(ctx);
+ }
+ else {
+ exec.expr.execBoolBinary(ctx);
+ }
+ return 0;
+ }
+
+ @Override
+ public Integer visitBool_expr_unary(HplsqlParser.Bool_expr_unaryContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execBoolUnarySql(ctx);
+ }
+ else {
+ exec.expr.execBoolUnary(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * Function call
+ */
+ @Override
+ public Integer visitExpr_func(HplsqlParser.Expr_funcContext ctx) {
+ String name = ctx.ident().getText();
+ if (exec.buildSql) {
+ exec.function.execSql(name, ctx.expr_func_params());
+ }
+ else {
+ exec.function.exec(name, ctx.expr_func_params());
+ }
+ return 0;
+ }
+
+ /**
+ * Aggregate or window function call
+ */
+ @Override
+ public Integer visitExpr_agg_window_func(HplsqlParser.Expr_agg_window_funcContext ctx) {
+ exec.function.execAggWindowSql(ctx);
+ return 0;
+ }
+
+ /**
+ * Function with specific syntax
+ */
+ @Override
+ public Integer visitExpr_spec_func(HplsqlParser.Expr_spec_funcContext ctx) {
+ if (exec.buildSql) {
+ exec.function.specExecSql(ctx);
+ }
+ else {
+ exec.function.specExec(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * INCLUDE statement
+ */
+ @Override
+ public Integer visitInclude_stmt(@NotNull HplsqlParser.Include_stmtContext ctx) {
+ return exec.stmt.include(ctx);
+ }
+
+ /**
+ * IF statement (PL/SQL syntax)
+ */
+ @Override
+ public Integer visitIf_plsql_stmt(HplsqlParser.If_plsql_stmtContext ctx) {
+ return exec.stmt.ifPlsql(ctx);
+ }
+
+ /**
+ * IF statement (Transact-SQL syntax)
+ */
+ @Override
+ public Integer visitIf_tsql_stmt(HplsqlParser.If_tsql_stmtContext ctx) {
+ return exec.stmt.ifTsql(ctx);
+ }
+
+ /**
+ * USE statement
+ */
+ @Override
+ public Integer visitUse_stmt(HplsqlParser.Use_stmtContext ctx) {
+ return exec.stmt.use(ctx);
+ }
+
+ /**
+ * VALUES statement
+ */
+ @Override
+ public Integer visitValues_into_stmt(HplsqlParser.Values_into_stmtContext ctx) {
+ return exec.stmt.values(ctx);
+ }
+
+ /**
+ * WHILE statement
+ */
+ @Override
+ public Integer visitWhile_stmt(HplsqlParser.While_stmtContext ctx) {
+ return exec.stmt.while_(ctx);
+ }
+
+ /**
+ * FOR cursor statement
+ */
+ @Override
+ public Integer visitFor_cursor_stmt(HplsqlParser.For_cursor_stmtContext ctx) {
+ return exec.stmt.forCursor(ctx);
+ }
+
+ /**
+ * FOR (integer range) statement
+ */
+ @Override
+ public Integer visitFor_range_stmt(HplsqlParser.For_range_stmtContext ctx) {
+ return exec.stmt.forRange(ctx);
+ }
+
+ /**
+ * EXEC, EXECUTE and EXECUTE IMMEDIATE statement to execute dynamic SQL
+ */
+ @Override
+ public Integer visitExec_stmt(HplsqlParser.Exec_stmtContext ctx) {
+ return exec.stmt.exec(ctx);
+ }
+
+ /**
+ * CALL statement
+ */
+ @Override
+ public Integer visitCall_stmt(HplsqlParser.Call_stmtContext ctx) {
+ if (exec.function.execProc(ctx.expr_func_params(), ctx.ident().getText())) {
+ return 0;
+ }
+ return -1;
+ }
+
+ /**
+ * EXIT statement (leave the specified loop with a condition)
+ */
+ @Override
+ public Integer visitExit_stmt(HplsqlParser.Exit_stmtContext ctx) {
+ return exec.stmt.exit(ctx);
+ }
+
+ /**
+ * BREAK statement (leave the innermost loop unconditionally)
+ */
+ @Override
+ public Integer visitBreak_stmt(HplsqlParser.Break_stmtContext ctx) {
+ return exec.stmt.break_(ctx);
+ }
+
+ /**
+ * LEAVE statement (leave the specified loop unconditionally)
+ */
+ @Override
+ public Integer visitLeave_stmt(HplsqlParser.Leave_stmtContext ctx) {
+ return exec.stmt.leave(ctx);
+ }
+
+ /**
+ * PRINT statement
+ */
+ @Override
+ public Integer visitPrint_stmt(HplsqlParser.Print_stmtContext ctx) {
+ return exec.stmt.print(ctx);
+ }
+
+ /**
+ * SIGNAL statement
+ */
+ @Override
+ public Integer visitSignal_stmt(HplsqlParser.Signal_stmtContext ctx) {
+ return exec.stmt.signal(ctx);
+ }
+
+ /**
+ * RESIGNAL statement
+ */
+ @Override
+ public Integer visitResignal_stmt(HplsqlParser.Resignal_stmtContext ctx) {
+ return exec.stmt.resignal(ctx);
+ }
+
+ /**
+ * RETURN statement
+ */
+ @Override
+ public Integer visitReturn_stmt(HplsqlParser.Return_stmtContext ctx) {
+ return exec.stmt.return_(ctx);
+ }
+
  /**
   * MAP OBJECT statement: MAP OBJECT source [TO target] [AT connection].
   * Registers the object-name mapping and/or the object-to-connection
   * mapping, keyed by the upper-cased source name. Note the expression
   * index shifts: without TO, the AT connection is expr(1); with TO, the
   * target is expr(1) and the connection is expr(2).
   */
  @Override
  public Integer visitMap_object_stmt(HplsqlParser.Map_object_stmtContext ctx) {
    String source = evalPop(ctx.expr(0)).toString();
    String target = null;
    String conn = null;
    if (ctx.T_TO() != null) {
      target = evalPop(ctx.expr(1)).toString();
      exec.objectMap.put(source.toUpperCase(), target);
    }
    if (ctx.T_AT() != null) {
      if (ctx.T_TO() == null) {
        conn = evalPop(ctx.expr(1)).toString();
      }
      else {
        conn = evalPop(ctx.expr(2)).toString();
      }
      exec.objectConnMap.put(source.toUpperCase(), conn);
    }
    if (trace) {
      String log = "MAP OBJECT " + source;
      if (target != null) {
        log += " AS " + target;
      }
      if (conn != null) {
        log += " AT " + conn;
      }
      trace(ctx, log);
    }
    return 0;
  }
+
+ /**
+ * UPDATE statement
+ */
+ @Override
+ public Integer visitUpdate_stmt(HplsqlParser.Update_stmtContext ctx) {
+ return stmt.update(ctx);
+ }
+
+ /**
+ * DELETE statement
+ */
+ @Override
+ public Integer visitDelete_stmt(HplsqlParser.Delete_stmtContext ctx) {
+ return stmt.delete(ctx);
+ }
+
+ /**
+ * MERGE statement
+ */
+ @Override
+ public Integer visitMerge_stmt(HplsqlParser.Merge_stmtContext ctx) {
+ return stmt.merge(ctx);
+ }
+
+ /**
+ * Run a Hive command line
+ */
+ @Override
+ public Integer visitHive(@NotNull HplsqlParser.HiveContext ctx) {
+ trace(ctx, "HIVE");
+ ArrayList<String> cmd = new ArrayList<String>();
+ cmd.add("hive");
+ Var params = new Var(Var.Type.STRINGLIST, cmd);
+ stackPush(params);
+ visitChildren(ctx);
+ stackPop();
+ try {
+ String[] cmdarr = new String[cmd.size()];
+ cmd.toArray(cmdarr);
+ if(trace) {
+ trace(ctx, "HIVE Parameters: " + Utils.toString(cmdarr, ' '));
+ }
+ if (!offline) {
+ Process p = Runtime.getRuntime().exec(cmdarr);
+ new StreamGobbler(p.getInputStream()).start();
+ new StreamGobbler(p.getErrorStream()).start();
+ int rc = p.waitFor();
+ if (trace) {
+ trace(ctx, "HIVE Process exit code: " + rc);
+ }
+ }
+ } catch (Exception e) {
+ setSqlCode(-1);
+ signal(Signal.Type.SQLEXCEPTION, e.getMessage(), e);
+ return -1;
+ }
+ return 0;
+ }
+
  /**
   * A single Hive command-line item: appends -e/-f/-hiveconf plus its
   * evaluated argument to the parameter list pushed by visitHive.
   */
  @Override
  @SuppressWarnings("unchecked")
  public Integer visitHive_item(HplsqlParser.Hive_itemContext ctx) {
    Var params = stackPeek();
    ArrayList<String> a = (ArrayList<String>)params.value;
    if(ctx.P_e() != null) {
      a.add("-e");
      a.add(evalPop(ctx.expr()).toString());
    }
    else if(ctx.P_f() != null) {
      a.add("-f");
      a.add(evalPop(ctx.expr()).toString());
    }
    else if(ctx.P_hiveconf() != null) {
      a.add("-hiveconf");
      // key=value pair: the identifier is the conf key, the expr its value
      a.add(ctx.L_ID().toString() + "=" + evalPop(ctx.expr()).toString());
    }
    return 0;
  }
+
+ /**
+ * Executing OS command
+ */
+ @Override
+ public Integer visitHost_cmd(HplsqlParser.Host_cmdContext ctx) {
+ trace(ctx, "HOST");
+ execHost(ctx, ctx.start.getInputStream().getText(
+ new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())));
+ return 0;
+ }
+
  /**
   * HOST statement: the command string comes from evaluating an expression.
   */
  @Override
  public Integer visitHost_stmt(HplsqlParser.Host_stmtContext ctx) {
    trace(ctx, "HOST");
    execHost(ctx, evalPop(ctx.expr()).toString());
    return 0;
  }
+
+ public void execHost(ParserRuleContext ctx, String cmd) {
+ try {
+ if (trace) {
+ trace(ctx, "HOST Command: " + cmd);
+ }
+ Process p = Runtime.getRuntime().exec(cmd);
+ new StreamGobbler(p.getInputStream()).start();
+ new StreamGobbler(p.getErrorStream()).start();
+ int rc = p.waitFor();
+ if (trace) {
+ trace(ctx, "HOST Process exit code: " + rc);
+ }
+ setHostCode(rc);
+ } catch (Exception e) {
+ setHostCode(1);
+ signal(Signal.Type.SQLEXCEPTION);
+ }
+ }
+
+ /**
+ * Standalone expression (as a statement)
+ */
+ @Override
+ public Integer visitExpr_stmt(HplsqlParser.Expr_stmtContext ctx) {
+ visitChildren(ctx);
+ return 0;
+ }
+
+ /**
+ * String concatenation operator
+ */
+ @Override
+ public Integer visitExpr_concat(HplsqlParser.Expr_concatContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.operatorConcatSql(ctx);
+ }
+ else {
+ exec.expr.operatorConcat(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * Simple CASE expression
+ */
+ @Override
+ public Integer visitExpr_case_simple(HplsqlParser.Expr_case_simpleContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execSimpleCaseSql(ctx);
+ }
+ else {
+ exec.expr.execSimpleCase(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * Searched CASE expression
+ */
+ @Override
+ public Integer visitExpr_case_searched(HplsqlParser.Expr_case_searchedContext ctx) {
+ if (exec.buildSql) {
+ exec.expr.execSearchedCaseSql(ctx);
+ }
+ else {
+ exec.expr.execSearchedCase(ctx);
+ }
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS EXCEPTION statement
+ */
+ @Override
+ public Integer visitGet_diag_stmt_exception_item(HplsqlParser.Get_diag_stmt_exception_itemContext ctx) {
+ return exec.stmt.getDiagnosticsException(ctx);
+ }
+
+ /**
+ * GET DIAGNOSTICS ROW_COUNT statement
+ */
+ @Override
+ public Integer visitGet_diag_stmt_rowcount_item(HplsqlParser.Get_diag_stmt_rowcount_itemContext ctx) {
+ return exec.stmt.getDiagnosticsRowCount(ctx);
+ }
+
+ /**
+ * GRANT statement
+ */
+ @Override
+ public Integer visitGrant_stmt(HplsqlParser.Grant_stmtContext ctx) {
+ trace(ctx, "GRANT");
+ return 0;
+ }
+
+ /**
+ * Label
+ */
+ @Override
+ public Integer visitLabel(HplsqlParser.LabelContext ctx) {
+ exec.labels.push(ctx.L_ID().toString());
+ return 0;
+ }
+
  /**
   * Identifier reference. When the identifier names a declared variable,
   * push the variable itself (or its SQL-literal form while building SQL
   * text); otherwise push the bare identifier as an IDENT Var.
   */
  @Override
  public Integer visitIdent(HplsqlParser.IdentContext ctx) {
    String ident = ctx.getText();
    Var var = findVariable(ident);
    if (var != null) {
      if (!exec.buildSql) {
        exec.stackPush(var);
      }
      else {
        exec.stackPush(new Var(ident, Var.Type.STRING, var.toSqlString()));
      }
    }
    else {
      exec.stackPush(new Var(Var.Type.IDENT, ident));
    }
    return 0;
  }
+
+ /**
+ * Single quoted string literal
+ */
+ @Override
+ public Integer visitSingle_quotedString(HplsqlParser.Single_quotedStringContext ctx) {
+ if (exec.buildSql) {
+ exec.stackPush(ctx.getText());
+ }
+ else {
+ exec.stackPush(Utils.unquoteString(ctx.getText()));
+ }
+ return 0;
+ }
+
+ /**
+ * Integer literal, signed or unsigned
+ */
+ @Override
+ public Integer visitInt_number(HplsqlParser.Int_numberContext ctx) {
+ exec.stack.push(new Var(new Long(ctx.getText())));
+ return 0;
+ }
+
+ /**
+ * Interval number (1 DAYS i.e)
+ */
+ @Override
+ public Integer visitInterval_number(HplsqlParser.Interval_numberContext ctx) {
+ int num = evalPop(ctx.int_number()).intValue();
+ Interval interval = new Interval().set(num, ctx.interval_item().getText());
+ stackPush(new Var(interval));
+ return 0;
+ }
+
+ /**
+ * Decimal literal, signed or unsigned
+ */
+ @Override
+ public Integer visitDec_number(HplsqlParser.Dec_numberContext ctx) {
+ stackPush(new Var(new BigDecimal(ctx.getText())));
+ return 0;
+ }
+
+ /**
+ * NULL constant
+ */
+ @Override
+ public Integer visitNull_const(HplsqlParser.Null_constContext ctx) {
+ stackPush(new Var());
+ return 0;
+ }
+
+ /**
+ * DATE 'YYYY-MM-DD' literal
+ */
+ @Override
+ public Integer visitDate_literal(HplsqlParser.Date_literalContext ctx) {
+ String str = evalPop(ctx.string()).toString();
+ stackPush(new Var(Var.Type.DATE, Utils.toDate(str)));
+ return 0;
+ }
+
  /**
   * TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal.
   * Fractional-second precision is derived from the string length: digits
   * beyond the 19-char "YYYY-MM-DD HH:MI:SS" core (minus the '.') count as
   * precision, capped at 3 (milliseconds).
   */
  @Override
  public Integer visitTimestamp_literal(HplsqlParser.Timestamp_literalContext ctx) {
    String str = evalPop(ctx.string()).toString();
    int len = str.length();
    int precision = 0;
    if (len > 19 && len <= 29) {
      precision = len - 20;
      if (precision > 3) {
        precision = 3;
      }
    }
    stackPush(new Var(Utils.toTimestamp(str), precision));
    return 0;
  }
+
+ /**
+ * Define the connection profile to execute the current statement
+ */
+ String getStatementConnection() {
+ if (exec.stmtConnList.contains(exec.conf.defaultConnection)) {
+ return exec.conf.defaultConnection;
+ }
+ else if (!exec.stmtConnList.isEmpty()) {
+ return exec.stmtConnList.get(0);
+ }
+ return exec.conf.defaultConnection;
+ }
+
+ /**
+ * Define the connection profile for the specified object
+ * @return
+ */
+ String getObjectConnection(String name) {
+ String conn = exec.objectConnMap.get(name.toUpperCase());
+ if (conn != null) {
+ return conn;
+ }
+ return exec.conf.defaultConnection;
+ }
+
+ /**
+ * Get the connection (open the new connection if not available)
+ * @throws Exception
+ */
+ Connection getConnection(String conn) throws Exception {
+ return exec.conn.getConnection(conn);
+ }
+
+ /**
+ * Return the connection to the pool
+ */
+ void returnConnection(String name, Connection conn) {
+ exec.conn.returnConnection(name, conn);
+ }
+
+ /**
+ * Define the database type by profile name
+ */
+ Conn.Type getConnectionType(String conn) {
+ return exec.conn.getType(conn);
+ }
+
+ /**
+ * Get the current database type
+ */
+ public Conn.Type getConnectionType() {
+ return getConnectionType(exec.conf.defaultConnection);
+ }
+
+ /**
+ * Add managed temporary table
+ */
+ public void addManagedTable(String name, String managedName) {
+ exec.managedTables.put(name, managedName);
+ }
+
+ /**
+ * Get node text including spaces
+ */
+ String getText(ParserRuleContext ctx) {
+ return ctx.start.getInputStream().getText(new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ }
+
+ String getText(ParserRuleContext ctx, Token start, Token stop) {
+ return ctx.start.getInputStream().getText(new org.antlr.v4.runtime.misc.Interval(start.getStartIndex(), stop.getStopIndex()));
+ }
+
  /**
   * Evaluate the expression and pop its value from the stack.
   * @return the popped value, or Var.Empty when evaluation left nothing
   */
  Var evalPop(ParserRuleContext ctx) {
    visit(ctx);
    if (!exec.stack.isEmpty()) {
      return exec.stackPop();
    }
    return Var.Empty;
  }
+
  /**
   * Evaluate the expression and pop its value from the stack.
   * @param def default value (wrapped in a Var) when nothing was pushed
   */
  Var evalPop(ParserRuleContext ctx, long def) {
    visit(ctx);
    if (!exec.stack.isEmpty()) {
      return stackPop();
    }
    return new Var(def);
  }
+
  /**
   * Evaluate the data type and length specification, converting it for the
   * target dialect when conversion is enabled for the default connection;
   * otherwise return the original source text of the type clause.
   * NOTE(review): getText dereferences len.getStop(), so callers appear to
   * always pass a non-null len here — confirm, otherwise this throws NPE
   * when a type has no length clause.
   */
  String evalPop(HplsqlParser.DtypeContext type, HplsqlParser.Dtype_lenContext len) {
    if (isConvert(exec.conf.defaultConnection)) {
      return exec.converter.dataType(type, len);
    }
    return getText(type, type.getStart(), len.getStop());
  }
+
+ /**
+ * Evaluate the expression to NULL
+ */
+ void evalNull() {
+ stackPush(Var.Null);
+ }
+
+ /**
+ * Get formatted text between 2 tokens
+ */
+ public String getFormattedText(ParserRuleContext ctx) {
+ return ctx.start.getInputStream().getText(
+ new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ }
+
+ /**
+ * Flag whether executed from UDF or not
+ */
+ void setUdfRun(boolean udfRun) {
+ this.udfRun = udfRun;
+ }
+
+ /**
+ * Whether on-the-fly SQL conversion is required for the connection
+ */
+ boolean isConvert(String connName) {
+ return exec.conf.getConnectionConvert(connName);
+ }
+
+ /**
+ * Increment the row count
+ */
+ public int incRowCount() {
+ return exec.rowCount++;
+ }
+
+ /**
+ * Set the row count
+ */
+ public void setRowCount(int rowCount) {
+ exec.rowCount = rowCount;
+ }
+
+ /**
+ * Trace information
+ */
+ public void trace(ParserRuleContext ctx, String message) {
+ if (!trace) {
+ return;
+ }
+ if (ctx != null) {
+ System.out.println("Ln:" + ctx.getStart().getLine() + " " + message);
+ }
+ else {
+ System.out.println(message);
+ }
+ }
+
+ /**
+ * Informational messages
+ */
+ public void info(ParserRuleContext ctx, String message) {
+ if (!info) {
+ return;
+ }
+ if (ctx != null) {
+ System.err.println("Ln:" + ctx.getStart().getLine() + " " + message);
+ }
+ else {
+ System.err.println(message);
+ }
+ }
+
+ public Stack<Var> getStack() {
+ return exec.stack;
+ }
+
+ public int getRowCount() {
+ return exec.rowCount;
+ }
+
+ public Conf getConf() {
+ return exec.conf;
+ }
+
+ public boolean getTrace() {
+ return exec.trace;
+ }
+
+ public boolean getInfo() {
+ return exec.info;
+ }
+
+ public boolean getOffline() {
+ return exec.offline;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
new file mode 100644
index 0000000..f8b01e1
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
@@ -0,0 +1,574 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.apache.hive.hplsql.Var.Type;
+
/**
 * Expressions.
 *
 * Most expression kinds come in two variants: exec*() evaluates the
 * expression locally and pushes the resulting value (Var) onto the shared
 * stack, while exec*Sql() does not evaluate anything — it rebuilds the
 * expression as SQL text (a StringBuilder) and pushes that, for embedding
 * into a statement sent to the database.
 */
public class Expression {

  Exec exec;
  boolean trace = false;

  Expression(Exec e) {
    exec = e;
    trace = exec.getTrace();
  }

  /**
   * Evaluate an expression.
   * Dispatches on the operator present in the context; anything not handled
   * here is delegated to the generic visitor.
   */
  public void exec(HplsqlParser.ExprContext ctx) {
    if (ctx.T_ADD() != null) {
      operatorAdd(ctx);
    }
    else if (ctx.T_SUB() != null) {
      operatorSub(ctx);
    }
    else if (ctx.interval_item() != null) {
      createInterval(ctx);
    }
    else {
      visitChildren(ctx);
    }
  }

  /**
   * Evaluate an expression in executable SQL statement.
   * Rebuilds the expression as SQL text instead of computing a value.
   */
  public void execSql(HplsqlParser.ExprContext ctx) {
    StringBuilder sql = new StringBuilder();
    if (ctx.T_OPEN_P() != null) {
      sql.append("(");
      sql.append(evalPop(ctx.expr(0)).toString());
      sql.append(")");
    }
    else if (ctx.T_ADD() != null) {
      sql.append(evalPop(ctx.expr(0)).toString());
      sql.append(" + ");
      sql.append(evalPop(ctx.expr(1)).toString());
    }
    else if (ctx.T_SUB() != null) {
      sql.append(evalPop(ctx.expr(0)).toString());
      sql.append(" - ");
      sql.append(evalPop(ctx.expr(1)).toString());
    }
    else if (ctx.interval_item() != null) {
      // intervals are passed through verbatim as written in the source
      sql.append(exec.getFormattedText(ctx));
    }
    else {
      visitChildren(ctx);
      sql.append(exec.stackPop().toString());
    }
    exec.stackPush(sql);
  }

  /**
   * Evaluate a boolean expression.
   * AND/OR short-circuit: the right operand is evaluated only when the left
   * one did not already decide the result.
   */
  public void execBool(HplsqlParser.Bool_exprContext ctx) {
    if (ctx.T_OPEN_P() != null) {
      eval(ctx.bool_expr(0));
      return;
    }
    else if (ctx.bool_expr_atom() != null) {
      eval(ctx.bool_expr_atom());
      return;
    }
    Var result = evalPop(ctx.bool_expr(0));
    if (ctx.bool_expr_logical_operator() != null) {
      if (ctx.bool_expr_logical_operator().T_AND() != null) {
        if (result.isTrue()) {
          result = evalPop(ctx.bool_expr(1));
        }
      }
      else if (ctx.bool_expr_logical_operator().T_OR() != null) {
        if (!result.isTrue()) {
          result = evalPop(ctx.bool_expr(1));
        }
      }
    }
    exec.stackPush(result);
  }

  /**
   * Evaluate a boolean expression in executable SQL statement.
   * NOTE: unlike execBool(), both operands are always rendered — there is
   * no short-circuiting when producing SQL text.
   */
  public void execBoolSql(HplsqlParser.Bool_exprContext ctx) {
    StringBuilder sql = new StringBuilder();
    if (ctx.T_OPEN_P() != null) {
      sql.append("(");
      sql.append(evalPop(ctx.bool_expr(0)).toString());
      sql.append(")");
    }
    else if (ctx.bool_expr_atom() != null) {
      sql.append(evalPop(ctx.bool_expr_atom()).toString());
    }
    else if (ctx.bool_expr_logical_operator() != null) {
      sql.append(evalPop(ctx.bool_expr(0)).toString());
      sql.append(" " + ctx.bool_expr_logical_operator().getText() + " ");
      sql.append(evalPop(ctx.bool_expr(1)).toString());
    }
    exec.stackPush(sql);
  }

  /**
   * Binary boolean expression.
   * Unrecognized operators push false rather than failing.
   */
  public Integer execBoolBinary(HplsqlParser.Bool_expr_binaryContext ctx) {
    HplsqlParser.Bool_expr_binary_operatorContext op = ctx.bool_expr_binary_operator();
    if (op.T_EQUAL() != null || op.T_EQUAL2() != null) {
      operatorEqual(ctx, true);
    }
    else if (op.T_NOTEQUAL() != null || op.T_NOTEQUAL2() != null) {
      operatorEqual(ctx, false);
    }
    else if (op.T_GREATER() != null || op.T_LESS() != null || op.T_GREATEREQUAL() != null || op.T_LESSEQUAL() != null) {
      operatorCompare(ctx, op);
    }
    else {
      exec.stackPush(false);
    }
    return 0;
  }

  /**
   * Binary boolean expression in executable SQL statement.
   * The operator is emitted as written in the source text.
   */
  public Integer execBoolBinarySql(HplsqlParser.Bool_expr_binaryContext ctx) {
    StringBuilder sql = new StringBuilder();
    sql.append(evalPop(ctx.expr(0)).toString());
    sql.append(" " + exec.getFormattedText(ctx.bool_expr_binary_operator()) + " ");
    sql.append(evalPop(ctx.expr(1)).toString());
    exec.stackPush(sql);
    return 0;
  }

  /**
   * Unary boolean expression (IS [NOT] NULL, BETWEEN).
   * NOTE(review): the IN-clause variants are handled only in
   * execBoolUnarySql(); here they fall through and false is pushed.
   */
  public Integer execBoolUnary(HplsqlParser.Bool_expr_unaryContext ctx) {
    boolean val = false;
    if (ctx.T_IS() != null) {
      val = evalPop(ctx.expr(0)).isNull();
      if (ctx.T_NOT() != null) {
        val = !val;
      }
    }
    else if (ctx.T_BETWEEN() != null) {
      // val BETWEEN low AND high: high bound is checked only if low bound holds
      Var v = evalPop(ctx.expr(0));
      Var v1 = evalPop(ctx.expr(1));
      int cmp = v.compareTo(v1);
      if (cmp >= 0) {
        Var v2 = evalPop(ctx.expr(2));
        cmp = v.compareTo(v2);
        if (cmp <= 0) {
          val = true;
        }
      }
    }
    exec.stackPush(val);
    return 0;
  }

  /**
   * Unary boolean expression in executable SQL statement.
   */
  public Integer execBoolUnarySql(HplsqlParser.Bool_expr_unaryContext ctx) {
    StringBuilder sql = new StringBuilder();
    if (ctx.T_IS() != null) {
      sql.append(evalPop(ctx.expr(0)).toString());
      sql.append(" " + exec.getText(ctx, ctx.T_IS().getSymbol(), ctx.T_NULL().getSymbol()));
    }
    else if (ctx.T_BETWEEN() != null) {
      sql.append(evalPop(ctx.expr(0)).toString());
      sql.append(" " + ctx.T_BETWEEN().getText() + " ");
      sql.append(evalPop(ctx.expr(1)).toString());
      sql.append(" " + ctx.T_AND().getText() + " ");
      sql.append(evalPop(ctx.expr(2)).toString());
    }
    else if (ctx.bool_expr_single_in() != null) {
      singleInClauseSql(ctx.bool_expr_single_in(), sql);
    }
    else if (ctx.bool_expr_multi_in() != null) {
      multiInClauseSql(ctx.bool_expr_multi_in(), sql);
    }
    exec.stackPush(sql);
    return 0;
  }

  /**
   * Single value IN clause in executable SQL statement.
   * The list is either a subquery or the expressions after the first one
   * (expr(0) is the tested value).
   */
  public void singleInClauseSql(HplsqlParser.Bool_expr_single_inContext ctx, StringBuilder sql) {
    sql.append(evalPop(ctx.expr(0)).toString());
    if (ctx.T_NOT() != null) {
      sql.append(" " + ctx.T_NOT().getText());
    }
    sql.append(" " + ctx.T_IN().getText() + " (");
    if (ctx.select_stmt() != null) {
      sql.append(evalPop(ctx.select_stmt()));
    }
    else {
      int cnt = ctx.expr().size();
      for (int i = 1; i < cnt; i++) {
        sql.append(evalPop(ctx.expr(i)).toString());
        if (i + 1 < cnt) {
          sql.append(", ");
        }
      }
    }
    sql.append(")");
  }

  /**
   * Multi-value IN clause in executable SQL statement:
   * (a, b, ...) [NOT] IN (subquery).
   */
  public void multiInClauseSql(HplsqlParser.Bool_expr_multi_inContext ctx, StringBuilder sql) {
    int cnt = ctx.expr().size();
    sql.append("(");
    for (int i = 0; i < cnt; i++) {
      sql.append(evalPop(ctx.expr(i)).toString());
      if (i + 1 < cnt) {
        sql.append(", ");
      }
    }
    sql.append(")");
    if (ctx.T_NOT() != null) {
      sql.append(" " + ctx.T_NOT().getText());
    }
    sql.append(" " + ctx.T_IN().getText() + " (");
    if (ctx.select_stmt() != null) {
      sql.append(evalPop(ctx.select_stmt()));
    }
    sql.append(")");
  }

  /**
   * Addition operator.
   * NULL operands yield NULL. Supported combinations: int+int, int+date,
   * date+int (days), date/timestamp + interval.
   * NOTE(review): unsupported type combinations push nothing; the caller's
   * evalPop() then returns Var.Empty.
   */
  public void operatorAdd(HplsqlParser.ExprContext ctx) {
    Var v1 = evalPop(ctx.expr(0));
    Var v2 = evalPop(ctx.expr(1));
    if (v1.value == null || v2.value == null) {
      evalNull();
    }
    else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
      exec.stackPush(new Var((Long)v1.value + (Long)v2.value));
    }
    else if (v1.type == Type.BIGINT && v2.type == Type.DATE) {
      exec.stackPush(changeDateByInt((Date)v2.value, (Long)v1.value, true /*add*/));
    }
    else if (v1.type == Type.DATE && v2.type == Type.BIGINT) {
      exec.stackPush(changeDateByInt((Date)v1.value, (Long)v2.value, true /*add*/));
    }
    else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) {
      exec.stackPush(new Var(((Interval)v2.value).dateChange((Date)v1.value, true /*add*/)));
    }
    else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) {
      exec.stackPush(new Var(((Interval)v2.value).timestampChange((Timestamp)v1.value, true /*add*/), v1.scale));
    }
  }

  /**
   * Subtraction operator.
   * Same rules as operatorAdd(); note int-date (unlike int+date) is not
   * supported, and unsupported combinations push nothing.
   */
  public void operatorSub(HplsqlParser.ExprContext ctx) {
    Var v1 = evalPop(ctx.expr(0));
    Var v2 = evalPop(ctx.expr(1));
    if (v1.value == null || v2.value == null) {
      evalNull();
    }
    else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
      exec.stackPush(new Var((Long)v1.value - (Long)v2.value));
    }
    else if (v1.type == Type.DATE && v2.type == Type.BIGINT) {
      exec.stackPush(changeDateByInt((Date)v1.value, (Long)v2.value, false /*subtract*/));
    }
    else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) {
      exec.stackPush(new Var(((Interval)v2.value).dateChange((Date)v1.value, false /*subtract*/)));
    }
    else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) {
      exec.stackPush(new Var(((Interval)v2.value).timestampChange((Timestamp)v1.value, false /*subtract*/), v1.scale));
    }
  }

  /**
   * Add or subtract the specified number of days from DATE.
   * Uses Calendar arithmetic, so DST transitions are handled.
   */
  public Var changeDateByInt(Date d, Long i, boolean add) {
    Calendar c = Calendar.getInstance();
    c.setTimeInMillis(d.getTime());
    int days = i.intValue();
    if(!add) {
      days *= -1;
    }
    c.add(Calendar.DAY_OF_MONTH, days);
    return new Var(new Date(c.getTimeInMillis()));
  }

  /**
   * Equality operator; pass equal=false for the inequality form.
   */
  public void operatorEqual(HplsqlParser.Bool_expr_binaryContext ctx, boolean equal) {
    Var v1 = evalPop(ctx.expr(0));
    Var v2 = evalPop(ctx.expr(1));
    boolean eq = v1.equals(v2);
    if (!equal) {
      eq = !eq;
    }
    exec.stackPush(eq);
  }

  /**
   * Comparison operator (<, <=, >, >=).
   * NOTE: the T_LESS chain is a separate if, not else-if; this is equivalent
   * because at most one operator token is present in the context.
   */
  public void operatorCompare(HplsqlParser.Bool_expr_binaryContext ctx, HplsqlParser.Bool_expr_binary_operatorContext op) {
    Var v1 = evalPop(ctx.expr(0));
    Var v2 = evalPop(ctx.expr(1));
    int cmp = v1.compareTo(v2);
    boolean bool = false;
    if (op.T_GREATER() != null) {
      if (cmp > 0) {
        bool = true;
      }
    }
    else if (op.T_GREATEREQUAL() != null) {
      if (cmp >= 0) {
        bool = true;
      }
    }
    if (op.T_LESS() != null) {
      if (cmp < 0) {
        bool = true;
      }
    }
    else if (op.T_LESSEQUAL() != null) {
      if (cmp <= 0) {
        bool = true;
      }
    }
    exec.stackPush(bool);
  }

  /**
   * String concatenation operator.
   * NULL items are skipped; the result is NULL only when ALL items are NULL.
   */
  public void operatorConcat(HplsqlParser.Expr_concatContext ctx) {
    StringBuilder val = new StringBuilder();
    int cnt = ctx.expr_concat_item().size();
    boolean nulls = true;
    for (int i = 0; i < cnt; i++) {
      Var c = evalPop(ctx.expr_concat_item(i));
      if (!c.isNull()) {
        val.append(c.toString());
        nulls = false;
      }
    }
    if (nulls) {
      evalNull();
    }
    else {
      evalString(val);
    }
  }

  /**
   * String concatenation operator in executable SQL statement.
   * Rendered as a CONCAT(...) call.
   */
  public void operatorConcatSql(HplsqlParser.Expr_concatContext ctx) {
    StringBuilder sql = new StringBuilder();
    sql.append("CONCAT(");
    int cnt = ctx.expr_concat_item().size();
    for (int i = 0; i < cnt; i++) {
      sql.append(evalPop(ctx.expr_concat_item(i)).toString());
      if (i + 1 < cnt) {
        sql.append(", ");
      }
    }
    sql.append(")");
    exec.stackPush(sql);
  }

  /**
   * Simple CASE expression.
   * expr(0) is the selector; WHEN/THEN pairs follow at odd/even indexes,
   * hence the step of 2. Without a match, ELSE (the last expr) or NULL.
   */
  public void execSimpleCase(HplsqlParser.Expr_case_simpleContext ctx) {
    int i = 1;
    int cnt = ctx.expr().size();
    boolean found = false;
    Var val = evalPop(ctx.expr(0));
    while(i < cnt) {
      Var when = evalPop(ctx.expr(i));
      if(val.compareTo(when) == 0) {
        visit(ctx.expr(i + 1));
        found = true;
        break;
      }
      i += 2;
    }
    if(!found) {
      if(ctx.T_ELSE() != null) {
        visit(ctx.expr(cnt - 1));
      }
      else {
        evalNull();
      }
    }
  }

  /**
   * Simple CASE expression in executable SQL statement.
   * When present, the ELSE expression sits at index cnt*2 + 1.
   */
  public void execSimpleCaseSql(HplsqlParser.Expr_case_simpleContext ctx) {
    StringBuilder sql = new StringBuilder();
    sql.append("CASE ");
    sql.append(evalPop(ctx.expr(0)).toString());
    int cnt = ctx.T_WHEN().size();
    for (int i = 0; i < cnt; i++) {
      sql.append(" WHEN ");
      sql.append(evalPop(ctx.expr(i * 2 + 1)).toString());
      sql.append(" THEN ");
      sql.append(evalPop(ctx.expr(i * 2 + 2)).toString());
    }
    if (ctx.T_ELSE() != null) {
      sql.append(" ELSE ");
      sql.append(evalPop(ctx.expr(cnt * 2 + 1)).toString());
    }
    sql.append(" END");
    exec.stackPush(sql);
  }

  /**
   * Searched CASE expression.
   * The first true WHEN condition selects the result; otherwise ELSE
   * (expr at index cnt) or NULL.
   */
  public void execSearchedCase(HplsqlParser.Expr_case_searchedContext ctx) {
    int cnt = ctx.bool_expr().size();
    boolean found = false;
    for(int i = 0; i < cnt; i++) {
      if(evalPop(ctx.bool_expr(i)).isTrue()) {
        visit(ctx.expr(i));
        found = true;
        break;
      }
    }
    if(!found) {
      if(ctx.T_ELSE() != null) {
        visit(ctx.expr(cnt));
      }
      else {
        evalNull();
      }
    }
  }

  /**
   * Searched CASE expression in executable SQL statement.
   */
  public void execSearchedCaseSql(HplsqlParser.Expr_case_searchedContext ctx) {
    StringBuilder sql = new StringBuilder();
    sql.append("CASE");
    int cnt = ctx.T_WHEN().size();
    for (int i = 0; i < cnt; i++) {
      sql.append(" WHEN ");
      sql.append(evalPop(ctx.bool_expr(i)).toString());
      sql.append(" THEN ");
      sql.append(evalPop(ctx.expr(i)).toString());
    }
    if (ctx.T_ELSE() != null) {
      sql.append(" ELSE ");
      sql.append(evalPop(ctx.expr(cnt)).toString());
    }
    sql.append(" END");
    exec.stackPush(sql);
  }

  /**
   * Create an interval variable from a numeric value and a unit keyword
   * (e.g. DAYS) and push it onto the stack.
   */
  public void createInterval(HplsqlParser.ExprContext ctx) {
    int num = evalPop(ctx.expr(0)).intValue();
    Interval interval = new Interval().set(num, ctx.interval_item().getText());
    exec.stackPush(new Var(interval));
  }

  /**
   * Evaluate the expression and push the value to the stack.
   */
  void eval(ParserRuleContext ctx) {
    visit(ctx);
  }

  /**
   * Evaluate the expression and pop value from the stack.
   * Returns Var.Empty when the visited rule pushed nothing.
   */
  Var evalPop(ParserRuleContext ctx) {
    visit(ctx);
    if (!exec.stack.isEmpty()) {
      return exec.stackPop();
    }
    return Var.Empty;
  }

  /**
   * Evaluate the expression to specified String value.
   */
  void evalString(String string) {
    exec.stackPush(new Var(string));
  }

  void evalString(StringBuilder string) {
    evalString(string.toString());
  }

  /**
   * Evaluate the expression to NULL.
   */
  void evalNull() {
    exec.stackPush(Var.Null);
  }

  /**
   * Execute rules (delegates to the interpreter's visitor).
   */
  Integer visit(ParserRuleContext ctx) {
    return exec.visit(ctx);
  }

  /**
   * Execute children rules.
   */
  Integer visitChildren(ParserRuleContext ctx) {
    return exec.visitChildren(ctx);
  }

  /**
   * Trace information.
   */
  public void trace(ParserRuleContext ctx, String message) {
    exec.trace(ctx, message);
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/File.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/File.java b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
new file mode 100644
index 0000000..6a8ddfe
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * HDFS file operations
+ */
+public class File {
+ Path path;
+ FileSystem fs;
+ FSDataInputStream in;
+ FSDataOutputStream out;
+
+ /**
+ * Create FileSystem object
+ */
+ public FileSystem createFs() throws IOException {
+ fs = FileSystem.get(new Configuration());
+ return fs;
+ }
+
+ /**
+ * Create a file
+ */
+ public void create(String dir, String file, boolean overwrite) {
+ path = new Path(dir, file);
+ try {
+ if (fs == null) {
+ fs = FileSystem.get(new Configuration());
+ }
+ out = fs.create(path, overwrite);
+ }
+ catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Open an existing file
+ */
+ public void open(String dir, String file) {
+ path = new Path(dir, file);
+ try {
+ if (fs == null) {
+ fs = FileSystem.get(new Configuration());
+ }
+ in = fs.open(path);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Read a character from input
+ * @throws IOException
+ */
+ public char readChar() throws IOException {
+ return in.readChar();
+ }
+
+ /**
+ * Write string to file
+ */
+ public void writeString(String str) {
+ try {
+ out.writeChars(str);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Close a file
+ */
+ public void close() {
+ try {
+ if (in != null) {
+ in.close();
+ }
+ if (out != null) {
+ out.close();
+ }
+ in = null;
+ out = null;
+ path = null;
+ fs = null;
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Get the fully-qualified path
+ * NOTE: FileSystem.resolvePath() is not available in Hadoop 1.2.1
+ * @throws IOException
+ */
+ public Path resolvePath(Path path) throws IOException {
+ return fs.getFileStatus(path).getPath();
+ }
+
+ @Override
+ public String toString() {
+ if (path != null) {
+ return "FILE <" + path.toString() + ">";
+ }
+ return "FILE <null>";
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java b/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java
new file mode 100644
index 0000000..6c292ef
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import org.apache.hive.hplsql.Signal.Type;
+
+/**
+ * HPL/SQL condition and exception handler
+ */
+public class Handler {
+ public enum ExecType { CONTINUE, EXIT };
+ ExecType execType;
+ Type type;
+ String value;
+ Scope scope;
+ HplsqlParser.Declare_handler_itemContext ctx;
+
+ Handler(ExecType execType, Type type, String value, Scope scope, HplsqlParser.Declare_handler_itemContext ctx) {
+ this.execType = execType;
+ this.type = type;
+ this.value = value;
+ this.scope = scope;
+ this.ctx = ctx;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java b/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java
new file mode 100644
index 0000000..c2545f7
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+public class Hplsql {
+ public static void main(String[] args) throws Exception {
+ System.exit(new Exec().run(args));
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java b/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java
new file mode 100644
index 0000000..92c5d52
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+
/**
 * Date and time interval.
 *
 * Holds a number of days and a number of milliseconds that can be applied
 * (added or subtracted) to DATE and TIMESTAMP values.
 */
public class Interval {
  int days = 0;
  int milliseconds = 0;

  /**
   * Add or subtract the interval value to the specified date.
   */
  public Date dateChange(Date in, boolean add) {
    return new Date(shiftMillis(in.getTime(), add));
  }

  /**
   * Add or subtract the interval value to the specified timestamp.
   */
  public Timestamp timestampChange(Timestamp in, boolean add) {
    return new Timestamp(shiftMillis(in.getTime(), add));
  }

  /** Apply the interval to an epoch-millis value via Calendar arithmetic. */
  private long shiftMillis(long epochMillis, boolean add) {
    Calendar c = Calendar.getInstance();
    c.setTimeInMillis(epochMillis);
    calendarChange(c, add);
    return c.getTimeInMillis();
  }

  /**
   * Apply the interval to the specified Calendar value in place
   * (add=true to add, add=false to subtract) and return it.
   */
  public Calendar calendarChange(Calendar c, boolean add) {
    int sign = add ? 1 : -1;
    if (days != 0) {
      c.add(Calendar.DAY_OF_MONTH, sign * days);
    }
    if (milliseconds != 0) {
      c.setTimeInMillis(c.getTimeInMillis() + sign * milliseconds);
    }
    return c;
  }

  /**
   * Set the interval from a value and a unit keyword (case-insensitive).
   * NOTE: MICROSECOND(S) values are stored in the milliseconds field —
   * sub-millisecond precision is not represented.
   */
  public Interval set(int value, String item) {
    if (item.equalsIgnoreCase("DAYS") || item.equalsIgnoreCase("DAY")) {
      setDays(value);
    }
    if (item.equalsIgnoreCase("MICROSECONDS") || item.equalsIgnoreCase("MICROSECOND")) {
      setMilliseconds(value);
    }
    return this;
  }

  /** Set the days component. */
  public void setDays(int days) {
    this.days = days;
  }

  /** Set the milliseconds component. */
  public void setMilliseconds(int milliseconds) {
    this.milliseconds = milliseconds;
  }

  /**
   * Convert the interval to a string; zero components are omitted
   * and an all-zero interval renders as the empty string.
   */
  @Override
  public String toString() {
    StringBuilder text = new StringBuilder();
    if (days != 0) {
      text.append(days).append(" days");
    }
    if (milliseconds != 0) {
      text.append(milliseconds).append(" milliseconds");
    }
    return text.toString();
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Query.java b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
new file mode 100644
index 0000000..23d963f
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class Query {
+ String sql;
+ ParserRuleContext sqlExpr;
+ ParserRuleContext sqlSelect;
+
+ Connection conn;
+ Statement stmt;
+ ResultSet rs;
+ Exception exception;
+
+ Query() {
+ }
+
+ Query(String sql) {
+ this.sql = sql;
+ }
+
+ /**
+ * Set query objects
+ */
+ public void set(Connection conn, Statement stmt, ResultSet rs) {
+ this.conn = conn;
+ this.stmt = stmt;
+ this.rs = rs;
+ }
+
+ /**
+ * Get the number of rows
+ */
+ public int getRowCount() {
+ if (!error() && stmt != null) {
+ try {
+ return stmt.getUpdateCount();
+ } catch (SQLException e) {}
+ }
+ return -1;
+ }
+
+ /**
+ * Close statement results
+ */
+ public void closeStatement() {
+ try {
+ if(rs != null) {
+ rs.close();
+ rs = null;
+ }
+ if(stmt != null) {
+ stmt.close();
+ stmt = null;
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Set SQL statement
+ */
+ public void setSql(String sql) {
+ this.sql = sql;
+ }
+
+ /**
+ * Set expression context
+ */
+ public void setExprCtx(ParserRuleContext sqlExpr) {
+ this.sqlExpr = sqlExpr;
+ }
+
+ /**
+ * Set SELECT statement context
+ */
+ public void setSelectCtx(ParserRuleContext sqlSelect) {
+ this.sqlSelect = sqlSelect;
+ }
+
+ /**
+ * Set an execution error
+ */
+ public void setError(Exception e) {
+ exception = e;
+ }
+
+ /**
+ * Print error stack trace
+ */
+ public void printStackTrace() {
+ if(exception != null) {
+ exception.printStackTrace();
+ }
+ }
+
+ /**
+ * Get the result set object
+ */
+ public ResultSet getResultSet() {
+ return rs;
+ }
+
+ /**
+ * Get the connection object
+ */
+ public Connection getConnection() {
+ return conn;
+ }
+
+ /**
+ * Return error information
+ */
+ public boolean error() {
+ return exception != null;
+ }
+
+ public String errorText() {
+ if(exception != null) {
+ if(exception instanceof ClassNotFoundException) {
+ return "ClassNotFoundException: " + exception.getMessage();
+ }
+ return exception.getMessage();
+ }
+ return "";
+ }
+
+ public Exception getException() {
+ return exception;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java b/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java
new file mode 100644
index 0000000..317a94f
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.util.ArrayList;
+
+/**
+ * HPL/SQL block scope
+ */
+public class Scope {
+
+ // Types
+ public enum Type { FILE, BEGIN_END, LOOP, HANDLER, ROUTINE };
+
+ // Local variables
+ ArrayList<Var> vars = new ArrayList<Var>();
+ // Condition handlers
+ ArrayList<Handler> handlers = new ArrayList<Handler>();
+
+ Scope parent;
+ Type type;
+
+ Scope(Type type) {
+ this.parent = null;
+ this.type = type;
+ }
+
+ Scope(Scope parent, Type type) {
+ this.parent = parent;
+ this.type = type;
+ }
+
+ /**
+ * Add a local variable
+ */
+ void addVariable(Var var) {
+ vars.add(var);
+ }
+
+ /**
+ * Add a condition handler
+ */
+ void addHandler(Handler handler) {
+ handlers.add(handler);
+ }
+
+ /**
+ * Get the parent scope
+ */
+ Scope getParent() {
+ return parent;
+ }
+}
[4/4] hive git commit: HIVE-11055 HPL/SQL - Implementing Procedural
SQL in Hive (PL/HQL Contribution) (Dmitry Tolpeko via gates)
Posted by ga...@apache.org.
HIVE-11055 HPL/SQL - Implementing Procedural SQL in Hive (PL/HQL Contribution) (Dmitry Tolpeko via gates)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/052643cb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/052643cb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/052643cb
Branch: refs/heads/master
Commit: 052643cb8d1fa3811cbc0e1f56b0cd10ca419b8d
Parents: edb7b88
Author: Alan Gates <ga...@hortonworks.com>
Authored: Wed Jul 1 09:16:44 2015 -0700
Committer: Alan Gates <ga...@hortonworks.com>
Committed: Wed Jul 1 09:16:44 2015 -0700
----------------------------------------------------------------------
bin/ext/hplsql.sh | 37 +
bin/hplsql | 25 +
bin/hplsql.cmd | 58 +
hplsql/pom.xml | 123 ++
.../antlr4/org/apache/hive/hplsql/Hplsql.g4 | 1426 +++++++++++++
.../java/org/apache/hive/hplsql/Arguments.java | 206 ++
.../main/java/org/apache/hive/hplsql/Conf.java | 175 ++
.../main/java/org/apache/hive/hplsql/Conn.java | 243 +++
.../java/org/apache/hive/hplsql/Converter.java | 56 +
.../main/java/org/apache/hive/hplsql/Copy.java | 426 ++++
.../main/java/org/apache/hive/hplsql/Exec.java | 1950 ++++++++++++++++++
.../java/org/apache/hive/hplsql/Expression.java | 574 ++++++
.../main/java/org/apache/hive/hplsql/File.java | 132 ++
.../java/org/apache/hive/hplsql/Handler.java | 41 +
.../java/org/apache/hive/hplsql/Hplsql.java | 25 +
.../java/org/apache/hive/hplsql/Interval.java | 109 +
.../main/java/org/apache/hive/hplsql/Query.java | 155 ++
.../main/java/org/apache/hive/hplsql/Scope.java | 69 +
.../java/org/apache/hive/hplsql/Select.java | 411 ++++
.../java/org/apache/hive/hplsql/Signal.java | 48 +
.../main/java/org/apache/hive/hplsql/Stmt.java | 1021 +++++++++
.../org/apache/hive/hplsql/StreamGobbler.java | 51 +
.../main/java/org/apache/hive/hplsql/Timer.java | 59 +
.../main/java/org/apache/hive/hplsql/Udf.java | 117 ++
.../main/java/org/apache/hive/hplsql/Utils.java | 289 +++
.../main/java/org/apache/hive/hplsql/Var.java | 430 ++++
.../apache/hive/hplsql/functions/Function.java | 709 +++++++
.../hive/hplsql/functions/FunctionDatetime.java | 151 ++
.../hive/hplsql/functions/FunctionMisc.java | 188 ++
.../hive/hplsql/functions/FunctionOra.java | 231 +++
.../hive/hplsql/functions/FunctionString.java | 276 +++
pom.xml | 1 +
32 files changed, 9812 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/bin/ext/hplsql.sh
----------------------------------------------------------------------
diff --git a/bin/ext/hplsql.sh b/bin/ext/hplsql.sh
new file mode 100644
index 0000000..ddaf324
--- /dev/null
+++ b/bin/ext/hplsql.sh
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hplsql
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hplsql () {
+ CLASS=org.apache.hive.hplsql.Hplsql;
+
+ # include only the HPL/SQL jar and its dependencies
+ hplsqlJarPath=`ls ${HIVE_LIB}/hive-hplsql-*.jar`
+ antlrJarPath="${HIVE_LIB}/antlr-runtime-4.5.jar"
+ hadoopClasspath=""
+ if [[ -n "${HADOOP_CLASSPATH}" ]]
+ then
+ hadoopClasspath="${HADOOP_CLASSPATH}:"
+ fi
+ export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${hplsqlJarPath}:${antlrJarPath}"
+
+ exec $HADOOP jar ${hplsqlJarPath} $CLASS $HIVE_OPTS "$@"
+}
+
+hplsql_help () {
+ hplsql "--help"
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/bin/hplsql
----------------------------------------------------------------------
diff --git a/bin/hplsql b/bin/hplsql
new file mode 100644
index 0000000..6a5da7e
--- /dev/null
+++ b/bin/hplsql
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+# Set Hadoop User classpath to true so that httpclient jars are taken from
+# hive lib instead of hadoop lib.
+export HADOOP_USER_CLASSPATH_FIRST=true
+
+. "$bin"/hive --service hplsql "$@"
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/bin/hplsql.cmd
----------------------------------------------------------------------
diff --git a/bin/hplsql.cmd b/bin/hplsql.cmd
new file mode 100644
index 0000000..6717a1c
--- /dev/null
+++ b/bin/hplsql.cmd
@@ -0,0 +1,58 @@
+@echo off
+@rem Licensed to the Apache Software Foundation (ASF) under one or more
+@rem contributor license agreements. See the NOTICE file distributed with
+@rem this work for additional information regarding copyright ownership.
+@rem The ASF licenses this file to You under the Apache License, Version 2.0
+@rem (the "License"); you may not use this file except in compliance with
+@rem the License. You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+SetLocal EnableDelayedExpansion
+
+pushd %CD%\..
+if not defined HIVE_HOME (
+ set HIVE_HOME=%CD%
+)
+popd
+
+if "%HADOOP_BIN_PATH:~-1%" == "\" (
+ set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
+)
+
+if not defined JAVA_HOME (
+ echo Error: JAVA_HOME is not set.
+ goto :eof
+)
+
+@rem get the hadoop environment
+if not exist %HADOOP_HOME%\libexec\hadoop-config.cmd (
+ @echo +================================================================+
+ @echo ^| Error: HADOOP_HOME is not set correctly ^|
+ @echo +----------------------------------------------------------------+
+ @echo ^| Please set your HADOOP_HOME variable to the absolute path of ^|
+ @echo ^| the directory that contains \libexec\hadoop-config.cmd ^|
+ @echo +================================================================+
+ exit /b 1
+)
+@rem suppress the HADOOP_HOME warnings in 1.x.x
+set HADOOP_HOME_WARN_SUPPRESS=true
+
+@rem include only the HPL/SQL jar and its dependencies
+pushd %HIVE_HOME%\lib
+for /f %%a IN ('dir /b hive-hplsql-**.jar') do (
+ set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HIVE_HOME%\lib\%%a
+)
+set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HIVE_HOME%\lib\antlr-runtime-4.5.jar
+popd
+set HADOOP_USER_CLASSPATH_FIRST=true
+call %HADOOP_HOME%\libexec\hadoop-config.cmd
+
+call "%JAVA_HOME%\bin\java" %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %HADOOP_CLASSPATH% org.apache.hive.hplsql.Hplsql %*
+
+endlocal
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/pom.xml
----------------------------------------------------------------------
diff --git a/hplsql/pom.xml b/hplsql/pom.xml
new file mode 100644
index 0000000..d096e90
--- /dev/null
+++ b/hplsql/pom.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>2.0.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hplsql</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HPL/SQL</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ <packaging.minimizeJar>false</packaging.minimizeJar>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ <version>3.2.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>${commons-cli.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr4-runtime</artifactId>
+ <version>4.5</version>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr4-maven-plugin</artifactId>
+ <version>4.5</version>
+ <configuration>
+ <visitor>true</visitor>
+ </configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>antlr4</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
----------------------------------------------------------------------
diff --git a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
new file mode 100644
index 0000000..852716b
--- /dev/null
+++ b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -0,0 +1,1426 @@
+/**
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+// HPL/SQL Procedural SQL Extension Grammar
+grammar Hplsql;
+
+program : block ;
+
+block : (begin_end_block | stmt)+ ; // Multiple consecutive blocks/statements
+
+begin_end_block :
+ declare_block? T_BEGIN block exception_block? T_END
+ ;
+
+single_block_stmt : // Single BEGIN END block (but nested blocks are possible) or single statement
+ T_BEGIN block T_END
+ | stmt T_SEMICOLON?
+ ;
+
+stmt :
+ assignment_stmt
+ | break_stmt
+ | call_stmt
+ | close_stmt
+ | copy_from_local_stmt
+ | copy_stmt
+ | commit_stmt
+ | create_function_stmt
+ | create_index_stmt
+ | create_local_temp_table_stmt
+ | create_procedure_stmt
+ | create_table_stmt
+ | declare_stmt
+ | delete_stmt
+ | drop_stmt
+ | exec_stmt
+ | exit_stmt
+ | fetch_stmt
+ | for_cursor_stmt
+ | for_range_stmt
+ | if_stmt
+ | include_stmt
+ | insert_stmt
+ | get_diag_stmt
+ | grant_stmt
+ | leave_stmt
+ | map_object_stmt
+ | merge_stmt
+ | open_stmt
+ | print_stmt
+ | resignal_stmt
+ | return_stmt
+ | rollback_stmt
+ | select_stmt
+ | signal_stmt
+ | update_stmt
+ | use_stmt
+ | values_into_stmt
+ | while_stmt
+ | label
+ | hive
+ | host
+ | expr_stmt
+ | semicolon_stmt // Placed here to allow null statements ;;...
+ ;
+
+semicolon_stmt :
+ T_SEMICOLON
+ | '@' | '#' | '/'
+ ;
+
+exception_block : // Exception block
+ T_EXCEPTION exception_block_item+
+ ;
+
+exception_block_item :
+ T_WHEN L_ID T_THEN block ~(T_WHEN | T_END)
+ ;
+
+expr_stmt : // Standalone expression
+ expr
+ ;
+
+assignment_stmt : // Assignment statement
+ T_SET? assignment_stmt_item (T_COMMA assignment_stmt_item)*
+ ;
+
+assignment_stmt_item :
+ assignment_stmt_single_item
+ | assignment_stmt_multiple_item
+ | assignment_stmt_select_item
+ ;
+
+assignment_stmt_single_item :
+ ident T_COLON? T_EQUAL expr
+ ;
+
+assignment_stmt_multiple_item :
+ T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P T_COLON? T_EQUAL T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P
+ ;
+
+assignment_stmt_select_item :
+ (ident | (T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P)) T_COLON? T_EQUAL T_OPEN_P select_stmt T_CLOSE_P
+ ;
+
+break_stmt :
+ T_BREAK
+ ;
+
+call_stmt :
+ T_CALL ident expr_func_params?
+ ;
+
+declare_stmt : // Declaration statement
+ T_DECLARE declare_stmt_item (T_COMMA declare_stmt_item)*
+ ;
+
+declare_block : // Declaration block
+ T_DECLARE declare_stmt_item T_SEMICOLON (declare_stmt_item T_SEMICOLON)*
+ ;
+
+
+declare_stmt_item :
+ declare_var_item
+ | declare_condition_item
+ | declare_cursor_item
+ | declare_handler_item
+ | declare_temporary_table_item
+ ;
+
+declare_var_item :
+ ident (T_COMMA ident)* T_AS? dtype dtype_len? dtype_attr* dtype_default?
+ ;
+
+declare_condition_item : // Condition declaration
+ ident T_CONDITION
+ ;
+
+declare_cursor_item : // Cursor declaration
+ (T_CURSOR ident | ident T_CURSOR) declare_cursor_return? (T_IS | T_AS | T_FOR) (select_stmt | expr )
+ ;
+
+declare_cursor_return :
+ T_WITHOUT T_RETURN
+ | T_WITH T_RETURN T_ONLY? (T_TO (T_CALLER | T_CLIENT))?
+ ;
+
+declare_handler_item : // Condition handler declaration
+ (T_CONTINUE | T_EXIT) T_HANDLER T_FOR (T_SQLEXCEPTION | T_SQLWARNING | T_NOT T_FOUND | ident) single_block_stmt
+ ;
+
+declare_temporary_table_item : // DECLARE TEMPORARY TABLE statement
+ T_GLOBAL? T_TEMPORARY T_TABLE ident T_OPEN_P create_table_columns T_CLOSE_P create_table_options?
+ ;
+
+create_table_stmt :
+ T_CREATE T_TABLE (T_IF T_NOT T_EXISTS)? ident T_OPEN_P create_table_columns T_CLOSE_P create_table_options?
+ ;
+
+create_local_temp_table_stmt :
+ T_CREATE (T_LOCAL T_TEMPORARY | (T_SET | T_MULTISET)? T_VOLATILE) T_TABLE ident T_OPEN_P create_table_columns T_CLOSE_P create_table_options?
+ ;
+
+create_table_columns :
+ create_table_columns_item (T_COMMA create_table_columns_item)*
+ ;
+
+create_table_columns_item :
+ ident dtype dtype_len? dtype_attr* create_table_column_inline_cons?
+ | T_CONSTRAINT ident create_table_column_cons
+ ;
+
+create_table_column_inline_cons :
+ dtype_default
+ | T_NOT? T_NULL
+ | T_PRIMARY T_KEY
+ ;
+
+create_table_column_cons :
+ T_PRIMARY T_KEY T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
+ ;
+
+create_table_options :
+ create_table_options_item+
+ ;
+
+create_table_options_item :
+ T_ON T_COMMIT (T_DELETE | T_PRESERVE) T_ROWS
+ | create_table_options_db2_item
+ | create_table_options_hive_item
+ ;
+
+create_table_options_db2_item :
+ T_IN ident
+ | T_WITH T_REPLACE
+ | T_DISTRIBUTE T_BY T_HASH T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
+ | T_LOGGED
+ | T_NOT T_LOGGED
+ ;
+
+create_table_options_hive_item :
+ create_table_hive_row_format
+ ;
+
+create_table_hive_row_format :
+ T_ROW T_FORMAT T_DELIMITED create_table_hive_row_format_fields*
+ ;
+
+create_table_hive_row_format_fields :
+ T_FIELDS T_TERMINATED T_BY expr (T_ESCAPED T_BY expr)?
+ | T_COLLECTION T_ITEMS T_TERMINATED T_BY expr
+ | T_MAP T_KEYS T_TERMINATED T_BY expr
+ | T_LINES T_TERMINATED T_BY expr
+ | T_NULL T_DEFINED T_AS expr
+ ;
+
+dtype : // Data types
+ T_CHAR
+ | T_BIGINT
+ | T_DATE
+ | T_DEC
+ | T_DECIMAL
+ | T_FLOAT
+ | T_INT
+ | T_INTEGER
+ | T_NUMBER
+ | T_SMALLINT
+ | T_STRING
+ | T_TIMESTAMP
+ | T_VARCHAR
+ | T_VARCHAR2
+ | L_ID // User-defined data type
+ ;
+
+dtype_len : // Data type length or size specification
+ T_OPEN_P L_INT (T_COMMA L_INT)? T_CLOSE_P
+ ;
+
+dtype_attr :
+ T_CHARACTER T_SET ident
+ | T_NOT? (T_CASESPECIFIC | T_CS)
+ ;
+
+dtype_default : // Default clause in variable declaration
+ T_COLON? T_EQUAL expr
+ | T_DEFAULT expr
+ ;
+
+create_function_stmt :
+ (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE) T_FUNCTION ident create_routine_params create_function_return (T_AS | T_IS)? single_block_stmt
+ ;
+
+create_function_return :
+ (T_RETURN | T_RETURNS) dtype dtype_len?
+ ;
+
+create_procedure_stmt :
+ (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE) (T_PROCEDURE | T_PROC) ident create_routine_params create_routine_options? (T_AS | T_IS)? label? single_block_stmt (ident T_SEMICOLON)?
+ ;
+
+create_routine_params :
+ T_OPEN_P (create_routine_param_item (T_COMMA create_routine_param_item)*)? T_CLOSE_P
+ ;
+
+create_routine_param_item :
+ (T_IN | T_OUT | T_INOUT | T_IN T_OUT)? ident dtype dtype_len? dtype_attr* dtype_default?
+ | ident (T_IN | T_OUT | T_INOUT | T_IN T_OUT)? dtype dtype_len? dtype_attr* dtype_default?
+ ;
+
+create_routine_options :
+ create_routine_option+
+ ;
+create_routine_option :
+ T_LANGUAGE T_SQL
+ | T_SQL T_SECURITY (T_CREATOR | T_DEFINER | T_INVOKER | T_OWNER)
+ | T_DYNAMIC T_RESULT T_SETS L_INT
+ ;
+
+drop_stmt : // DROP statement
+ T_DROP T_TABLE (T_IF T_EXISTS)? table_name
+ ;
+
+exec_stmt : // EXEC, EXECUTE IMMEDIATE statement
+ (T_EXEC | T_EXECUTE) T_IMMEDIATE? expr (T_INTO L_ID (T_COMMA L_ID)*)? using_clause?
+ ;
+
+if_stmt : // IF statement
+ if_plsql_stmt
+ | if_tsql_stmt
+ ;
+
+if_plsql_stmt :
+ T_IF bool_expr T_THEN block elseif_block* else_block? T_END T_IF
+ ;
+
+if_tsql_stmt :
+ T_IF bool_expr single_block_stmt (T_ELSE single_block_stmt)?
+ ;
+
+elseif_block :
+ (T_ELSIF | T_ELSEIF) bool_expr T_THEN block
+ ;
+
+else_block :
+ T_ELSE block
+ ;
+
+include_stmt : // INCLUDE statement
+ T_INCLUDE file_name
+ ;
+
+insert_stmt : // INSERT statement
+ T_INSERT (T_OVERWRITE T_TABLE | T_INTO T_TABLE?) table_name insert_stmt_cols? (select_stmt | insert_stmt_rows)
+ ;
+
+insert_stmt_cols :
+ T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
+ ;
+
+insert_stmt_rows :
+ T_VALUES insert_stmt_row (T_COMMA insert_stmt_row)*
+ ;
+
+insert_stmt_row:
+ T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P
+ ;
+
+exit_stmt :
+ T_EXIT L_ID? (T_WHEN bool_expr)?
+ ;
+
+get_diag_stmt : // GET DIAGNOSTICS statement
+ T_GET T_DIAGNOSTICS get_diag_stmt_item
+ ;
+
+get_diag_stmt_item :
+ get_diag_stmt_exception_item
+ | get_diag_stmt_rowcount_item
+ ;
+
+get_diag_stmt_exception_item :
+ T_EXCEPTION L_INT ident T_EQUAL T_MESSAGE_TEXT
+ ;
+
+get_diag_stmt_rowcount_item :
+ ident T_EQUAL T_ROW_COUNT
+ ;
+
+grant_stmt :
+ T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO ident
+ ;
+
+grant_stmt_item :
+ T_EXECUTE T_ON T_PROCEDURE ident
+ ;
+
+leave_stmt :
+ T_LEAVE L_ID?
+ ;
+
+map_object_stmt :
+ T_MAP T_OBJECT expr (T_TO expr)? (T_AT expr)?
+ ;
+
+open_stmt : // OPEN cursor statement
+ T_OPEN L_ID (T_FOR (expr | select_stmt))?
+ ;
+
+fetch_stmt : // FETCH cursor statement
+ T_FETCH T_FROM? L_ID T_INTO L_ID (T_COMMA L_ID)*
+ ;
+
+close_stmt : // CLOSE cursor statement
+ T_CLOSE L_ID
+ ;
+
+copy_from_local_stmt : // COPY FROM LOCAL statement
+ T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO copy_target copy_file_option*
+ ;
+
+copy_stmt : // COPY statement
+ T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO copy_target copy_option*
+ ;
+
+copy_source :
+ (ident | expr | L_FILE)
+ ;
+
+copy_target :
+ (ident | expr | L_FILE)
+ ;
+
+copy_option :
+ T_AT ident
+ | T_BATCHSIZE expr
+ | T_DELIMITER expr
+ | T_SQLINSERT ident
+ ;
+
+copy_file_option :
+ T_DELETE
+ | T_IGNORE
+ | T_OVERWRITE
+ ;
+
+commit_stmt : // COMMIT statement
+ T_COMMIT T_WORK?
+ ;
+
+create_index_stmt : // CREATE INDEX statement
+ T_CREATE T_UNIQUE? T_INDEX ident T_ON table_name T_OPEN_P create_index_col (T_COMMA create_index_col)* T_CLOSE_P
+ ;
+
+create_index_col :
+ ident (T_ASC | T_DESC)?
+ ;
+
+print_stmt : // PRINT statement
+ T_PRINT expr
+ | T_PRINT T_OPEN_P expr T_CLOSE_P
+ ;
+
+resignal_stmt : // RESIGNAL statement
+ T_RESIGNAL (T_SQLSTATE T_VALUE? expr (T_SET T_MESSAGE_TEXT T_EQUAL expr)? )?
+ ;
+
+return_stmt : // RETURN statement
+ T_RETURN expr?
+ ;
+
+rollback_stmt : // ROLLBACK statement
+ T_ROLLBACK T_WORK?
+ ;
+
+signal_stmt : // SIGNAL statement
+ T_SIGNAL ident
+ ;
+
+use_stmt : // USE statement
+ T_USE expr
+ ;
+
+values_into_stmt : // VALUES INTO statement
+ T_VALUES T_OPEN_P? expr (T_COMMA expr)* T_CLOSE_P? T_INTO T_OPEN_P? ident (T_COMMA ident)* T_CLOSE_P?
+ ;
+
+while_stmt : // WHILE loop statement
+ T_WHILE bool_expr (T_DO | T_LOOP | T_THEN | T_BEGIN) block T_END (T_WHILE | T_LOOP)?
+ ;
+
+for_cursor_stmt : // FOR (cursor) statement
+ T_FOR L_ID T_IN T_OPEN_P? select_stmt T_CLOSE_P? T_LOOP block T_END T_LOOP
+ ;
+
+for_range_stmt : // FOR (Integer range) statement
+ T_FOR L_ID T_IN T_REVERSE? expr T_DOT2 expr ((T_BY | T_STEP) expr)? T_LOOP block T_END T_LOOP
+ ;
+
+label :
+ L_LABEL
+ | T_LESS T_LESS L_ID T_GREATER T_GREATER
+ ;
+
+using_clause : // USING var,... clause
+ T_USING expr (T_COMMA expr)*
+ ;
+
+select_stmt : // SELECT statement
+ cte_select_stmt? fullselect_stmt
+ ;
+
+cte_select_stmt :
+ T_WITH cte_select_stmt_item (T_COMMA cte_select_stmt_item)*
+ ;
+
+cte_select_stmt_item :
+ ident cte_select_cols? T_AS T_OPEN_P fullselect_stmt T_CLOSE_P
+ ;
+
+cte_select_cols :
+ T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
+ ;
+
+fullselect_stmt :
+ fullselect_stmt_item (fullselect_set_clause fullselect_stmt_item)*
+ ;
+
+fullselect_stmt_item :
+ subselect_stmt
+ | T_OPEN_P fullselect_stmt T_CLOSE_P
+ ;
+
+fullselect_set_clause :
+ T_UNION T_ALL?
+ | T_EXCEPT T_ALL?
+ | T_INTERSECT T_ALL?
+ ;
+
+subselect_stmt :
+ (T_SELECT | T_SEL) select_list into_clause? from_clause? where_clause? group_by_clause? having_clause? order_by_clause? select_options?
+ ;
+
+select_list :
+ select_list_set? select_list_limit? select_list_item (T_COMMA select_list_item)*
+ ;
+
+select_list_set :
+ T_ALL
+ | T_DISTINCT
+ ;
+
+select_list_limit :
+ T_TOP expr
+ ;
+
+select_list_item :
+ (expr select_list_alias? | select_list_asterisk)
+ ;
+
+select_list_alias :
+ T_AS? L_ID
+ | T_OPEN_P T_TITLE L_S_STRING T_CLOSE_P
+ ;
+
+select_list_asterisk :
+ (L_ID '.')? '*'
+ ;
+
+into_clause :
+ T_INTO ident (T_COMMA ident)*
+ ;
+
+from_clause :
+ T_FROM from_table_clause (from_join_clause)*
+ ;
+
+from_table_clause :
+ from_table_name_clause
+ | from_subselect_clause
+ | from_table_values_clause
+ ;
+
+from_table_name_clause :
+ table_name from_alias_clause?
+ ;
+
+from_subselect_clause :
+ T_OPEN_P subselect_stmt T_CLOSE_P from_alias_clause?
+ ;
+
+from_join_clause :
+ T_COMMA from_table_clause
+ | from_join_type_clause from_table_clause T_ON bool_expr
+ ;
+
+from_join_type_clause :
+ T_INNER T_JOIN
+ | (T_LEFT | T_RIGHT | T_FULL) T_OUTER? T_JOIN
+ ;
+
+from_table_values_clause:
+ T_TABLE T_OPEN_P T_VALUES from_table_values_row (T_COMMA from_table_values_row)* T_CLOSE_P from_alias_clause?
+ ;
+
+from_table_values_row:
+ expr
+ | T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P
+ ;
+
+from_alias_clause :
+ {!_input.LT(1).getText().equalsIgnoreCase("GROUP") &&
+ !_input.LT(1).getText().equalsIgnoreCase("ORDER") &&
+ !_input.LT(1).getText().equalsIgnoreCase("LIMIT")}?
+ T_AS? ident (T_OPEN_P L_ID (T_COMMA L_ID)* T_CLOSE_P)?
+ ;
+
+table_name :
+ ident
+ ;
+
+where_clause :
+ T_WHERE bool_expr
+ ;
+
+group_by_clause :
+ T_GROUP T_BY expr (T_COMMA expr)*
+ ;
+
+having_clause :
+ T_HAVING bool_expr
+ ;
+
+order_by_clause :
+ T_ORDER T_BY expr (T_ASC | T_DESC)? (T_COMMA expr (T_ASC | T_DESC)?)*
+ ;
+
+select_options :
+ select_options_item+
+ ;
+
+select_options_item :
+ T_LIMIT expr
+ | T_WITH (T_RR | T_RS | T_CS | T_UR)
+ ;
+
+update_stmt : // UPDATE statement
+ T_UPDATE update_table T_SET assignment_stmt_item (T_COMMA assignment_stmt_item)* where_clause? update_upsert?
+ ;
+
+update_table :
+ (table_name | (T_OPEN_P select_stmt T_CLOSE_P)) (T_AS? ident)?
+ ;
+
+update_upsert :
+ T_ELSE insert_stmt
+ ;
+
+merge_stmt : // MERGE statement
+ T_MERGE T_INTO merge_table T_USING merge_table T_ON bool_expr merge_condition+
+ ;
+
+merge_table :
+ (table_name | (T_OPEN_P select_stmt T_CLOSE_P)) (T_AS? ident)?
+ ;
+
+merge_condition :
+ T_WHEN T_NOT? T_MATCHED (T_AND bool_expr)? T_THEN merge_action
+ | T_ELSE T_IGNORE
+ ;
+
+merge_action :
+ T_INSERT insert_stmt_cols? T_VALUES insert_stmt_row
+ | T_UPDATE T_SET assignment_stmt_item (T_COMMA assignment_stmt_item)*
+ | T_DELETE
+ ;
+
+delete_stmt : // DELETE statement
+ T_DELETE T_FROM? table_name (T_AS? ident)? where_clause?
+ ;
+
+bool_expr : // Boolean condition
+ T_OPEN_P bool_expr T_CLOSE_P
+ | bool_expr bool_expr_logical_operator bool_expr
+ | bool_expr_atom
+ ;
+
+bool_expr_atom :
+ bool_expr_unary
+ | bool_expr_binary
+ ;
+
+bool_expr_unary :
+ expr T_IS T_NOT? T_NULL
+ | expr T_BETWEEN expr T_AND expr
+ | bool_expr_single_in
+ | bool_expr_multi_in
+ ;
+
+bool_expr_single_in :
+ expr T_NOT? T_IN T_OPEN_P ((expr (T_COMMA expr)*) | select_stmt) T_CLOSE_P
+ ;
+
+bool_expr_multi_in :
+ T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P T_NOT? T_IN T_OPEN_P select_stmt T_CLOSE_P
+ ;
+
+bool_expr_binary :
+ expr bool_expr_binary_operator expr
+ ;
+
+bool_expr_logical_operator :
+ T_AND
+ | T_OR
+ ;
+
+bool_expr_binary_operator :
+ T_EQUAL
+ | T_EQUAL2
+ | T_NOTEQUAL
+ | T_NOTEQUAL2
+ | T_LESS
+ | T_LESSEQUAL
+ | T_GREATER
+ | T_GREATEREQUAL
+ | T_NOT? (T_LIKE | T_RLIKE | T_REGEXP)
+ ;
+
+expr :
+ expr interval_item
+ | expr T_MUL expr
+ | expr T_DIV expr
+ | expr T_ADD expr
+ | expr T_SUB expr
+ | T_OPEN_P expr T_CLOSE_P
+ | expr_concat
+ | expr_case
+ | expr_agg_window_func
+ | expr_spec_func
+ | expr_func
+ | expr_atom
+ ;
+
+
+expr_atom :
+ date_literal
+ | timestamp_literal
+ | ident
+ | string
+ | dec_number
+ | interval_number
+ | int_number
+ | null_const
+ ;
+
+interval_item :
+ T_DAY
+ | T_DAYS
+ | T_MICROSECOND
+ | T_MICROSECONDS
+ ;
+
+interval_number :
+ int_number interval_item
+ ;
+
+expr_concat : // String concatenation operator
+ expr_concat_item (T_PIPE | T_CONCAT) expr_concat_item ((T_PIPE | T_CONCAT) expr_concat_item)*
+ ;
+
+expr_concat_item :
+ T_OPEN_P expr T_CLOSE_P
+ | expr_case
+ | expr_agg_window_func
+ | expr_spec_func
+ | expr_func
+ | expr_atom
+ ;
+
+expr_case : // CASE expression
+ expr_case_simple
+ | expr_case_searched
+ ;
+
+expr_case_simple :
+ T_CASE expr (T_WHEN expr T_THEN expr)+ (T_ELSE expr)? T_END
+ ;
+
+expr_case_searched :
+ T_CASE (T_WHEN bool_expr T_THEN expr)+ (T_ELSE expr)? T_END
+ ;
+
+expr_agg_window_func :
+ T_AVG T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_COUNT T_OPEN_P ((expr_func_all_distinct? expr) | '*') T_CLOSE_P expr_func_over_clause?
+ | T_COUNT_BIG T_OPEN_P ((expr_func_all_distinct? expr) | '*') T_CLOSE_P expr_func_over_clause?
+ | T_DENSE_RANK T_OPEN_P T_CLOSE_P expr_func_over_clause
+ | T_FIRST_VALUE T_OPEN_P expr T_CLOSE_P expr_func_over_clause
+ | T_LAG T_OPEN_P expr (T_COMMA expr (T_COMMA expr)?)? T_CLOSE_P expr_func_over_clause
+ | T_LAST_VALUE T_OPEN_P expr T_CLOSE_P expr_func_over_clause
+ | T_LEAD T_OPEN_P expr (T_COMMA expr (T_COMMA expr)?)? T_CLOSE_P expr_func_over_clause
+ | T_MAX T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_MIN T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_RANK T_OPEN_P T_CLOSE_P expr_func_over_clause
+ | T_ROW_NUMBER T_OPEN_P T_CLOSE_P expr_func_over_clause
+ | T_STDEV T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_SUM T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_VAR T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ | T_VARIANCE T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause?
+ ;
+
+expr_func_all_distinct :
+ T_ALL
+ | T_DISTINCT
+ ;
+
+expr_func_over_clause :
+ T_OVER T_OPEN_P expr_func_partition_by_clause? order_by_clause? T_CLOSE_P
+ ;
+
+expr_func_partition_by_clause :
+ T_PARTITION T_BY ident (T_COMMA ident)*
+ ;
+
+expr_spec_func :
+ T_ACTIVITY_COUNT
+ | T_CAST T_OPEN_P expr T_AS dtype dtype_len? T_CLOSE_P
+ | T_COUNT T_OPEN_P (expr | '*') T_CLOSE_P
+ | T_CURRENT_DATE | T_CURRENT T_DATE
+ | (T_CURRENT_TIMESTAMP | T_CURRENT T_TIMESTAMP) (T_OPEN_P expr T_CLOSE_P)?
+ | T_CURRENT_USER | T_CURRENT T_USER
+ | T_MAX_PART_STRING T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_MIN_PART_STRING T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_MAX_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_MIN_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_MAX_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_MIN_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P
+ | T_PART_LOC T_OPEN_P expr (T_COMMA expr T_EQUAL expr)+ (T_COMMA expr)? T_CLOSE_P
+ | T_TRIM T_OPEN_P expr T_CLOSE_P
+ | T_SUBSTRING T_OPEN_P expr T_FROM expr (T_FOR expr)? T_CLOSE_P
+ | T_SYSDATE
+ | T_USER
+ ;
+
+expr_func :
+ ident expr_func_params
+ ;
+
+expr_func_params :
+ T_OPEN_P (expr (T_COMMA expr)*)? T_CLOSE_P
+ ;
+
+hive :
+ T_HIVE hive_item*
+ ;
+
+hive_item :
+ P_e expr
+ | P_f expr
+ | P_hiveconf L_ID T_EQUAL expr
+ | P_i expr
+ | P_S
+ | P_h
+ ;
+
+host :
+ '!' host_cmd ';' // OS command
+ | host_stmt
+ ;
+
+host_cmd :
+ .*?
+ ;
+
+host_stmt :
+ T_HOST expr
+ ;
+
+file_name :
+ L_ID | L_FILE
+ ;
+
+date_literal : // DATE 'YYYY-MM-DD' literal
+ T_DATE string
+ ;
+
+timestamp_literal : // TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal
+ T_TIMESTAMP string
+ ;
+
+ident :
+ L_ID
+ | non_reserved_words
+ ;
+
+string : // String literal (single or double quoted)
+ L_S_STRING # single_quotedString
+ | L_D_STRING # double_quotedString
+ ;
+
+int_number : // Integer (positive or negative)
+ ('-' | '+')? L_INT
+ ;
+
+dec_number : // Decimal number (positive or negative)
+ ('-' | '+')? L_DEC
+ ;
+
+null_const : // NULL constant
+ T_NULL
+ ;
+
+non_reserved_words : // Tokens that are not reserved words and can be used as identifiers
+ T_ACTIVITY_COUNT
+ | T_ALL
+ | T_ALTER
+ | T_AND
+ | T_AS
+ | T_ASC
+ | T_AT
+ | T_AVG
+ | T_BATCHSIZE
+ | T_BEGIN
+ | T_BETWEEN
+ | T_BIGINT
+ | T_BREAK
+ | T_BY
+ | T_CALL
+ | T_CALLER
+ | T_CASE
+ | T_CASESPECIFIC
+ | T_CAST
+ | T_CHAR
+ | T_CHARACTER
+ | T_CLIENT
+ | T_CLOSE
+ | T_COLLECTION
+ | T_COPY
+ | T_COMMIT
+ | T_CONCAT
+ | T_CONDITION
+ | T_CONSTRAINT
+ | T_CONTINUE
+ | T_COUNT
+ | T_COUNT_BIG
+ | T_CREATE
+ | T_CREATOR
+ | T_CS
+ | T_CURRENT
+ | T_CURRENT_DATE
+ | T_CURRENT_TIMESTAMP
+ | T_CURRENT_USER
+ | T_CURSOR
+ | T_DATE
+ | T_DAY
+ | T_DAYS
+ | T_DEC
+ | T_DECIMAL
+ | T_DECLARE
+ | T_DEFAULT
+ | T_DEFINED
+ | T_DEFINER
+ | T_DELETE
+ | T_DELIMITED
+ | T_DELIMITER
+ | T_DENSE_RANK
+ | T_DESC
+ | T_DIAGNOSTICS
+ | T_DISTINCT
+ | T_DISTRIBUTE
+ | T_DO
+ | T_DROP
+ | T_DYNAMIC
+ // T_ELSE reserved word
+ // T_ELSEIF reserved word
+ // T_ELSIF reserved word
+ // T_END reserved word
+ | T_ESCAPED
+ | T_EXCEPT
+ | T_EXEC
+ | T_EXECUTE
+ | T_EXCEPTION
+ | T_EXISTS
+ | T_EXIT
+ | T_FETCH
+ | T_FIELDS
+ | T_FILE
+ | T_FIRST_VALUE
+ | T_FLOAT
+ | T_FOR
+ | T_FORMAT
+ | T_FOUND
+ | T_FROM
+ | T_FULL
+ | T_FUNCTION
+ | T_GET
+ | T_GLOBAL
+ | T_GRANT
+ | T_GROUP
+ | T_HANDLER
+ | T_HASH
+ | T_HAVING
+ | T_HIVE
+ | T_HOST
+ | T_IF
+ | T_IGNORE
+ | T_IMMEDIATE
+ | T_IN
+ | T_INCLUDE
+ | T_INDEX
+ | T_INNER
+ | T_INOUT
+ | T_INSERT
+ | T_INT
+ | T_INTEGER
+ | T_INTERSECT
+ | T_INTO
+ | T_INVOKER
+ | T_ITEMS
+ | T_IS
+ | T_JOIN
+ | T_KEY
+ | T_KEYS
+ | T_LAG
+ | T_LANGUAGE
+ | T_LAST_VALUE
+ | T_LEAD
+ | T_LEAVE
+ | T_LEFT
+ | T_LIKE
+ | T_LIMIT
+ | T_LINES
+ | T_LOCAL
+ | T_LOGGED
+ | T_LOOP
+ | T_MAP
+ | T_MATCHED
+ | T_MAX
+ | T_MERGE
+ | T_MESSAGE_TEXT
+ | T_MICROSECOND
+ | T_MICROSECONDS
+ | T_MIN
+ | T_MULTISET
+ | T_NOT
+ // T_NULL reserved word
+ | T_NUMBER
+ | T_OBJECT
+ | T_ON
+ | T_ONLY
+ | T_OPEN
+ | T_OR
+ | T_ORDER
+ | T_OUT
+ | T_OUTER
+ | T_OVER
+ | T_OVERWRITE
+ | T_OWNER
+ | T_PART_LOC
+ | T_PARTITION
+ | T_PRESERVE
+ | T_PRIMARY
+ | T_PRINT
+ | T_PROC
+ | T_PROCEDURE
+ | T_RANK
+ | T_REGEXP
+ | T_RR
+ | T_REPLACE
+ | T_RESIGNAL
+ | T_RESULT
+ | T_RETURN
+ | T_RETURNS
+ | T_REVERSE
+ | T_RIGHT
+ | T_RLIKE
+ | T_RS
+ | T_ROLLBACK
+ | T_ROW
+ | T_ROWS
+ | T_ROW_COUNT
+ | T_ROW_NUMBER
+ | T_SECURITY
+ | T_SEL
+ | T_SELECT
+ | T_SET
+ | T_SETS
+ | T_SIGNAL
+ | T_SMALLINT
+ | T_SQL
+ | T_SQLEXCEPTION
+ | T_SQLINSERT
+ | T_SQLSTATE
+ | T_SQLWARNING
+ | T_STEP
+ | T_STDEV
+ | T_STRING
+ | T_SUBSTRING
+ | T_SUM
+ | T_SYSDATE
+ | T_TABLE
+ | T_TEMPORARY
+ | T_TERMINATED
+ | T_THEN
+ | T_TIMESTAMP
+ | T_TITLE
+ | T_TO
+ | T_TOP
+ | T_TRIM
+ // T_UNION reserved word
+ | T_UNIQUE
+ | T_UPDATE
+ | T_UR
+ | T_USE
+ | T_USER
+ | T_USING
+ | T_VALUE
+ | T_VALUES
+ | T_VAR
+ | T_VARCHAR
+ | T_VARCHAR2
+ | T_VARIANCE
+ | T_VOLATILE
+ // T_WHEN reserved word
+ // T_WHERE reserved word
+ | T_WHILE
+ | T_WITH
+ | T_WITHOUT
+ | T_WORK
+ ;
+
+// Lexer rules
+// Keyword tokens. Each keyword is matched case-insensitively via the
+// single-letter A..Z fragments defined at the end of this grammar, so
+// identifiers keep their original case while keywords do not.
+T_ALL : A L L ;
+T_ALTER : A L T E R ;
+T_AND : A N D ;
+T_AS : A S ;
+T_ASC : A S C ;
+T_AT : A T ;
+T_AVG : A V G ;
+T_BATCHSIZE : B A T C H S I Z E ;
+T_BEGIN : B E G I N ;
+T_BETWEEN : B E T W E E N ;
+T_BIGINT : B I G I N T ;
+T_BREAK : B R E A K ;
+T_BY : B Y ;
+T_CALL : C A L L ;
+T_CALLER : C A L L E R ;
+T_CASE : C A S E ;
+T_CASESPECIFIC : C A S E S P E C I F I C ;
+T_CAST : C A S T ;
+T_CHAR : C H A R ;
+T_CHARACTER : C H A R A C T E R ;
+T_CLIENT : C L I E N T ;
+T_CLOSE : C L O S E ;
+T_COLLECTION : C O L L E C T I O N ;
+T_COPY : C O P Y ;
+T_COMMIT : C O M M I T ;
+T_CONCAT : C O N C A T;
+T_CONDITION : C O N D I T I O N ;
+T_CONSTRAINT : C O N S T R A I N T ;
+T_CONTINUE : C O N T I N U E ;
+T_COUNT : C O U N T ;
+T_COUNT_BIG : C O U N T '_' B I G;
+T_CREATE : C R E A T E ;
+T_CREATOR : C R E A T O R ;
+T_CS : C S;
+T_CURRENT : C U R R E N T ;
+T_CURSOR : C U R S O R ;
+T_DATE : D A T E ;
+T_DAY : D A Y ;
+T_DAYS : D A Y S ;
+T_DEC : D E C ;
+T_DECIMAL : D E C I M A L ;
+T_DECLARE : D E C L A R E ;
+T_DEFAULT : D E F A U L T ;
+T_DEFINED : D E F I N E D ;
+T_DEFINER : D E F I N E R ;
+T_DELETE : D E L E T E ;
+T_DELIMITED : D E L I M I T E D ;
+T_DELIMITER : D E L I M I T E R ;
+T_DESC : D E S C ;
+T_DIAGNOSTICS : D I A G N O S T I C S ;
+T_DISTINCT : D I S T I N C T ;
+T_DISTRIBUTE : D I S T R I B U T E ;
+T_DO : D O ;
+T_DROP : D R O P ;
+T_DYNAMIC : D Y N A M I C ;
+T_ELSE : E L S E ;
+T_ELSEIF : E L S E I F ;
+T_ELSIF : E L S I F ;
+T_END : E N D ;
+T_ESCAPED : E S C A P E D ;
+T_EXCEPT : E X C E P T ;
+T_EXEC : E X E C ;
+T_EXECUTE : E X E C U T E ;
+T_EXCEPTION : E X C E P T I O N ;
+T_EXISTS : E X I S T S ;
+T_EXIT : E X I T ;
+T_FETCH : F E T C H ;
+T_FIELDS : F I E L D S ;
+T_FILE : F I L E ;
+T_FLOAT : F L O A T ;
+T_FOR : F O R ;
+T_FORMAT : F O R M A T ;
+T_FOUND : F O U N D ;
+T_FROM : F R O M ;
+T_FULL : F U L L ;
+T_FUNCTION : F U N C T I O N ;
+T_GET : G E T ;
+T_GLOBAL : G L O B A L ;
+T_GRANT : G R A N T ;
+T_GROUP : G R O U P ;
+T_HANDLER : H A N D L E R ;
+T_HASH : H A S H ;
+T_HAVING : H A V I N G ;
+T_HIVE : H I V E ;
+T_HOST : H O S T ;
+T_IF : I F ;
+T_IGNORE : I G N O R E ;
+T_IMMEDIATE : I M M E D I A T E ;
+T_IN : I N ;
+T_INCLUDE : I N C L U D E ;
+T_INDEX : I N D E X ;
+T_INNER : I N N E R ;
+T_INOUT : I N O U T;
+T_INSERT : I N S E R T ;
+T_INT : I N T ;
+T_INTEGER : I N T E G E R ;
+T_INTERSECT : I N T E R S E C T ;
+T_INTO : I N T O ;
+T_INVOKER : I N V O K E R ;
+T_IS : I S ;
+T_ITEMS : I T E M S ;
+T_JOIN : J O I N ;
+T_KEY : K E Y ;
+T_KEYS : K E Y S ;
+T_LANGUAGE : L A N G U A G E ;
+T_LEAVE : L E A V E ;
+T_LEFT : L E F T ;
+T_LIKE : L I K E ;
+T_LIMIT : L I M I T ;
+T_LINES : L I N E S ;
+T_LOCAL : L O C A L ;
+T_LOGGED : L O G G E D ;
+T_LOOP : L O O P ;
+T_MAP : M A P ;
+T_MATCHED : M A T C H E D ;
+T_MAX : M A X ;
+T_MERGE : M E R G E ;
+T_MESSAGE_TEXT : M E S S A G E '_' T E X T ;
+T_MICROSECOND : M I C R O S E C O N D ;
+T_MICROSECONDS : M I C R O S E C O N D S;
+T_MIN : M I N ;
+T_MULTISET : M U L T I S E T ;
+T_NOT : N O T ;
+T_NULL : N U L L ;
+T_NUMBER : N U M B E R ;
+T_OBJECT : O B J E C T ;
+T_ON : O N ;
+T_ONLY : O N L Y ;
+T_OPEN : O P E N ;
+T_OR : O R ;
+T_ORDER : O R D E R;
+T_OUT : O U T ;
+T_OUTER : O U T E R ;
+T_OVER : O V E R ;
+T_OVERWRITE : O V E R W R I T E ;
+T_OWNER : O W N E R ;
+T_PARTITION : P A R T I T I O N ;
+T_PRESERVE : P R E S E R V E ;
+T_PRIMARY : P R I M A R Y ;
+T_PRINT : P R I N T ;
+T_PROC : P R O C ;
+T_PROCEDURE : P R O C E D U R E;
+T_REGEXP : R E G E X P ;
+T_REPLACE : R E P L A C E ;
+T_RESIGNAL : R E S I G N A L ;
+T_RESULT : R E S U L T ;
+T_RETURN : R E T U R N ;
+T_RETURNS : R E T U R N S ;
+T_REVERSE : R E V E R S E ;
+T_RIGHT : R I G H T ;
+T_RLIKE : R L I K E ;
+T_ROLLBACK : R O L L B A C K ;
+T_ROW : R O W ;
+T_ROWS : R O W S ;
+T_ROW_COUNT : R O W '_' C O U N T ;
+T_RR : R R;
+T_RS : R S ;
+T_TRIM : T R I M ;
+T_SECURITY : S E C U R I T Y ;
+T_SEL : S E L ;
+T_SELECT : S E L E C T ;
+T_SET : S E T ;
+T_SETS : S E T S;
+T_SIGNAL : S I G N A L ;
+T_SMALLINT : S M A L L I N T ;
+T_SQL : S Q L ;
+T_SQLEXCEPTION : S Q L E X C E P T I O N ;
+T_SQLINSERT : S Q L I N S E R T ;
+T_SQLSTATE : S Q L S T A T E ;
+T_SQLWARNING : S Q L W A R N I N G ;
+T_STEP : S T E P ;
+T_STRING : S T R I N G ;
+T_SUBSTRING : S U B S T R I N G ;
+T_SUM : S U M ;
+T_TABLE : T A B L E ;
+T_TEMPORARY : T E M P O R A R Y ;
+T_TERMINATED : T E R M I N A T E D ;
+T_THEN : T H E N ;
+T_TIMESTAMP : T I M E S T A M P ;
+T_TITLE : T I T L E ;
+T_TO : T O ;
+T_TOP : T O P ;
+T_UNION : U N I O N ;
+T_UNIQUE : U N I Q U E ;
+T_UPDATE : U P D A T E ;
+T_UR : U R ;
+T_USE : U S E ;
+T_USING : U S I N G ;
+T_VALUE : V A L U E ;
+T_VALUES : V A L U E S ;
+T_VAR : V A R ;
+T_VARCHAR : V A R C H A R ;
+T_VARCHAR2 : V A R C H A R '2' ;
+T_VOLATILE : V O L A T I L E ;
+T_WHEN : W H E N ;
+T_WHERE : W H E R E ;
+T_WHILE : W H I L E ;
+T_WITH : W I T H ;
+T_WITHOUT : W I T H O U T ;
+T_WORK : W O R K ;
+
+// Functions with specific syntax
+// These multi-word tokens win over their keyword prefixes because an ANTLR
+// lexer prefers the longest match (e.g. CURRENT_DATE lexes as one token,
+// not as T_CURRENT followed by an identifier).
+T_ACTIVITY_COUNT : A C T I V I T Y '_' C O U N T ;
+T_CURRENT_DATE : C U R R E N T '_' D A T E ;
+T_CURRENT_TIMESTAMP : C U R R E N T '_' T I M E S T A M P ;
+T_CURRENT_USER : C U R R E N T '_' U S E R ;
+T_DENSE_RANK : D E N S E '_' R A N K ;
+T_FIRST_VALUE : F I R S T '_' V A L U E;
+T_LAG : L A G ;
+T_LAST_VALUE : L A S T '_' V A L U E;
+T_LEAD : L E A D ;
+T_MAX_PART_STRING : M A X '_' P A R T '_' S T R I N G ;
+T_MIN_PART_STRING : M I N '_' P A R T '_' S T R I N G ;
+T_MAX_PART_INT : M A X '_' P A R T '_' I N T ;
+T_MIN_PART_INT : M I N '_' P A R T '_' I N T ;
+T_MAX_PART_DATE : M A X '_' P A R T '_' D A T E ;
+T_MIN_PART_DATE : M I N '_' P A R T '_' D A T E ;
+T_PART_LOC : P A R T '_' L O C ;
+T_RANK : R A N K ;
+T_ROW_NUMBER : R O W '_' N U M B E R;
+T_STDEV : S T D E V ;
+T_SYSDATE : S Y S D A T E ;
+T_VARIANCE : V A R I A N C E ;
+T_USER : U S E R;
+
+// Operators and punctuation. Two-character tokens (T_PIPE, T_DOT2, T_EQUAL2,
+// T_NOTEQUAL, T_GREATEREQUAL, ...) are preferred over their one-character
+// prefixes by the lexer's longest-match rule.
+T_ADD : '+' ;
+T_COLON : ':' ;
+T_COMMA : ',' ;
+T_PIPE : '||' ;
+T_DIV : '/' ;
+T_DOT2 : '..' ;
+T_EQUAL : '=' ;
+T_EQUAL2 : '==' ;
+T_NOTEQUAL : '<>' ;
+T_NOTEQUAL2 : '!=' ;
+T_GREATER : '>' ;
+T_GREATEREQUAL : '>=' ;
+T_LESS : '<' ;
+T_LESSEQUAL : '<=' ;
+T_MUL : '*' ;
+T_OPEN_B : '{' ;
+T_OPEN_P : '(' ;
+T_CLOSE_B : '}' ;
+T_CLOSE_P : ')' ;
+T_SEMICOLON : ';' ;
+T_SUB : '-' ;
+
+// Command-line option flags recognized when parsing a hive-style invocation
+P_e : '-e' ;
+P_f : '-f' ;
+P_hiveconf : '-hiveconf' ;
+P_i : '-i' ;
+P_S : '-S' ;
+P_h : '-h' ;
+
+// Literals, identifiers, comments and whitespace
+L_ID : L_ID_PART (L_BLANK* '.' L_BLANK* L_ID_PART)* // Identifier
+ ;
+L_S_STRING : '\'' (('\'' '\'') | ('\\' '\'') | ~('\''))* '\'' // Single quoted string literal
+ ;
+L_D_STRING : '"' (L_STR_ESC_D | .)*? '"' // Double quoted string literal
+ ;
+L_INT : L_DIGIT+ ; // Integer
+// NOTE(review): the ~'.' below consumes exactly one character that is not a
+// dot; it keeps '1..2' (a T_DOT2 range) from lexing as a decimal, but it also
+// means one non-digit character right after the point is accepted into the
+// token (e.g. '1.x') - confirm this is intended.
+L_DEC : L_DIGIT+ '.' ~'.' L_DIGIT* // Decimal number
+ | '.' L_DIGIT+
+ ;
+L_WS : L_BLANK+ -> skip ; // Whitespace
+L_M_COMMENT : '/*' .*? '*/' -> channel(HIDDEN) ; // Multiline comment
+L_S_COMMENT : ('--' | '//') .*? '\r'? '\n' -> channel(HIDDEN) ; // Single line comment
+
+L_FILE : '/'? L_ID ('/' L_ID)* // File path
+ | ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)*
+ ;
+
+L_LABEL : ([a-zA-Z] | L_DIGIT | '_')* ':'
+ ;
+
+fragment
+L_ID_PART :
+ [a-zA-Z] ([a-zA-Z] | L_DIGIT | '_')* // Identifier part
+ | ('_' | '@' | ':' | '#' | '$') ([a-zA-Z] | L_DIGIT | '_' | '@' | ':' | '#' | '$')+ // (at least one char must follow special char)
+ | '"' .*? '"' // Quoted identifiers
+ | '[' .*? ']'
+ | '`' .*? '`'
+ ;
+fragment
+L_STR_ESC_D : // Double quoted string escape sequence
+ '""' | '\\"'
+ ;
+fragment
+L_DIGIT : [0-9] // Digit
+ ;
+fragment
+L_BLANK : (' ' | '\t' | '\r' | '\n')
+ ;
+
+// Support case-insensitive keywords and allowing case-sensitive identifiers
+// (keyword rules above are spelled with these fragments, so keywords match in
+// any case while identifier text is left untouched)
+fragment A : ('a'|'A') ;
+fragment B : ('b'|'B') ;
+fragment C : ('c'|'C') ;
+fragment D : ('d'|'D') ;
+fragment E : ('e'|'E') ;
+fragment F : ('f'|'F') ;
+fragment G : ('g'|'G') ;
+fragment H : ('h'|'H') ;
+fragment I : ('i'|'I') ;
+fragment J : ('j'|'J') ;
+fragment K : ('k'|'K') ;
+fragment L : ('l'|'L') ;
+fragment M : ('m'|'M') ;
+fragment N : ('n'|'N') ;
+fragment O : ('o'|'O') ;
+fragment P : ('p'|'P') ;
+fragment Q : ('q'|'Q') ;
+fragment R : ('r'|'R') ;
+fragment S : ('s'|'S') ;
+fragment T : ('t'|'T') ;
+fragment U : ('u'|'U') ;
+fragment V : ('v'|'V') ;
+fragment W : ('w'|'W') ;
+fragment X : ('x'|'X') ;
+fragment Y : ('y'|'Y') ;
+fragment Z : ('z'|'Z') ;
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java b/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java
new file mode 100644
index 0000000..604d9a7
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.ParseException;
+
+public class Arguments {
+ private CommandLine commandLine;
+ private Options options = new Options();
+
+ String execString;
+ String fileName;
+ String main;
+ Map<String, String> vars = new HashMap<String, String>();
+
+ @SuppressWarnings("static-access")
+ Arguments() {
+ // -e 'query'
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("quoted-query-string")
+ .withDescription("HPL/SQL from command line")
+ .create('e'));
+
+ // -f <file>
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("filename")
+ .withDescription("HPL/SQL from a file")
+ .create('f'));
+
+ // -main entry_point_name
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("procname")
+ .withDescription("Entry point (procedure or function name)")
+ .create("main"));
+
+ // -hiveconf x=y
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("property=value")
+ .withLongOpt("hiveconf")
+ .withDescription("Value for given property")
+ .create());
+
+ // Substitution option -d, --define
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("key=value")
+ .withLongOpt("define")
+ .withDescription("Variable subsitution e.g. -d A=B or --define A=B")
+ .create('d'));
+
+ // Substitution option --hivevar
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("key=value")
+ .withLongOpt("hivevar")
+ .withDescription("Variable subsitution e.g. --hivevar A=B")
+ .create());
+
+ // [-version|--version]
+ options.addOption(new Option("version", "version", false, "Print HPL/SQL version"));
+
+ // [-trace|--trace]
+ options.addOption(new Option("trace", "trace", false, "Print debug information"));
+
+ // [-offline|--offline]
+ options.addOption(new Option("offline", "offline", false, "Offline mode - skip SQL execution"));
+
+ // [-H|--help]
+ options.addOption(new Option("H", "help", false, "Print help information"));
+ }
+
+ /**
+ * Parse the command line arguments
+ */
+ public boolean parse(String[] args) {
+ try {
+ commandLine = new GnuParser().parse(options, args);
+ execString = commandLine.getOptionValue('e');
+ fileName = commandLine.getOptionValue('f');
+ main = commandLine.getOptionValue("main");
+ Properties p = commandLine.getOptionProperties("hiveconf");
+ for(String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ p = commandLine.getOptionProperties("hivevar");
+ for(String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ p = commandLine.getOptionProperties("define");
+ for(String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ } catch (ParseException e) {
+ System.err.println(e.getMessage());
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Get the value of execution option -e
+ */
+ public String getExecString() {
+ return execString;
+ }
+
+ /**
+ * Get the value of file option -f
+ */
+ public String getFileName() {
+ return fileName;
+ }
+
+ /**
+ * Get the value of -main option
+ */
+ public String getMain() {
+ return main;
+ }
+
+ /**
+ * Get the variables
+ */
+ public Map<String, String> getVars() {
+ return vars;
+ }
+
+ /**
+ * Test whether version option is set
+ */
+ public boolean hasVersionOption() {
+ if(commandLine.hasOption("version")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether debug option is set
+ */
+ public boolean hasTraceOption() {
+ if(commandLine.hasOption("trace")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether offline option is set
+ */
+ public boolean hasOfflineOption() {
+ if(commandLine.hasOption("offline")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether help option is set
+ */
+ public boolean hasHelpOption() {
+ if(commandLine.hasOption('H')) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Print help information
+ */
+ public void printHelp() {
+ new HelpFormatter().printHelp("hplsql", options);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java b/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java
new file mode 100644
index 0000000..88afbb5
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.net.URL;
+import java.util.HashMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hive.hplsql.Exec.OnError;
+
+/**
+ * HPL/SQL run-time configuration
+ */
+public class Conf extends Configuration {
+
+  // Configuration file and resource names
+  public static final String SITE_XML = "hplsql-site.xml";
+  public static final String DOT_HPLSQLRC = ".hplsqlrc";
+  public static final String HPLSQLRC = "hplsqlrc";
+  public static final String HPLSQL_LOCALS_SQL = "hplsql_locals.sql";
+
+  // Option keys (CONN_CONVERT is a prefix followed by the connection profile name)
+  public static final String CONN_CONVERT = "hplsql.conn.convert.";
+  public static final String CONN_DEFAULT = "hplsql.conn.default";
+  public static final String DUAL_TABLE = "hplsql.dual.table";
+  public static final String INSERT_VALUES = "hplsql.insert.values";
+  public static final String ONERROR = "hplsql.onerror";
+  public static final String TEMP_TABLES = "hplsql.temp.tables";
+  public static final String TEMP_TABLES_SCHEMA = "hplsql.temp.tables.schema";
+  public static final String TEMP_TABLES_LOCATION = "hplsql.temp.tables.location";
+
+  public static final String TRUE = "true";
+  public static final String FALSE = "false";
+  public static final String YES = "yes";
+  public static final String NO = "no";
+
+  public enum InsertValues {NATIVE, SELECT};
+  public enum TempTables {NATIVE, MANAGED};
+
+  public String defaultConnection;
+
+  // Current option values with their defaults
+  OnError onError = OnError.EXCEPTION;
+  InsertValues insertValues = InsertValues.NATIVE;
+  TempTables tempTables = TempTables.NATIVE;
+
+  String dualTable = "default.dual";
+
+  String tempTablesSchema = "";
+  String tempTablesLocation = "/tmp/hplsql";
+
+  // Per-profile "convert SQL on the fly" flags set via hplsql.conn.convert.<name>
+  HashMap<String, Boolean> connConvert = new HashMap<String, Boolean>();
+
+  /**
+   * Set an option by key; keys that are not recognized are silently ignored
+   */
+  public void setOption(String key, String value) {
+    if (key.startsWith(CONN_CONVERT)) {
+      // Strip the whole "hplsql.conn.convert." prefix to get the profile name.
+      // (Was key.substring(19) - off by one: the prefix is 20 characters, so the
+      // trailing '.' stayed attached to the name and getConnectionConvert()
+      // lookups by the plain profile name could never match.)
+      setConnectionConvert(key.substring(CONN_CONVERT.length()), value);
+    }
+    else if (key.compareToIgnoreCase(CONN_DEFAULT) == 0) {
+      defaultConnection = value;
+    }
+    else if (key.compareToIgnoreCase(DUAL_TABLE) == 0) {
+      dualTable = value;
+    }
+    else if (key.compareToIgnoreCase(INSERT_VALUES) == 0) {
+      setInsertValues(value);
+    }
+    else if (key.compareToIgnoreCase(ONERROR) == 0) {
+      setOnError(value);
+    }
+    else if (key.compareToIgnoreCase(TEMP_TABLES) == 0) {
+      setTempTables(value);
+    }
+    else if (key.compareToIgnoreCase(TEMP_TABLES_SCHEMA) == 0) {
+      tempTablesSchema = value;
+    }
+    else if (key.compareToIgnoreCase(TEMP_TABLES_LOCATION) == 0) {
+      tempTablesLocation = value;
+    }
+  }
+
+  /**
+   * Set hplsql.insert.values option (unrecognized values leave the current setting)
+   */
+  private void setInsertValues(String value) {
+    if (value.compareToIgnoreCase("NATIVE") == 0) {
+      insertValues = InsertValues.NATIVE;
+    }
+    else if (value.compareToIgnoreCase("SELECT") == 0) {
+      insertValues = InsertValues.SELECT;
+    }
+  }
+
+  /**
+   * Set hplsql.temp.tables option (unrecognized values leave the current setting)
+   */
+  private void setTempTables(String value) {
+    if (value.compareToIgnoreCase("NATIVE") == 0) {
+      tempTables = TempTables.NATIVE;
+    }
+    else if (value.compareToIgnoreCase("MANAGED") == 0) {
+      tempTables = TempTables.MANAGED;
+    }
+  }
+
+  /**
+   * Set error handling approach (unrecognized values leave the current setting)
+   */
+  private void setOnError(String value) {
+    if (value.compareToIgnoreCase("EXCEPTION") == 0) {
+      onError = OnError.EXCEPTION;
+    }
+    else if (value.compareToIgnoreCase("SETERROR") == 0) {
+      onError = OnError.SETERROR;
+    }
+    else if (value.compareToIgnoreCase("STOP") == 0) {  // was a stand-alone if; made else-if for consistency
+      onError = OnError.STOP;
+    }
+  }
+
+  /**
+   * Set whether convert or not SQL for the specified connection profile
+   * (any value other than true/yes, case-insensitive, means false)
+   */
+  void setConnectionConvert(String name, String value) {
+    boolean convert = false;
+    if (value.compareToIgnoreCase(TRUE) == 0 || value.compareToIgnoreCase(YES) == 0) {
+      convert = true;
+    }
+    connConvert.put(name, convert);
+  }
+
+  /**
+   * Get whether convert or not SQL for the specified connection profile
+   * (false for profiles that were never configured)
+   */
+  boolean getConnectionConvert(String name) {
+    Boolean convert = connConvert.get(name);
+    if (convert != null) {
+      return convert.booleanValue();
+    }
+    return false;
+  }
+
+  /**
+   * Load parameters from hplsql-site.xml on the classpath
+   */
+  public void init() {
+    addResource(SITE_XML);
+  }
+
+  /**
+   * Get the location of the configuration file, or "" if it is not on the classpath
+   */
+  public String getLocation() {
+    URL url = getResource(SITE_XML);
+    if (url != null) {
+      return url.toString();
+    }
+    return "";
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
new file mode 100644
index 0000000..828fbc3
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Stack;
+import java.sql.DriverManager;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.ResultSet;
+
+/**
+ * JDBC connection manager: keeps a pool of open connections per named
+ * connection profile and executes SQL through them.
+ */
+public class Conn {
+
+  public enum Type {DB2, HIVE, MYSQL, TERADATA};
+
+  // All maps are keyed by the connection profile name
+  HashMap<String, Stack<Connection>> connections = new HashMap<String, Stack<Connection>>();  // pooled open connections
+  HashMap<String, String> connStrings = new HashMap<String, String>();                        // "driver;url;user;password"
+  HashMap<String, Type> connTypes = new HashMap<String, Type>();
+
+  HashMap<String, ArrayList<String>> connInits = new HashMap<String, ArrayList<String>>();    // run once per new connection
+  HashMap<String, ArrayList<String>> preSql = new HashMap<String, ArrayList<String>>();       // run once before the next statement
+
+  Exec exec;
+  Timer timer = new Timer();
+  boolean trace = false;
+
+  Conn(Exec e) {
+    exec = e;
+    trace = exec.getTrace();
+  }
+
+  /**
+   * Execute a SQL query.
+   * On success the connection, statement and result set are attached to the
+   * returned Query; the caller must release them via closeQuery().
+   * On failure the exception is stored in the Query instead.
+   */
+  public Query executeQuery(Query query, String connName) {
+    try {
+      Connection conn = getConnection(connName);
+      runPreSql(connName, conn);
+      Statement stmt = conn.createStatement();
+      timer.start();
+      ResultSet rs = stmt.executeQuery(query.sql);
+      timer.stop();
+      query.set(conn, stmt, rs);
+      if (trace) {
+        exec.trace(null, "Query executed successfully (" + timer.format() + ")");
+      }
+    } catch (Exception e) {
+      // NOTE(review): if executeQuery() throws, the statement is not closed and
+      // the connection is not returned to the pool - confirm callers always
+      // call closeQuery() on error as well
+      query.setError(e);
+    }
+    return query;
+  }
+
+  public Query executeQuery(String sql, String connName) {
+    return executeQuery(new Query(sql), connName);
+  }
+
+  /**
+   * Execute a SQL statement.
+   * A result set is attached only when the statement produced one; errors are
+   * stored in the returned Query.
+   */
+  public Query executeSql(String sql, String connName) {
+    Query query = new Query(sql);
+    try {
+      Connection conn = getConnection(connName);
+      runPreSql(connName, conn);
+      Statement stmt = conn.createStatement();
+      ResultSet rs = null;
+      if (stmt.execute(sql)) {
+        rs = stmt.getResultSet();
+      }
+      query.set(conn, stmt, rs);
+    } catch (Exception e) {
+      // NOTE(review): statement/connection are not released on this path either
+      query.setError(e);
+    }
+    return query;
+  }
+
+  /**
+   * Close the query object and return its connection to the pool
+   */
+  public void closeQuery(Query query, String connName) {
+    query.closeStatement();
+    returnConnection(connName, query.getConnection());
+  }
+
+  /**
+   * Run pre-SQL statements registered for the profile, then forget them
+   * @throws SQLException if any pre-SQL statement fails (the rest are skipped
+   *         and the list stays registered for a retry)
+   */
+  void runPreSql(String connName, Connection conn) throws SQLException {
+    ArrayList<String> sqls = preSql.get(connName);
+    if (sqls != null) {
+      Statement s = conn.createStatement();
+      try {
+        for (String sql : sqls) {
+          s.execute(sql);
+        }
+      } finally {
+        s.close();  // close the statement even when a pre-SQL statement throws (was leaked)
+      }
+      preSql.remove(connName);
+    }
+  }
+
+  /**
+   * Get a connection: reuse a pooled one, or open a new one and run any
+   * registered initialization statements on it
+   * @throws Exception if the profile is unknown or the connection cannot be opened
+   */
+  synchronized Connection getConnection(String connName) throws Exception {
+    Stack<Connection> connStack = connections.get(connName);
+    String connStr = connStrings.get(connName);
+    if (connStr == null) {
+      throw new Exception("Unknown connection profile: " + connName);
+    }
+    if (connStack != null && !connStack.empty()) { // Reuse an existing connection
+      return connStack.pop();
+    }
+    Connection c = openConnection(connStr);
+    ArrayList<String> sqls = connInits.get(connName); // Run initialization statements on the connection
+    if (sqls != null) {
+      Statement s = c.createStatement();
+      try {
+        for (String sql : sqls) {
+          s.execute(sql);
+        }
+      } finally {
+        s.close();  // close the statement even when an init statement throws (was leaked)
+      }
+    }
+    return c;
+  }
+
+  /**
+   * Open a new connection.
+   * The connection string format is "driver;url;user;password"; omitted parts
+   * fall back to the Hive JDBC driver defaults below.
+   * @throws Exception if the driver class cannot be loaded or the connection fails
+   */
+  Connection openConnection(String connStr) throws Exception {
+    String driver = "org.apache.hadoop.hive.jdbc.HiveDriver";
+    String url = "jdbc:hive://";
+    String usr = "";
+    String pwd = "";
+    if (connStr != null) {
+      String[] c = connStr.split(";");
+      if (c.length >= 1) {
+        driver = c[0];
+      }
+      if (c.length >= 2) {
+        url = c[1];
+      }
+      if (c.length >= 3) {
+        usr = c[2];
+      }
+      if (c.length >= 4) {
+        pwd = c[3];
+      }
+    }
+    Class.forName(driver);
+    timer.start();
+    Connection conn = DriverManager.getConnection(url, usr, pwd);
+    timer.stop();
+    if (trace) {
+      exec.trace(null, "Open connection: " + url + " (" + timer.format() + ")");
+    }
+    return conn;
+  }
+
+  /**
+   * Get the database type by profile name (null for unknown profiles)
+   */
+  Conn.Type getTypeByProfile(String name) {
+    return connTypes.get(name);
+  }
+
+  /**
+   * Get the database type by connection string; matched by a substring of the
+   * driver class name, defaulting to HIVE
+   */
+  Conn.Type getType(String connStr) {
+    if (connStr.contains("hive.")) {
+      return Type.HIVE;
+    }
+    else if (connStr.contains("db2.")) {
+      return Type.DB2;
+    }
+    else if (connStr.contains("mysql.")) {
+      return Type.MYSQL;
+    }
+    else if (connStr.contains("teradata.")) {
+      return Type.TERADATA;
+    }
+    return Type.HIVE;
+  }
+
+  /**
+   * Return the connection to the pool (expects the profile to have been
+   * registered via addConnection)
+   */
+  void returnConnection(String name, Connection conn) {
+    if (conn != null) {
+      connections.get(name).push(conn);
+    }
+  }
+
+  /**
+   * Add a new connection string under a profile name
+   */
+  public void addConnection(String name, String connStr) {
+    connections.put(name, new Stack<Connection>());
+    connStrings.put(name, connStr);
+    connTypes.put(name, getType(connStr));
+  }
+
+  /**
+   * Add initialization statements for the specified connection
+   * (a semicolon-separated list; empty entries are dropped)
+   */
+  public void addConnectionInit(String name, String connInit) {
+    ArrayList<String> a = new ArrayList<String>();
+    String[] sa = connInit.split(";");
+    for (String s : sa) {
+      s = s.trim();
+      if (!s.isEmpty()) {
+        a.add(s);
+      }
+    }
+    connInits.put(name, a);
+  }
+
+  /**
+   * Add SQL statements to be executed before executing the next SQL statement (pre-SQL)
+   */
+  public void addPreSql(String name, ArrayList<String> sql) {
+    preSql.put(name, sql);
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java b/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java
new file mode 100644
index 0000000..46f98a9
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+/**
+ * On-the-fly SQL Converter
+ */
+public class Converter {
+
+  Exec exec;
+  boolean trace = false;
+
+  Converter(Exec e) {
+    this.exec = e;
+    this.trace = this.exec.getTrace();
+  }
+
+  /**
+   * Convert a source data type to its Hive equivalent.
+   * VARCHAR2 becomes STRING (any length specification is dropped) and NUMBER
+   * becomes DECIMAL (the length specification, if present, is kept); every
+   * other type is returned as written in the source, including its length
+   * specification when one follows it.
+   */
+  String dataType(HplsqlParser.DtypeContext type, HplsqlParser.Dtype_lenContext len) {
+    if (type.T_VARCHAR2() != null) {
+      return "STRING";
+    }
+    if (type.T_NUMBER() != null) {
+      return (len == null) ? "DECIMAL" : "DECIMAL" + exec.getText(len);
+    }
+    if (len != null) {
+      return exec.getText(type, type.getStart(), len.getStop());
+    }
+    return exec.getText(type, type.getStart(), type.getStop());
+  }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/052643cb/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
new file mode 100644
index 0000000..30b98ca
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -0,0 +1,426 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.List;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hive.hplsql.Var;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.tuple.Pair;
+
+/**
+ * Implementation of the HPL/SQL COPY and COPY FROM LOCAL statements:
+ * copies query results to a delimited file, to a file of INSERT statements,
+ * or to a table on another JDBC connection; and uploads local files to HDFS.
+ */
+public class Copy {
+
+  Exec exec;                      // owning interpreter (evaluation, connections, tracing)
+  Timer timer = new Timer();
+  boolean trace = false;
+  boolean info = false;
+
+  long srcSizeInBytes = 0;        // total size of local sources gathered by createLocalFileList()
+
+  // COPY statement options (set by initOptions)
+  String delimiter = "\t";
+  boolean sqlInsert = false;      // emit INSERT statements instead of delimited text
+  String sqlInsertName;           // table name used in generated INSERT statements
+  String targetConn;              // when non-null, copy into a table via this connection profile
+  int batchSize = 1000;           // JDBC batch size for table-to-table copy (<= 1 disables batching)
+
+  // COPY FROM LOCAL options (set by initFileOptions)
+  boolean overwrite = false;      // overwrite existing destination files
+  boolean delete = false;         // delete local source files after a successful copy
+  boolean ignore = false;         // continue past per-file I/O errors
+
+  Copy(Exec e) {
+    exec = e;
+    trace = exec.getTrace();
+    info = exec.getInfo();
+  }
+
+  /**
+   * Run COPY command: execute the source query (or SELECT * from the named
+   * table) and dispatch to a table or file copy depending on options.
+   *
+   * @return 0 on success, 1 on failure (the error is also signaled via exec)
+   */
+  Integer run(HplsqlParser.Copy_stmtContext ctx) {
+    trace(ctx, "COPY");
+    initOptions(ctx);
+    StringBuilder sql = new StringBuilder();
+    String conn = null;
+    if (ctx.table_name() != null) {
+      String table = evalPop(ctx.table_name()).toString();
+      conn = exec.getObjectConnection(ctx.table_name().getText());
+      sql.append("SELECT * FROM ");
+      sql.append(table);
+    }
+    else {
+      sql.append(evalPop(ctx.select_stmt()).toString());
+      conn = exec.getStatementConnection();
+      if (trace) {
+        trace(ctx, "Statement:\n" + sql);
+      }
+    }
+    Query query = exec.executeQuery(ctx, sql.toString(), conn);
+    if (query.error()) {
+      exec.signal(query);
+      return 1;
+    }
+    exec.setSqlSuccess();
+    try {
+      if (targetConn != null) {
+        copyToTable(ctx, query);
+      }
+      else {
+        copyToFile(ctx, query);
+      }
+    }
+    catch (Exception e) {
+      exec.signal(e);
+      return 1;
+    }
+    finally {
+      exec.closeQuery(query, conn);
+    }
+    return 0;
+  }
+
+  /**
+   * Copy the query results to a table on the target connection using a
+   * batched prepared INSERT.
+   * @throws Exception on any JDBC error
+   */
+  void copyToTable(HplsqlParser.Copy_stmtContext ctx, Query query) throws Exception {
+    ResultSet rs = query.getResultSet();
+    if (rs == null) {
+      return;
+    }
+    ResultSetMetaData rm = rs.getMetaData();
+    int cols = rm.getColumnCount();
+    int rows = 0;
+    if (trace) {
+      trace(ctx, "SELECT executed: " + cols + " columns");
+    }
+    Connection conn = exec.getConnection(targetConn);
+    StringBuilder sql = new StringBuilder();
+    sql.append("INSERT INTO " + sqlInsertName + " VALUES (");
+    for (int i = 0; i < cols; i++) {
+      sql.append("?");
+      if (i + 1 < cols) {
+        sql.append(",");
+      }
+    }
+    sql.append(")");
+    PreparedStatement ps = conn.prepareStatement(sql.toString());
+    long start = timer.start();
+    long prev = start;
+    boolean batchOpen = false;
+    try {
+      while (rs.next()) {
+        for (int i = 1; i <= cols; i++) {
+          ps.setObject(i, rs.getObject(i));
+        }
+        rows++;
+        if (batchSize > 1) {
+          ps.addBatch();
+          batchOpen = true;
+          if (rows % batchSize == 0) {
+            ps.executeBatch();
+            batchOpen = false;
+          }
+        }
+        else {
+          ps.executeUpdate();
+        }
+        // Periodic progress trace, at most once per 10 seconds; the guard
+        // (cur - prev > 10000 with prev starting at start) keeps the divisor
+        // (cur - start)/1000 >= 10, so this division cannot be by zero.
+        if (trace && rows % 100 == 0) {
+          long cur = timer.current();
+          if (cur - prev > 10000) {
+            trace(ctx, "Copying rows: " + rows + " (" + rows/((cur - start)/1000) + " rows/sec)");
+            prev = cur;
+          }
+        }
+      }
+      if (batchOpen) {     // flush the final partial batch
+        ps.executeBatch();
+      }
+    }
+    finally {
+      // Bug fix: the statement and connection previously leaked when an
+      // SQLException was thrown mid-copy; always release them.
+      ps.close();
+      exec.returnConnection(targetConn, conn);
+    }
+    exec.setRowCount(rows);
+    long elapsed = timer.stop();
+    if (trace) {
+      // Bug fix: elapsed/1000 is 0 for sub-second copies, which made the old
+      // rows/(elapsed/1000) throw ArithmeticException; compute the rate safely.
+      long rowsPerSec = (elapsed > 0) ? rows * 1000L / elapsed : rows;
+      trace(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rowsPerSec + " rows/sec");
+    }
+  }
+
+  /**
+   * Copy the query results to a local file, either as delimited text or as a
+   * series of "INSERT INTO name VALUES (...);" statements when SQLINSERT is set.
+   * The file is truncated (not appended) if it already exists.
+   * @throws Exception on any JDBC or I/O error
+   */
+  void copyToFile(HplsqlParser.Copy_stmtContext ctx, Query query) throws Exception {
+    ResultSet rs = query.getResultSet();
+    if (rs == null) {
+      return;
+    }
+    ResultSetMetaData rm = rs.getMetaData();
+    String filename = null;
+    if (ctx.copy_target().expr() != null) {
+      filename = evalPop(ctx.copy_target().expr()).toString();
+    }
+    else {
+      filename = ctx.copy_target().getText();
+    }
+    byte[] del = delimiter.getBytes();
+    byte[] rowdel = "\n".getBytes();
+    byte[] nullstr = "NULL".getBytes();
+    int cols = rm.getColumnCount();
+    int rows = 0;
+    if (trace) {
+      trace(ctx, "SELECT executed: " + cols + " columns, output file: " + filename);
+    }
+    java.io.File file = new java.io.File(filename);
+    FileOutputStream out = null;
+    try {
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+      out = new FileOutputStream(file, false /*append*/);
+      String col;
+      String sql = "";
+      if (sqlInsert) {
+        sql = "INSERT INTO " + sqlInsertName + " VALUES (";
+        rowdel = ");\n".getBytes();
+      }
+      while (rs.next()) {
+        if (sqlInsert) {
+          out.write(sql.getBytes());
+        }
+        for (int i = 1; i <= cols; i++) {
+          if (i > 1) {
+            out.write(del);
+          }
+          col = rs.getString(i);
+          if (col != null) {
+            if (sqlInsert) {
+              col = Utils.quoteString(col);   // quote/escape for SQL literal
+            }
+            out.write(col.getBytes());
+          }
+          else if (sqlInsert) {
+            // NULL columns become the literal NULL in INSERT mode; in
+            // delimited mode they are written as an empty field.
+            out.write(nullstr);
+          }
+        }
+        out.write(rowdel);
+        rows++;
+      }
+      exec.setRowCount(rows);
+    }
+    finally {
+      if (out != null) {
+        out.close();
+      }
+    }
+    if (trace) {
+      trace(ctx, "COPY rows: " + rows);
+    }
+  }
+
+  /**
+   * Run COPY FROM LOCAL statement: upload one or more local files or
+   * directory trees to the destination file system, preserving relative
+   * subdirectory structure when multiple files are copied.
+   *
+   * @return 0 on success, 1 on failure; the host code is set accordingly
+   */
+  public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
+    trace(ctx, "COPY FROM LOCAL");
+    initFileOptions(ctx.copy_file_option());
+    // key: absolute local path; value: (relative subdir or null, file size)
+    HashMap<String, Pair<String, Long>> src = new HashMap<String, Pair<String, Long>>();
+    int cnt = ctx.copy_source().size();
+    for (int i = 0; i < cnt; i++) {
+      createLocalFileList(src, evalPop(ctx.copy_source(i)).toString(), null);
+    }
+    String dest = evalPop(ctx.copy_target()).toString();
+    if (info) {
+      info(ctx, "Files to copy: " + src.size() + " (" + Utils.formatSizeInBytes(srcSizeInBytes) + ")");
+    }
+    timer.start();
+    File file = new File();
+    FileSystem fs = null;
+    int succeed = 0;
+    int failed = 0;
+    long copiedSize = 0;
+    try {
+      fs = file.createFs();
+      // With multiple sources the destination is treated as a directory and
+      // each file keeps its name (and relative subpath); with a single source
+      // the destination is the target path itself.
+      boolean multi = false;
+      if (src.size() > 1) {
+        multi = true;
+      }
+      for (Map.Entry<String, Pair<String, Long>> i : src.entrySet()) {
+        try {
+          Path s = new Path(i.getKey());
+          Path d = null;
+          if (multi) {
+            String relativePath = i.getValue().getLeft();
+            if (relativePath == null) {
+              d = new Path(dest, s.getName());
+            }
+            else {
+              d = new Path(dest, relativePath + java.io.File.separator + s.getName());
+            }
+          }
+          else {
+            d = new Path(dest);
+          }
+          fs.copyFromLocalFile(delete, overwrite, s, d);
+          succeed++;
+          long size = i.getValue().getRight();
+          copiedSize += size;
+          if (info) {
+            info(ctx, "Copied: " + file.resolvePath(d) + " (" + Utils.formatSizeInBytes(size) + ")");
+          }
+        }
+        catch(IOException e) {
+          failed++;
+          if (!ignore) {     // IGNORE option: keep copying the remaining files
+            throw e;
+          }
+        }
+      }
+    }
+    catch(IOException e) {
+      exec.signal(e);
+      exec.setHostCode(1);
+      return 1;
+    }
+    finally {
+      long elapsed = timer.stop();
+      if (info) {
+        info(ctx, "COPY completed: " + succeed + " succeed, " + failed + " failed, " +
+              timer.format() + ", " + Utils.formatSizeInBytes(copiedSize) + ", " +
+              Utils.formatBytesPerSec(copiedSize, elapsed));
+      }
+      if (failed == 0) {
+        exec.setHostCode(0);
+      }
+      else {
+        exec.setHostCode(1);
+      }
+      file.close();
+    }
+    return 0;
+  }
+
+  /**
+   * Create the list of local files for the specified path, recursing into
+   * subdirectories. Accumulates total size into srcSizeInBytes.
+   *
+   * @param list          output map: absolute path -> (relative subdir or null, size)
+   * @param path          local file or directory to scan
+   * @param relativePath  subdirectory path relative to the original root (null at top level)
+   */
+  void createLocalFileList(HashMap<String, Pair<String, Long>> list, String path, String relativePath) {
+    java.io.File file = new java.io.File(path);
+    if (file.exists()) {
+      if (file.isDirectory()) {
+        // Bug fix: listFiles() can return null on an I/O error even for an
+        // existing directory; guard against NPE and just skip it.
+        java.io.File[] children = file.listFiles();
+        if (children == null) {
+          return;
+        }
+        for (java.io.File i : children) {
+          if (i.isDirectory()) {
+            String rel = null;
+            if (relativePath == null) {
+              rel = i.getName();
+            }
+            else {
+              rel = relativePath + java.io.File.separator + i.getName();
+            }
+            createLocalFileList(list, i.getAbsolutePath(), rel);
+          }
+          else {
+            long size = i.length();
+            list.put(i.getAbsolutePath(), Pair.of(relativePath, size));
+            srcSizeInBytes += size;
+          }
+        }
+      }
+      else {
+        long size = file.length();
+        list.put(file.getAbsolutePath(), Pair.of(relativePath, size));
+        srcSizeInBytes += size;
+      }
+    }
+  }
+
+  /**
+   * Initialize COPY command options (DELIMITER, SQLINSERT, AT <conn>, BATCHSIZE)
+   * from the parsed statement.
+   */
+  void initOptions(HplsqlParser.Copy_stmtContext ctx) {
+    int cnt = ctx.copy_option().size();
+    for (int i = 0; i < cnt; i++) {
+      HplsqlParser.Copy_optionContext option = ctx.copy_option(i);
+      if (option.T_DELIMITER() != null) {
+        // Unescape so the user can write e.g. "\t" or "\n" in the statement
+        delimiter = StringEscapeUtils.unescapeJava(evalPop(option.expr()).toString());
+      }
+      else if (option.T_SQLINSERT() != null) {
+        sqlInsert = true;
+        delimiter = ", ";
+        if (option.ident() != null) {
+          sqlInsertName = option.ident().getText();
+        }
+      }
+      else if (option.T_AT() != null) {
+        // AT <conn>: copy into a table on another connection; the statement
+        // target names the destination table
+        targetConn = option.ident().getText();
+        sqlInsertName = ctx.copy_target().ident().getText();
+      }
+      else if (option.T_BATCHSIZE() != null) {
+        batchSize = evalPop(option.expr()).intValue();
+      }
+    }
+  }
+
+  /**
+   * Initialize COPY FROM LOCAL file options (OVERWRITE, DELETE, IGNORE).
+   */
+  void initFileOptions(List<HplsqlParser.Copy_file_optionContext> options) {
+    srcSizeInBytes = 0;
+    for (HplsqlParser.Copy_file_optionContext i : options) {
+      if (i.T_OVERWRITE() != null) {
+        overwrite = true;
+      }
+      else if (i.T_DELETE() != null) {
+        delete = true;
+      }
+      else if (i.T_IGNORE() != null) {
+        ignore = true;
+      }
+    }
+  }
+
+  /**
+   * Evaluate the expression and pop its value from the interpreter stack;
+   * returns Var.Empty when evaluation left nothing on the stack.
+   */
+  Var evalPop(ParserRuleContext ctx) {
+    exec.visit(ctx);
+    if (!exec.stack.isEmpty()) {
+      return exec.stackPop();
+    }
+    return Var.Empty;
+  }
+
+  /**
+   * Trace and information
+   */
+  public void trace(ParserRuleContext ctx, String message) {
+    exec.trace(ctx, message);
+  }
+
+  public void info(ParserRuleContext ctx, String message) {
+    exec.info(ctx, message);
+  }
+}