Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC
svn commit: r901644 [25/37] - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/
ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java Thu Jan 21 10:37:58 2010
@@ -38,38 +38,37 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
/**
- * Implements predicate pushdown. Predicate pushdown is a term borrowed from relational
- * databases even though for Hive it is predicate pushup.
- * The basic idea is to process expressions as early in the plan as possible. The default plan
- * generation adds filters where they are seen but in some instances some of the filter expressions
- * can be pushed nearer to the operator that sees this particular data for the first time.
- * e.g.
- * select a.*, b.*
- * from a join b on (a.col1 = b.col1)
- * where a.col1 > 20 and b.col2 > 40
- *
- * For the above query, the predicates (a.col1 > 20) and (b.col2 > 40), without predicate pushdown,
- * would be evaluated after the join processing has been done. Suppose the two predicates filter out
- * most of the rows from a and b, the join is unnecessarily processing these rows.
- * With predicate pushdown, these two predicates will be processed before the join.
+ * Implements predicate pushdown. Predicate pushdown is a term borrowed from
+ * relational databases even though for Hive it is predicate pushup. The basic
+ * idea is to process expressions as early in the plan as possible. The default
+ * plan generation adds filters where they are seen but in some instances some
+ * of the filter expressions can be pushed nearer to the operator that sees this
+ * particular data for the first time. e.g. select a.*, b.* from a join b on
+ * (a.col1 = b.col1) where a.col1 > 20 and b.col2 > 40
*
- * Predicate pushdown is enabled by setting hive.optimize.ppd to true. It is disable by default.
+ * For the above query, the predicates (a.col1 > 20) and (b.col2 > 40), without
+ * predicate pushdown, would be evaluated after the join processing has been
+ * done. Suppose the two predicates filter out most of the rows from a and b,
+ * the join is unnecessarily processing these rows. With predicate pushdown,
+ * these two predicates will be processed before the join.
*
- * The high-level algorithm is describe here
- * - An operator is processed after all its children have been processed
- * - An operator processes its own predicates and then merges (conjunction) with the processed
- * predicates of its children. In case of multiple children, there are combined using
- * disjunction (OR).
- * - A predicate expression is processed for an operator using the following steps
- * - If the expr is a constant then it is a candidate for predicate pushdown
- * - If the expr is a col reference then it is a candidate and its alias is noted
- * - If the expr is an index and both the array and index expr are treated as children
- * - If the all child expr are candidates for pushdown and all of the expression reference
- * only one alias from the operator's RowResolver then the current expression is also a
- * candidate
- * One key thing to note is that some operators (Select, ReduceSink, GroupBy, Join etc) change
- * the columns as data flows through them. In such cases the column references are replaced by
- * the corresponding expression in the input data.
+ * Predicate pushdown is enabled by setting hive.optimize.ppd to true. It is
+ * disabled by default.
+ *
+ * The high-level algorithm is described here:
+ * - An operator is processed after all its children have been processed.
+ * - An operator processes its own predicates and then merges them
+ * (conjunction) with the processed predicates of its children. In case of
+ * multiple children, they are combined using disjunction (OR).
+ * - A predicate expression is processed for an operator using the following
+ * steps:
+ *   - If the expr is a constant then it is a candidate for predicate
+ *     pushdown.
+ *   - If the expr is a col reference then it is a candidate and its alias
+ *     is noted.
+ *   - If the expr is an index access then both the array and the index expr
+ *     are treated as children.
+ *   - If all the child exprs are candidates for pushdown and all of the
+ *     expressions reference only one alias from the operator's RowResolver
+ *     then the current expression is also a candidate.
+ * One key thing to note is that some operators (Select, ReduceSink, GroupBy,
+ * Join etc.) change the columns as data flows through them. In such cases the
+ * column references are replaced by the corresponding expression in the input
+ * data.
*/
public class PredicatePushDown implements Transform {
@@ -78,12 +77,12 @@
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
- this.pGraphContext = pctx;
- this.opToParseCtxMap = pGraphContext.getOpParseCtx();
+ pGraphContext = pctx;
+ opToParseCtxMap = pGraphContext.getOpParseCtx();
// create the context for walking operators
OpWalkerInfo opWalkerInfo = new OpWalkerInfo(opToParseCtxMap);
-
+
Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
opRules.put(new RuleRegExp("R1", "FIL%"), OpProcFactory.getFilterProc());
opRules.put(new RuleRegExp("R3", "JOIN%"), OpProcFactory.getJoinProc());
@@ -92,10 +91,12 @@
opRules.put(new RuleRegExp("R6", "SCR%"), OpProcFactory.getSCRProc());
opRules.put(new RuleRegExp("R6", "LIM%"), OpProcFactory.getLIMProc());
- // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
- Dispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, opWalkerInfo);
+ // The dispatcher fires the processor corresponding to the closest matching
+ // rule and passes the context along
+ Dispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(),
+ opRules, opWalkerInfo);
GraphWalker ogw = new DefaultGraphWalker(disp);
-
+
// Create a list of topop nodes
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(pGraphContext.getTopOps().values());
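For readers following the Javadoc above, here is a minimal, self-contained sketch of the candidacy rules it lists (illustrative only, not part of this commit; Hive's real logic lives in OpProcFactory and works on ExprNodeDesc). An expression is a pushdown candidate if it is a constant, a column reference, or a composite whose children are all candidates and whose column references resolve to a single alias:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Toy expression model standing in for Hive's ExprNodeDesc hierarchy.
    abstract class Expr {
      List<Expr> children = Collections.emptyList();
    }

    class Constant extends Expr {
    }

    class ColumnRef extends Expr {
      final String alias;

      ColumnRef(String alias) {
        this.alias = alias;
      }
    }

    class Call extends Expr { // e.g. the ">" in a.col1 > 20
      Call(Expr... args) {
        children = java.util.Arrays.asList(args);
      }
    }

    class PpdCandidate {
      // Returns true if expr may be evaluated below the current operator.
      static boolean isCandidate(Expr expr, Set<String> aliases) {
        if (expr instanceof Constant) {
          return true; // a constant is always a candidate
        }
        if (expr instanceof ColumnRef) {
          aliases.add(((ColumnRef) expr).alias); // note the alias
          return true;
        }
        for (Expr child : expr.children) {
          if (!isCandidate(child, aliases)) {
            return false; // every child must be a candidate
          }
        }
        return aliases.size() <= 1; // all references from one alias only
      }

      public static void main(String[] args) {
        Expr pred = new Call(new ColumnRef("a"), new Constant()); // a.col1 > 20
        System.out.println(isCandidate(pred, new HashSet<String>())); // true
      }
    }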
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java Thu Jan 21 10:37:58 2010
@@ -26,20 +26,22 @@
public class AddResourceProcessor implements CommandProcessor {
- public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class.getName());
+ public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class
+ .getName());
public static final LogHelper console = new LogHelper(LOG);
public void init() {
}
-
+
public int run(String command) {
SessionState ss = SessionState.get();
String[] tokens = command.split("\\s+");
SessionState.ResourceType t;
- if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[0])) == null) {
- console.printError("Usage: add [" +
- StringUtils.join(SessionState.ResourceType.values(),"|") +
- "] <value> [<value>]*");
+ if (tokens.length < 2
+ || (t = SessionState.find_resource_type(tokens[0])) == null) {
+ console.printError("Usage: add ["
+ + StringUtils.join(SessionState.ResourceType.values(), "|")
+ + "] <value> [<value>]*");
return 1;
}
for (int i = 1; i < tokens.length; i++) {
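A usage sketch for the processor above (hypothetical harness, not part of this commit; the HiveConf(SessionState.class) bootstrap and the /tmp/my.jar path are illustrative). The leading "add" keyword is stripped by the CLI before run() is invoked, so tokens[0] is the resource type:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.processors.AddResourceProcessor;
    import org.apache.hadoop.hive.ql.processors.CommandProcessor;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class AddResourceDemo {
      public static void main(String[] args) {
        // Bootstrap a session; AddResourceProcessor reads SessionState.get().
        SessionState.start(new HiveConf(SessionState.class));
        CommandProcessor proc = new AddResourceProcessor();
        proc.init();
        // tokens[0] ("jar") selects the ResourceType (FILE|JAR|ARCHIVE); the
        // remaining tokens are resource values added to the session in turn.
        int rc = proc.run("jar /tmp/my.jar");
        System.exit(rc); // 0 on success, 1 on a usage error
      }
    }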
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java Thu Jan 21 10:37:58 2010
@@ -20,5 +20,6 @@
public interface CommandProcessor {
public void init();
+
public int run(String command);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Thu Jan 21 10:37:58 2010
@@ -18,16 +18,16 @@
package org.apache.hadoop.hive.ql.processors;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.Driver;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
public class CommandProcessorFactory {
-
+
public static CommandProcessor get(String cmd) {
String cmdl = cmd.toLowerCase();
- if(cmdl.equals("set")) {
+ if (cmdl.equals("set")) {
return new SetProcessor();
} else if (cmdl.equals("dfs")) {
SessionState ss = SessionState.get();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java Thu Jan 21 10:37:58 2010
@@ -24,35 +24,36 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-
public class DeleteResourceProcessor implements CommandProcessor {
- public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName());
+ public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class
+ .getName());
public static final LogHelper console = new LogHelper(LOG);
public void init() {
}
-
+
public int run(String command) {
SessionState ss = SessionState.get();
String[] tokens = command.split("\\s+");
SessionState.ResourceType t;
- if(tokens.length < 1 || (t = SessionState.find_resource_type(tokens[0])) == null) {
- console.printError("Usage: delete [" +
- StringUtils.join(SessionState.ResourceType.values(),"|") +
- "] <value> [<value>]*");
+ if (tokens.length < 1
+ || (t = SessionState.find_resource_type(tokens[0])) == null) {
+ console.printError("Usage: delete ["
+ + StringUtils.join(SessionState.ResourceType.values(), "|")
+ + "] <value> [<value>]*");
return 1;
}
if (tokens.length >= 2) {
- for(int i = 1; i < tokens.length; i++) {
+ for (int i = 1; i < tokens.length; i++) {
ss.delete_resource(t, tokens[i]);
}
} else {
ss.delete_resource(t);
}
-
+
return 0;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java Thu Jan 21 10:37:58 2010
@@ -32,7 +32,7 @@
public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName());
public static final LogHelper console = new LogHelper(LOG);
- private FsShell dfs;
+ private final FsShell dfs;
public DfsProcessor(Configuration conf) {
dfs = new FsShell(conf);
@@ -40,7 +40,7 @@
public void init() {
}
-
+
public int run(String command) {
String[] tokens = command.split("\\s+");
@@ -48,12 +48,12 @@
SessionState ss = SessionState.get();
PrintStream oldOut = System.out;
- if(ss != null && ss.out != null) {
+ if (ss != null && ss.out != null) {
System.setOut(ss.out);
}
int ret = dfs.run(tokens);
- if(ret != 0) {
+ if (ret != 0) {
console.printError("Command failed with exit code = " + ret);
}
@@ -61,8 +61,9 @@
return (ret);
} catch (Exception e) {
- console.printError("Exception raised from DFSShell.run " + e.getLocalizedMessage(),
- org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError("Exception raised from DFSShell.run "
+ + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils
+ .stringifyException(e));
return 1;
}
}
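A sketch of how the processor above is driven (hypothetical, not part of this commit): the command arrives with the leading "dfs" keyword already stripped, run() tokenizes it and hands the tokens to FsShell, and System.out is temporarily redirected to the session's output stream so results land in the right place:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.processors.DfsProcessor;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class DfsDemo {
      public static void main(String[] args) {
        SessionState.start(new HiveConf(SessionState.class));
        DfsProcessor dfs = new DfsProcessor(new Configuration());
        dfs.init();
        // Equivalent of typing "dfs -ls /;" in the CLI: the whitespace-split
        // tokens are passed straight through to Hadoop's FsShell.
        int rc = dfs.run("-ls /");
        System.exit(rc); // non-zero is reported via console.printError
      }
    }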
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Thu Jan 21 10:37:58 2010
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.processors;
-import java.util.*;
+import java.util.Properties;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -27,64 +27,67 @@
private static String prefix = "set: ";
public static boolean getBoolean(String value) {
- if(value.equals("on") || value.equals("true"))
+ if (value.equals("on") || value.equals("true")) {
return true;
- if(value.equals("off") || value.equals("false"))
+ }
+ if (value.equals("off") || value.equals("false")) {
return false;
- throw new IllegalArgumentException(prefix + "'" + value + "' is not a boolean");
+ }
+ throw new IllegalArgumentException(prefix + "'" + value
+ + "' is not a boolean");
}
private void dumpOptions(Properties p) {
SessionState ss = SessionState.get();
ss.out.println("silent=" + (ss.getIsSilent() ? "on" : "off"));
- for(Object one: p.keySet()) {
- String oneProp = (String)one;
+ for (Object one : p.keySet()) {
+ String oneProp = (String) one;
String oneValue = p.getProperty(oneProp);
- ss.out.println(oneProp+"="+oneValue);
+ ss.out.println(oneProp + "=" + oneValue);
}
}
private void dumpOption(Properties p, String s) {
SessionState ss = SessionState.get();
-
- if(p.getProperty(s) != null) {
- ss.out.println(s+"="+p.getProperty(s));
+
+ if (p.getProperty(s) != null) {
+ ss.out.println(s + "=" + p.getProperty(s));
} else {
- ss.out.println(s+" is undefined");
+ ss.out.println(s + " is undefined");
}
}
public void init() {
}
-
+
public int run(String command) {
SessionState ss = SessionState.get();
String nwcmd = command.trim();
- if(nwcmd.equals("")) {
+ if (nwcmd.equals("")) {
dumpOptions(ss.getConf().getChangedProperties());
return 0;
}
- if(nwcmd.equals("-v")) {
+ if (nwcmd.equals("-v")) {
dumpOptions(ss.getConf().getAllProperties());
return 0;
}
- String[] part = new String [2];
+ String[] part = new String[2];
int eqIndex = nwcmd.indexOf('=');
- if(eqIndex == -1) {
+ if (eqIndex == -1) {
// no equality sign - print the property out
dumpOption(ss.getConf().getAllProperties(), nwcmd);
return (0);
- } else if (eqIndex == nwcmd.length()-1) {
- part[0] = nwcmd.substring(0, nwcmd.length()-1);
+ } else if (eqIndex == nwcmd.length() - 1) {
+ part[0] = nwcmd.substring(0, nwcmd.length() - 1);
part[1] = "";
} else {
part[0] = nwcmd.substring(0, eqIndex).trim();
- part[1] = nwcmd.substring(eqIndex+1).trim();
+ part[1] = nwcmd.substring(eqIndex + 1).trim();
}
try {
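The branches above are easiest to see with a few concrete inputs (hypothetical harness, not part of this commit; hive.some.key is an illustrative property name):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.processors.SetProcessor;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class SetDemo {
      public static void main(String[] args) {
        SessionState.start(new HiveConf(SessionState.class));
        SetProcessor set = new SetProcessor();
        set.init();
        set.run("");                       // dumps properties changed this session
        set.run("-v");                     // dumps all properties
        set.run("hive.optimize.ppd");      // no '=': prints value or "is undefined"
        set.run("hive.optimize.ppd=true"); // '=': trims both sides, sets the var
        set.run("hive.some.key=");         // trailing '=': sets the empty string
      }
    }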
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Thu Jan 21 10:37:58 2010
@@ -18,29 +18,33 @@
package org.apache.hadoop.hive.ql.session;
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.URL;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.log4j.*;
-import java.net.URL;
-
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.history.HiveHistory;
-
-import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.PropertyConfigurator;
/**
* SessionState encapsulates common data associated with a session
- *
- * Also provides support for a thread static session object that can
- * be accessed from any point in the code to interact with the user
- * and to retrieve configuration information
+ *
+ * Also provides support for a thread static session object that can be accessed
+ * from any point in the code to interact with the user and to retrieve
+ * configuration information
*/
public class SessionState {
@@ -55,7 +59,7 @@
protected boolean isSilent;
/*
- * HiveHistory Object
+ * HiveHistory Object
*/
protected HiveHistory hiveHist;
/**
@@ -70,8 +74,9 @@
*/
private String commandType;
-
- public HiveConf getConf() { return conf; }
+ public HiveConf getConf() {
+ return conf;
+ }
public void setConf(HiveConf conf) {
this.conf = conf;
@@ -89,7 +94,7 @@
this(null);
}
- public SessionState (HiveConf conf) {
+ public SessionState(HiveConf conf) {
this.conf = conf;
}
@@ -102,7 +107,6 @@
return (conf.getVar(HiveConf.ConfVars.HIVEQUERYSTRING));
}
-
public String getQueryId() {
return (conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
}
@@ -113,15 +117,15 @@
/**
* Singleton Session object per thread.
- *
+ *
**/
- private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState> ();
+ private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>();
/**
* start a new session and set it to current session
*/
public static SessionState start(HiveConf conf) {
- SessionState ss = new SessionState (conf);
+ SessionState ss = new SessionState(conf);
ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
ss.hiveHist = new HiveHistory(ss);
tss.set(ss);
@@ -129,18 +133,20 @@
}
/**
- * set current session to existing session object
- * if a thread is running multiple sessions - it must call this method with the new
- * session object when switching from one session to another
+ * Set the current session to an existing session object. If a thread is
+ * running multiple sessions, it must call this method with the new session
+ * object when switching from one session to another.
*/
public static SessionState start(SessionState startSs) {
tss.set(startSs);
- if(StringUtils.isEmpty(startSs.getConf().getVar(HiveConf.ConfVars.HIVESESSIONID))) {
- startSs.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
+ if (StringUtils.isEmpty(startSs.getConf().getVar(
+ HiveConf.ConfVars.HIVESESSIONID))) {
+ startSs.getConf()
+ .setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
}
- if (startSs.hiveHist == null){
+ if (startSs.hiveHist == null) {
startSs.hiveHist = new HiveHistory(startSs);
}
return startSs;
@@ -153,34 +159,32 @@
return tss.get();
}
-
/**
* get hiveHistory object which does structured logging
+ *
* @return The hive history object
*/
- public HiveHistory getHiveHistory(){
+ public HiveHistory getHiveHistory() {
return hiveHist;
}
-
private static String makeSessionId() {
GregorianCalendar gc = new GregorianCalendar();
String userid = System.getProperty("user.name");
- return userid + "_" +
- String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
- gc.get(Calendar.MONTH) + 1,
- gc.get(Calendar.DAY_OF_MONTH),
- gc.get(Calendar.HOUR_OF_DAY),
- gc.get(Calendar.MINUTE));
+ return userid
+ + "_"
+ + String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
+ gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
+ .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
}
public static final String HIVE_L4J = "hive-log4j.properties";
- public static void initHiveLog4j () {
+ public static void initHiveLog4j() {
// allow hive log4j to override any normal initialized one
URL hive_l4j = SessionState.class.getClassLoader().getResource(HIVE_L4J);
- if(hive_l4j == null) {
+ if (hive_l4j == null) {
System.out.println(HIVE_L4J + " not found");
} else {
LogManager.resetConfiguration();
@@ -189,14 +193,17 @@
}
/**
- * This class provides helper routines to emit informational and error messages to the user
- * and log4j files while obeying the current session's verbosity levels.
- *
- * NEVER write directly to the SessionStates standard output other than to emit result data
- * DO use printInfo and printError provided by LogHelper to emit non result data strings
- *
- * It is perfectly acceptable to have global static LogHelper objects (for example - once per module)
- * LogHelper always emits info/error to current session as required.
+ * This class provides helper routines to emit informational and error
+ * messages to the user and log4j files while obeying the current session's
+ * verbosity levels.
+ *
+ * NEVER write directly to the SessionState's standard output other than to
+ * emit result data. DO use printInfo and printError provided by LogHelper to
+ * emit non-result data strings.
+ *
+ * It is perfectly acceptable to have global static LogHelper objects (for
+ * example, once per module); LogHelper always emits info/error to the current
+ * session as required.
*/
public static class LogHelper {
@@ -233,7 +240,7 @@
}
public void printInfo(String info, String detail) {
- if(!getIsSilent()) {
+ if (!getIsSilent()) {
getErrStream().println(info);
}
LOG.info(info + StringUtils.defaultString(detail));
@@ -250,11 +257,12 @@
}
private static LogHelper _console;
+
/**
* initialize or retrieve console object for SessionState
*/
public static LogHelper getConsole() {
- if(_console == null) {
+ if (_console == null) {
Log LOG = LogFactory.getLog("SessionState");
_console = new LogHelper(LOG);
}
@@ -267,15 +275,16 @@
Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
try {
- if(Utilities.realFile(newFile, conf) != null)
+ if (Utilities.realFile(newFile, conf) != null) {
return newFile;
- else {
+ } else {
console.printError(newFile + " does not exist");
return null;
}
} catch (IOException e) {
- console.printError("Unable to validate " + newFile + "\nException: " + e.getMessage(),
- "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError("Unable to validate " + newFile + "\nException: "
+ + e.getMessage(), "\n"
+ + org.apache.hadoop.util.StringUtils.stringifyException(e));
return null;
}
}
@@ -289,8 +298,9 @@
console.printInfo("Added " + newJar + " to class path");
return true;
} catch (Exception e) {
- console.printError("Unable to register " + newJar + "\nException: " + e.getMessage(),
- "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError("Unable to register " + newJar + "\nException: "
+ + e.getMessage(), "\n"
+ + org.apache.hadoop.util.StringUtils.stringifyException(e));
return false;
}
}
@@ -302,39 +312,54 @@
console.printInfo("Deleted " + jarsToUnregister + " from class path");
return true;
} catch (Exception e) {
- console.printError("Unable to unregister " + jarsToUnregister + "\nException: " + e.getMessage(),
- "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ console.printError("Unable to unregister " + jarsToUnregister
+ + "\nException: " + e.getMessage(), "\n"
+ + org.apache.hadoop.util.StringUtils.stringifyException(e));
return false;
}
}
public static interface ResourceHook {
public String preHook(Set<String> cur, String s);
+
public boolean postHook(Set<String> cur, String s);
}
public static enum ResourceType {
- FILE(new ResourceHook () {
- public String preHook(Set<String> cur, String s) { return validateFile(cur, s); }
- public boolean postHook(Set<String> cur, String s) { return true; }
- }),
-
- JAR(new ResourceHook () {
- public String preHook(Set<String> cur, String s) {
- String newJar = validateFile(cur, s);
- if(newJar != null) {
- return (registerJar(newJar) ? newJar : null);
- } else {
- return null;
- }
+ FILE(new ResourceHook() {
+ public String preHook(Set<String> cur, String s) {
+ return validateFile(cur, s);
+ }
+
+ public boolean postHook(Set<String> cur, String s) {
+ return true;
+ }
+ }),
+
+ JAR(new ResourceHook() {
+ public String preHook(Set<String> cur, String s) {
+ String newJar = validateFile(cur, s);
+ if (newJar != null) {
+ return (registerJar(newJar) ? newJar : null);
+ } else {
+ return null;
}
- public boolean postHook(Set<String> cur, String s) { return unregisterJar(s); }
- }),
+ }
+
+ public boolean postHook(Set<String> cur, String s) {
+ return unregisterJar(s);
+ }
+ }),
+
+ ARCHIVE(new ResourceHook() {
+ public String preHook(Set<String> cur, String s) {
+ return validateFile(cur, s);
+ }
- ARCHIVE(new ResourceHook () {
- public String preHook(Set<String> cur, String s) { return validateFile(cur, s); }
- public boolean postHook(Set<String> cur, String s) { return true; }
- });
+ public boolean postHook(Set<String> cur, String s) {
+ return true;
+ }
+ });
public ResourceHook hook;
@@ -353,8 +378,8 @@
}
// try singular
- if(s.endsWith("S")) {
- s = s.substring(0, s.length()-1);
+ if (s.endsWith("S")) {
+ s = s.substring(0, s.length() - 1);
} else {
return null;
}
@@ -366,44 +391,46 @@
return null;
}
- private HashMap<ResourceType, HashSet<String>> resource_map = new HashMap<ResourceType, HashSet<String>> ();
+ private final HashMap<ResourceType, HashSet<String>> resource_map = new HashMap<ResourceType, HashSet<String>>();
public void add_resource(ResourceType t, String value) {
- if(resource_map.get(t) == null) {
- resource_map.put(t, new HashSet<String> ());
+ if (resource_map.get(t) == null) {
+ resource_map.put(t, new HashSet<String>());
}
String fnlVal = value;
- if(t.hook != null) {
+ if (t.hook != null) {
fnlVal = t.hook.preHook(resource_map.get(t), value);
- if(fnlVal == null)
+ if (fnlVal == null) {
return;
+ }
}
resource_map.get(t).add(fnlVal);
}
public boolean delete_resource(ResourceType t, String value) {
- if(resource_map.get(t) == null) {
+ if (resource_map.get(t) == null) {
return false;
}
- if(t.hook != null) {
- if(!t.hook.postHook(resource_map.get(t), value))
+ if (t.hook != null) {
+ if (!t.hook.postHook(resource_map.get(t), value)) {
return false;
+ }
}
return (resource_map.get(t).remove(value));
}
public Set<String> list_resource(ResourceType t, List<String> filter) {
- if(resource_map.get(t) == null) {
+ if (resource_map.get(t) == null) {
return null;
}
Set<String> orig = resource_map.get(t);
- if(filter == null) {
+ if (filter == null) {
return orig;
} else {
- Set<String> fnl = new HashSet<String> ();
- for(String one: orig) {
- if(filter.contains(one)) {
+ Set<String> fnl = new HashSet<String>();
+ for (String one : orig) {
+ if (filter.contains(one)) {
fnl.add(one);
}
}
@@ -412,11 +439,11 @@
}
public void delete_resource(ResourceType t) {
- if(resource_map.get(t) != null) {
- for(String value : resource_map.get(t)) {
+ if (resource_map.get(t) != null) {
+ for (String value : resource_map.get(t)) {
delete_resource(t, value);
}
- resource_map.remove (t);
+ resource_map.remove(t);
}
}
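To tie the pieces above together, a short walk through the resource API (hypothetical, not part of this commit; the try-singular fallback is shown in the code above, and the assumption here is that find_resource_type upper-cases its argument first):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class ResourceDemo {
      public static void main(String[] args) {
        SessionState ss = SessionState.start(new HiveConf(SessionState.class));

        // "JARS" resolves to ResourceType.JAR via the try-singular fallback.
        SessionState.ResourceType t = SessionState.find_resource_type("JARS");

        // add_resource runs the preHook first: for JAR it validates the file
        // and registers it on the class path; a null preHook result aborts.
        ss.add_resource(t, "/tmp/my.jar");

        // delete_resource runs the postHook: for JAR it unregisters the jar
        // before the value is removed from the session's resource map.
        ss.delete_resource(t, "/tmp/my.jar");
      }
    }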
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java Thu Jan 21 10:37:58 2010
@@ -16,7 +16,6 @@
* limitations under the License.
*/
-
package org.apache.hadoop.hive.ql.tools;
import java.io.IOException;
@@ -29,9 +28,9 @@
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
-import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -42,13 +41,12 @@
/**
*
- * This class prints out the lineage info.
- * It takes sql as input and prints lineage info.
- * Currently this prints only input and output tables for a given sql.
- * Later we can expand to add join tables etc.
- *
+ * This class prints out the lineage info. It takes SQL as input and prints
+ * lineage info. Currently this prints only input and output tables for a
+ * given SQL query. Later we can expand to add join tables etc.
+ *
*/
-public class LineageInfo implements NodeProcessor {
+public class LineageInfo implements NodeProcessor {
/**
* Stores input tables in sql
@@ -57,11 +55,11 @@
/**
* Stores output tables in sql
*/
- TreeSet<String> OutputTableList= new TreeSet<String>();
+ TreeSet<String> OutputTableList = new TreeSet<String>();
/**
*
- * @return java.util.TreeSet
+ * @return java.util.TreeSet
*/
public TreeSet<String> getInputTableList() {
return inputTableList;
@@ -77,18 +75,18 @@
/**
* Implements the process method for the NodeProcessor interface.
*/
- public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
- throws SemanticException {
- ASTNode pt = (ASTNode)nd;
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+ Object... nodeOutputs) throws SemanticException {
+ ASTNode pt = (ASTNode) nd;
switch (pt.getToken().getType()) {
case HiveParser.TOK_TAB:
- OutputTableList.add(pt.getChild(0).getText()) ;
+ OutputTableList.add(pt.getChild(0).getText());
break;
case HiveParser.TOK_TABREF:
- String table_name = ((ASTNode)pt.getChild(0)).getText();
+ String table_name = ((ASTNode) pt.getChild(0)).getText();
inputTableList.add(table_name);
break;
}
@@ -96,14 +94,16 @@
}
/**
- * parses given query and gets the lineage info.
+ * parses given query and gets the lineage info.
+ *
* @param query
* @throws ParseException
*/
- public void getLineageInfo(String query) throws ParseException, SemanticException {
+ public void getLineageInfo(String query) throws ParseException,
+ SemanticException {
/*
- * Get the AST tree
+ * Get the AST tree
*/
ParseDriver pd = new ParseDriver();
ASTNode tree = pd.parse(query);
@@ -118,11 +118,13 @@
inputTableList.clear();
OutputTableList.clear();
- // create a walker which walks the tree in a DFS manner while maintaining the operator stack. The dispatcher
+ // create a walker which walks the tree in a DFS manner while maintaining
+ // the operator stack. The dispatcher
// generates the plan from the operator tree
Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
- // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
+ // The dispatcher fires the processor corresponding to the closest matching
+ // rule and passes the context along
Dispatcher disp = new DefaultRuleDispatcher(this, rules, null);
GraphWalker ogw = new DefaultGraphWalker(disp);
@@ -133,7 +135,7 @@
}
public static void main(String[] args) throws IOException, ParseException,
- SemanticException {
+ SemanticException {
String query = args[0];
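Mirroring main() above, a minimal usage sketch (table names are illustrative, and getOutputTableList() is assumed to exist alongside the getter shown above). TOK_TABREF nodes populate the input table list and TOK_TAB nodes the output table list:

    import org.apache.hadoop.hive.ql.parse.ParseException;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.tools.LineageInfo;

    public class LineageDemo {
      public static void main(String[] args) throws ParseException,
          SemanticException {
        LineageInfo info = new LineageInfo();
        // Parse the query and walk its AST with the rule dispatcher above.
        info.getLineageInfo("INSERT OVERWRITE TABLE dest "
            + "SELECT a.key FROM src a JOIN src2 b ON (a.key = b.key)");
        System.out.println("inputs:  " + info.getInputTableList());  // [src, src2]
        System.out.println("outputs: " + info.getOutputTableList()); // [dest]
      }
    }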
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java Thu Jan 21 10:37:58 2010
@@ -29,11 +29,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-
-@description(
- name = "max",
- value = "_FUNC_(expr) - Returns the maximum value of expr"
- )
+@description(name = "max", value = "_FUNC_(expr) - Returns the maximum value of expr")
public class UDAFMax extends UDAF {
static public class MaxShortEvaluator implements UDAFEvaluator {
@@ -270,5 +266,4 @@
}
}
-
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java Thu Jan 21 10:37:58 2010
@@ -29,10 +29,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "min",
- value = "_FUNC_(expr) - Returns the minimum value of expr"
- )
+@description(name = "min", value = "_FUNC_(expr) - Returns the minimum value of expr")
public class UDAFMin extends UDAF {
static public class MinShortEvaluator implements UDAFEvaluator {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java Thu Jan 21 10:37:58 2010
@@ -23,42 +23,42 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-
public class UDAFWrongArgLengthForTestCase extends UDAF {
- static public class UDAFWrongArgLengthForTestCaseEvaluator implements UDAFEvaluator {
-
+ static public class UDAFWrongArgLengthForTestCaseEvaluator implements
+ UDAFEvaluator {
+
private long mCount;
-
+
public UDAFWrongArgLengthForTestCaseEvaluator() {
super();
init();
}
-
+
public void init() {
mCount = 0;
}
-
+
Text emptyText = new Text();
-
+
public boolean iterate(Object o) {
if (o != null && !emptyText.equals(o)) {
- mCount ++;
+ mCount++;
}
return true;
}
-
+
public LongWritable terminatePartial() {
return new LongWritable(mCount);
}
-
+
public boolean merge() {
return true;
}
-
+
public LongWritable terminate() {
return new LongWritable(mCount);
}
}
-
+
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java Thu Jan 21 10:37:58 2010
@@ -24,48 +24,43 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
-@description(
- name = "abs",
- value = "_FUNC_(x) - returns the absolute value of x",
- extended = "Example:\n" +
- " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
- " 0\n" +
- " > SELECT _FUNC_(-5) FROM src LIMIT 1;\n" +
- " 5"
- )
-public class UDFAbs extends UDF {
-
- private DoubleWritable resultDouble = new DoubleWritable();
- private LongWritable resultLong = new LongWritable();
- private IntWritable resultInt = new IntWritable();
-
+@description(name = "abs", value = "_FUNC_(x) - returns the absolute value of x", extended = "Example:\n"
+ + " > SELECT _FUNC_(0) FROM src LIMIT 1;\n"
+ + " 0\n"
+ + " > SELECT _FUNC_(-5) FROM src LIMIT 1;\n" + " 5")
+public class UDFAbs extends UDF {
+
+ private final DoubleWritable resultDouble = new DoubleWritable();
+ private final LongWritable resultLong = new LongWritable();
+ private final IntWritable resultInt = new IntWritable();
+
public DoubleWritable evaluate(DoubleWritable n) {
if (n == null) {
return null;
}
-
+
resultDouble.set(Math.abs(n.get()));
-
+
return resultDouble;
}
-
+
public LongWritable evaluate(LongWritable n) {
if (n == null) {
return null;
}
-
+
resultLong.set(Math.abs(n.get()));
-
+
return resultLong;
}
-
+
public IntWritable evaluate(IntWritable n) {
if (n == null) {
return null;
}
-
+
resultInt.set(Math.abs(n.get()));
-
+
return resultInt;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java Thu Jan 21 10:37:58 2010
@@ -24,29 +24,23 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@description(
- name = "acos",
- value = "_FUNC_(x) - returns the arc cosine of x if -1<=x<=1 or " +
- "NULL otherwise",
- extended = "Example:\n" +
- " > SELECT _FUNC_(1) FROM src LIMIT 1;\n" +
- " 0\n" +
- " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" +
- " NULL"
- )
+@description(name = "acos", value = "_FUNC_(x) - returns the arc cosine of x if -1<=x<=1 or "
+ + "NULL otherwise", extended = "Example:\n"
+ + " > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + " 0\n"
+ + " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " NULL")
public class UDFAcos extends UDF {
private static Log LOG = LogFactory.getLog(UDFAcos.class.getName());
DoubleWritable result = new DoubleWritable();
-
+
public UDFAcos() {
}
/**
* Take Arc Cosine of a in radians.
*/
- public DoubleWritable evaluate(DoubleWritable a) {
+ public DoubleWritable evaluate(DoubleWritable a) {
if (a == null) {
return null;
} else {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java Thu Jan 21 10:37:58 2010
@@ -23,32 +23,27 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "ascii",
- value = "_FUNC_(str) - returns the numeric value of the first character" +
- " of str",
- extended = "Returns 0 if str is empty or NULL if str is NULL\n" +
- "Example:\n" +
- " > SELECT _FUNC_('222') FROM src LIMIT 1;" +
- " 50\n" +
- " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" +
- " 50"
- )
-public class UDFAscii extends UDF {
-
- private IntWritable result = new IntWritable();
-
+@description(name = "ascii", value = "_FUNC_(str) - returns the numeric value of the first character"
+ + " of str", extended = "Returns 0 if str is empty or NULL if str is NULL\n"
+ + "Example:\n"
+ + " > SELECT _FUNC_('222') FROM src LIMIT 1;"
+ + " 50\n"
+ + " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " 50")
+public class UDFAscii extends UDF {
+
+ private final IntWritable result = new IntWritable();
+
public IntWritable evaluate(Text s) {
if (s == null) {
return null;
}
-
- if(s.getLength() > 0) {
- result.set(s.getBytes()[0]);
+
+ if (s.getLength() > 0) {
+ result.set(s.getBytes()[0]);
} else {
result.set(0);
}
-
+
return result;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java Thu Jan 21 10:37:58 2010
@@ -24,28 +24,23 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@description(
- name = "asin",
- value = "_FUNC_(x) - returns the arc sine of x if -1<=x<=1 or NULL otherwise",
- extended = "Example:\n" +
- " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
- " 0\n" +
- " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" +
- " NULL"
- )
+@description(name = "asin", value = "_FUNC_(x) - returns the arc sine of x if -1<=x<=1 or NULL otherwise", extended = "Example:\n"
+ + " > SELECT _FUNC_(0) FROM src LIMIT 1;\n"
+ + " 0\n"
+ + " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " NULL")
public class UDFAsin extends UDF {
private static Log LOG = LogFactory.getLog(UDFAsin.class.getName());
DoubleWritable result = new DoubleWritable();
-
+
public UDFAsin() {
}
/**
* Take Arc Sine of a in radians.
*/
- public DoubleWritable evaluate(DoubleWritable a) {
+ public DoubleWritable evaluate(DoubleWritable a) {
if (a == null) {
return null;
} else {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseBitOP.java Thu Jan 21 10:37:58 2010
@@ -25,8 +25,8 @@
import org.apache.hadoop.io.LongWritable;
/**
- * Base class for numeric operators like +, -, / etc. All these operators
- * share a common method resolver (NumericOpMethodResolver).
+ * Base class for numeric operators like +, -, / etc. All these operators share
+ * a common method resolver (NumericOpMethodResolver).
*/
public abstract class UDFBaseBitOP extends UDF {
@@ -41,5 +41,5 @@
protected ShortWritable shortWritable = new ShortWritable();
protected IntWritable intWritable = new IntWritable();
protected LongWritable longWritable = new LongWritable();
-
+
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java Thu Jan 21 10:37:58 2010
@@ -26,13 +26,13 @@
public abstract class UDFBaseCompare extends UDF {
/**
- * This constructor sets the resolver to be used for comparison operators.
- * See {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
+ * This constructor sets the resolver to be used for comparison operators. See
+ * {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
*/
public UDFBaseCompare() {
super(null);
setResolver(new ComparisonOpMethodResolver(this.getClass()));
}
- public abstract BooleanWritable evaluate(DoubleWritable a, DoubleWritable b);
+ public abstract BooleanWritable evaluate(DoubleWritable a, DoubleWritable b);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java Thu Jan 21 10:37:58 2010
@@ -28,15 +28,14 @@
import org.apache.hadoop.io.LongWritable;
/**
- * Base class for numeric operators like +, -, / etc. All these operators
- * share a common method resolver (NumericOpMethodResolver).
+ * Base class for numeric operators like +, -, / etc. All these operators share
+ * a common method resolver (NumericOpMethodResolver).
*/
public abstract class UDFBaseNumericOp extends UDF {
/**
- * Constructor.
- * This constructor sets the resolver to be used for comparison operators.
- * See {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
+ * Constructor. This constructor sets the resolver to be used for comparison
+ * operators. See {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
*/
public UDFBaseNumericOp() {
super(null);
@@ -49,12 +48,17 @@
protected LongWritable longWritable = new LongWritable();
protected FloatWritable floatWritable = new FloatWritable();
protected DoubleWritable doubleWritable = new DoubleWritable();
-
- public abstract ByteWritable evaluate(ByteWritable a, ByteWritable b);
- public abstract ShortWritable evaluate(ShortWritable a, ShortWritable b);
- public abstract IntWritable evaluate(IntWritable a, IntWritable b);
- public abstract LongWritable evaluate(LongWritable a, LongWritable b);
- public abstract FloatWritable evaluate(FloatWritable a, FloatWritable b);
- public abstract DoubleWritable evaluate(DoubleWritable a, DoubleWritable b);
+
+ public abstract ByteWritable evaluate(ByteWritable a, ByteWritable b);
+
+ public abstract ShortWritable evaluate(ShortWritable a, ShortWritable b);
+
+ public abstract IntWritable evaluate(IntWritable a, IntWritable b);
+
+ public abstract LongWritable evaluate(LongWritable a, LongWritable b);
+
+ public abstract FloatWritable evaluate(FloatWritable a, FloatWritable b);
+
+ public abstract DoubleWritable evaluate(DoubleWritable a, DoubleWritable b);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java Thu Jan 21 10:37:58 2010
@@ -27,8 +27,8 @@
import org.apache.hadoop.io.LongWritable;
/**
- * Base class for numeric operators like +, -, / etc. All these operators
- * share a common method resolver (NumericOpMethodResolver).
+ * Base class for numeric operators like +, -, / etc. All these operators share
+ * a common method resolver (NumericOpMethodResolver).
*/
public abstract class UDFBaseNumericUnaryOp extends UDF {
@@ -45,12 +45,17 @@
protected LongWritable longWritable = new LongWritable();
protected FloatWritable floatWritable = new FloatWritable();
protected DoubleWritable doubleWritable = new DoubleWritable();
-
- public abstract ByteWritable evaluate(ByteWritable a);
- public abstract ShortWritable evaluate(ShortWritable a);
- public abstract IntWritable evaluate(IntWritable a);
- public abstract LongWritable evaluate(LongWritable a);
- public abstract FloatWritable evaluate(FloatWritable a);
- public abstract DoubleWritable evaluate(DoubleWritable a);
+
+ public abstract ByteWritable evaluate(ByteWritable a);
+
+ public abstract ShortWritable evaluate(ShortWritable a);
+
+ public abstract IntWritable evaluate(IntWritable a);
+
+ public abstract LongWritable evaluate(LongWritable a);
+
+ public abstract FloatWritable evaluate(FloatWritable a);
+
+ public abstract DoubleWritable evaluate(DoubleWritable a);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java Thu Jan 21 10:37:58 2010
@@ -18,40 +18,32 @@
package org.apache.hadoop.hive.ql.udf;
-import java.util.Arrays;
-
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "bin",
- value = "_FUNC_(n) - returns n in binary",
- extended = "n is a BIGINT. Returns NULL if n is NULL.\n" +
- "Example:\n" +
- " > SELECT _FUNC_(13) FROM src LIMIT 1\n" +
- " '1101'"
- )
-public class UDFBin extends UDF {
- private Text result = new Text();
+@description(name = "bin", value = "_FUNC_(n) - returns n in binary", extended = "n is a BIGINT. Returns NULL if n is NULL.\n"
+ + "Example:\n" + " > SELECT _FUNC_(13) FROM src LIMIT 1\n" + " '1101'")
+public class UDFBin extends UDF {
+ private final Text result = new Text();
byte[] value = new byte[64];
-
+
public Text evaluate(LongWritable n) {
if (n == null) {
return null;
}
-
+
long num = n.get();
// Extract the bits of num into value[] from right to left
int len = 0;
do {
len++;
- value[value.length-len] = (byte)('0' + (num & 1));
+ value[value.length - len] = (byte) ('0' + (num & 1));
num >>>= 1;
- } while(num != 0);
-
- result.set(value, value.length-len, len);
+ } while (num != 0);
+
+ result.set(value, value.length - len, len);
return result;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java Thu Jan 21 10:37:58 2010
@@ -25,30 +25,26 @@
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
-@description(
- name = "ceil,ceiling",
- value = "_FUNC_(x) - Find the smallest integer not smaller than x",
- extended = "Example:\n" +
- " > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n" +
- " 0\n" +
- " > SELECT _FUNC_(5) FROM src LIMIT 1;\n" +
- " 5"
- )
+@description(name = "ceil,ceiling", value = "_FUNC_(x) - Find the smallest integer not smaller than x", extended = "Example:\n"
+ + " > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
+ + " 0\n"
+ + " > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + " 5")
public class UDFCeil extends UDF {
private static Log LOG = LogFactory.getLog(UDFCeil.class.getName());
LongWritable longWritable = new LongWritable();
+
public UDFCeil() {
}
- public LongWritable evaluate(DoubleWritable i) {
+ public LongWritable evaluate(DoubleWritable i) {
if (i == null) {
return null;
} else {
- longWritable.set((long)Math.ceil(i.get()));
+ longWritable.set((long) Math.ceil(i.get()));
return longWritable;
}
}
-
+
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java Thu Jan 21 10:37:58 2010
@@ -22,14 +22,10 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.io.Text;
-@description(
- name = "concat",
- value = "_FUNC_(str1, str2, ... strN) - returns the concatenation of str1, str2, ... strN",
- extended = "Returns NULL if any argument is NULL.\n" +
- "Example:\n" +
- " > SELECT _FUNC_('abc', 'def') FROM src LIMIT 1;\n" +
- " 'abcdef'"
- )
+@description(name = "concat", value = "_FUNC_(str1, str2, ... strN) - returns the concatenation of str1, str2, ... strN", extended = "Returns NULL if any argument is NULL.\n"
+ + "Example:\n"
+ + " > SELECT _FUNC_('abc', 'def') FROM src LIMIT 1;\n"
+ + " 'abcdef'")
public class UDFConcat extends UDF {
public UDFConcat() {
@@ -37,14 +33,13 @@
Text text = new Text();
-
public Text evaluate(Text... args) {
text.clear();
- for(int i=0; i<args.length; i++) {
- if (args[i] == null) {
+ for (Text arg : args) {
+ if (arg == null) {
return null;
}
- text.append(args[i].getBytes(), 0, args[i].getLength());
+ text.append(arg.getBytes(), 0, arg.getLength());
}
return text;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java Thu Jan 21 10:37:58 2010
@@ -22,170 +22,171 @@
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "conv",
- value="_FUNC_(num, from_base, to_base) - convert num from from_base to" +
- " to_base",
- extended="If to_base is negative, treat num as a signed integer," +
- "otherwise, treat it as an unsigned integer.\n" +
- "Example:\n" +
- " > SELECT _FUNC_('100', 2, 10) FROM src LIMIT 1;\n" +
- " '4'\n" +
- " > SELECT _FUNC_(-10, 16, -10) FROM src LIMIT 1;\n" +
- " '16'"
- )
+@description(name = "conv", value = "_FUNC_(num, from_base, to_base) - convert num from from_base to"
+ + " to_base", extended = "If to_base is negative, treat num as a signed integer,"
+ + "otherwise, treat it as an unsigned integer.\n"
+ + "Example:\n"
+ + " > SELECT _FUNC_('100', 2, 10) FROM src LIMIT 1;\n"
+ + " '4'\n"
+ + " > SELECT _FUNC_(-10, 16, -10) FROM src LIMIT 1;\n" + " '16'")
public class UDFConv extends UDF {
- private Text result = new Text();
- private byte[] value = new byte[64];
-
- /**
- * Divide x by m as if x is an unsigned 64-bit integer.
- * Examples:
- * unsignedLongDiv(-1, 2) == Long.MAX_VALUE
- * unsignedLongDiv(6, 3) == 2
- * unsignedLongDiv(0, 5) == 0
- *
- * @param x is treated as unsigned
- * @param m is treated as signed
+ private final Text result = new Text();
+ private final byte[] value = new byte[64];
+
+ /**
+ * Divide x by m as if x is an unsigned 64-bit integer. Examples:
+ * unsignedLongDiv(-1, 2) == Long.MAX_VALUE unsignedLongDiv(6, 3) == 2
+ * unsignedLongDiv(0, 5) == 0
+ *
+ * @param x
+ * is treated as unsigned
+ * @param m
+ * is treated as signed
*/
private long unsignedLongDiv(long x, int m) {
- if(x >= 0) {
+ if (x >= 0) {
return x / m;
}
-
+
// Let uval be the value of the unsigned long with the same bits as x
// Two's complement => x = uval - 2*MAX - 2
// => uval = x + 2*MAX + 2
// Now, use the fact: (a+b)/c = a/c + b/c + (a%c+b%c)/c
- return x/m + 2*(Long.MAX_VALUE/m) + 2/m
- + (x%m + 2*(Long.MAX_VALUE%m) + 2%m) / m;
+ return x / m + 2 * (Long.MAX_VALUE / m) + 2 / m
+ + (x % m + 2 * (Long.MAX_VALUE % m) + 2 % m) / m;
}
-
+
/**
* Decode val into value[]
*
- * @param val is treated as an unsigned 64-bit integer
- * @param radix must be between MIN_RADIX and MAX_RADIX
+ * @param val
+ * is treated as an unsigned 64-bit integer
+ * @param radix
+ * must be between MIN_RADIX and MAX_RADIX
*/
private void decode(long val, int radix) {
- Arrays.fill(value, (byte)0);
- for (int i = value.length-1; val != 0; i--) {
+ Arrays.fill(value, (byte) 0);
+ for (int i = value.length - 1; val != 0; i--) {
long q = unsignedLongDiv(val, radix);
- value[i] = (byte)(val - q*radix);
+ value[i] = (byte) (val - q * radix);
val = q;
}
}
-
+
/**
* Convert value[] into a long. On overflow, return -1 (as mySQL does). If a
* negative digit is found, ignore the suffix starting there.
*
- * @param radix must be between MIN_RADIX and MAX_RADIX
+ * @param radix
+ * must be between MIN_RADIX and MAX_RADIX
* @return the result should be treated as an unsigned 64-bit integer.
*/
private long encode(int radix) {
long val = 0;
- long bound = unsignedLongDiv(-1-radix, radix); // Possible overflow once val
- // exceeds this value
- for(int i = 0; i<value.length && value[i]>=0; i++) {
- if(val >= bound) {
+    // Possible overflow once val exceeds this value
+    long bound = unsignedLongDiv(-1 - radix, radix);
+ for (int i = 0; i < value.length && value[i] >= 0; i++) {
+ if (val >= bound) {
// Check for overflow
- if(unsignedLongDiv(-1-value[i], radix) < val) {
+ if (unsignedLongDiv(-1 - value[i], radix) < val) {
return -1;
}
}
- val = val*radix + value[i];
+ val = val * radix + value[i];
}
return val;
}
-
+
/**
* Convert the bytes in value[] to the corresponding chars.
*
- * @param radix must be between MIN_RADIX and MAX_RADIX
- * @param fromPos is the first nonzero element
- */
- private void byte2char(int radix, int fromPos)
- {
- for(int i=fromPos; i < value.length; i++) {
- value[i] = (byte)Character.toUpperCase(
- Character.forDigit(value[i], radix));
+ * @param radix
+ * must be between MIN_RADIX and MAX_RADIX
+ * @param fromPos
+ * is the first nonzero element
+ */
+ private void byte2char(int radix, int fromPos) {
+ for (int i = fromPos; i < value.length; i++) {
+ value[i] = (byte) Character.toUpperCase(Character.forDigit(value[i],
+ radix));
}
}
-
+
/**
* Convert the chars in value[] to the corresponding integers. Convert invalid
* characters to -1.
*
- * @param radix must be between MIN_RADIX and MAX_RADIX
- * @param fromPos is the first nonzero element
- */
- private void char2byte(int radix, int fromPos)
- {
- for(int i=fromPos; i<value.length; i++) {
- value[i] = (byte)Character.digit(value[i], radix);
+ * @param radix
+ * must be between MIN_RADIX and MAX_RADIX
+ * @param fromPos
+ * is the first nonzero element
+ */
+ private void char2byte(int radix, int fromPos) {
+ for (int i = fromPos; i < value.length; i++) {
+ value[i] = (byte) Character.digit(value[i], radix);
}
}
-
+
/**
- * Convert numbers between different number bases. If toBase>0 the result is
+ * Convert numbers between different number bases. If toBase>0 the result is
* unsigned, otherwise it is signed.
*
*/
- public Text evaluate(Text n, IntWritable fromBase, IntWritable toBase)
- {
+ public Text evaluate(Text n, IntWritable fromBase, IntWritable toBase) {
if (n == null || fromBase == null || toBase == null) {
return null;
}
-
+
int fromBs = fromBase.get();
int toBs = toBase.get();
- if(fromBs < Character.MIN_RADIX || fromBs > Character.MAX_RADIX
- || Math.abs(toBs) < Character.MIN_RADIX
- || Math.abs(toBs) > Character.MAX_RADIX ) {
+ if (fromBs < Character.MIN_RADIX || fromBs > Character.MAX_RADIX
+ || Math.abs(toBs) < Character.MIN_RADIX
+ || Math.abs(toBs) > Character.MAX_RADIX) {
return null;
}
-
+
byte[] num = n.getBytes();
- boolean negative = (num[0]=='-');
+ boolean negative = (num[0] == '-');
int first = 0;
- if(negative) {
+ if (negative) {
first = 1;
}
-
+
// Copy the digits in the right side of the array
- for(int i = 1; i <= n.getLength()-first; i++) {
+ for (int i = 1; i <= n.getLength() - first; i++) {
value[value.length - i] = num[n.getLength() - i];
}
char2byte(fromBs, value.length - n.getLength() + first);
-
+
// Do the conversion by going through a 64 bit integer
long val = encode(fromBs);
- if(negative && toBs > 0) {
- if(val < 0) {
+ if (negative && toBs > 0) {
+ if (val < 0) {
val = -1;
} else {
val = -val;
}
}
- if(toBs < 0 && val <0 ) {
+ if (toBs < 0 && val < 0) {
val = -val;
negative = true;
}
decode(val, Math.abs(toBs));
-
+
// Find the first non-zero digit or the last digits if all are zero.
- for(first=0; first<value.length-1 && value[first]==0; first++) ;
-
+ for (first = 0; first < value.length - 1 && value[first] == 0; first++) {
+ ;
+ }
+
byte2char(Math.abs(toBs), first);
-
- if(negative && toBs<0) {
+
+ if (negative && toBs < 0) {
value[--first] = '-';
}
-
+
result.set(value, first, value.length - first);
return result;
}
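
unsignedLongDiv is the heart of the conversion: Java has no unsigned 64-bit type (this commit predates Java 8), so the code treats the bits of x as uval = x + 2*Long.MAX_VALUE + 2 and splits the division term by term with the truncated-division identity quoted in the comment. A sketch that cross-checks it against Long.divideUnsigned; relying on that method is an assumption in the sense that it requires a Java 8 or newer runtime:

    public class UnsignedDivCheck {
      // Same method body as unsignedLongDiv in UDFConv above.
      static long unsignedLongDiv(long x, int m) {
        if (x >= 0) {
          return x / m;
        }
        return x / m + 2 * (Long.MAX_VALUE / m) + 2 / m
            + (x % m + 2 * (Long.MAX_VALUE % m) + 2 % m) / m;
      }

      public static void main(String[] args) {
        long[] xs = { -1L, 6L, 0L, Long.MIN_VALUE };
        for (long x : xs) {
          // Long.divideUnsigned exists from Java 8 onward.
          System.out.println(unsignedLongDiv(x, 10)
              + " == " + Long.divideUnsigned(x, 10));
        }
      }
    }

The identity is exact whenever the summed remainders stay non-negative, which holds for base 10 as used here; it is worth checking per base, since the x % m term is negative for negative x.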
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java Thu Jan 21 10:37:58 2010
@@ -24,26 +24,21 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@description(
- name = "cos",
- value = "_FUNC_(x) - returns the cosine of x (x is in radians)",
- extended = "Example:\n " +
- " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
- " 1"
- )
+@description(name = "cos", value = "_FUNC_(x) - returns the cosine of x (x is in radians)", extended = "Example:\n "
+ + " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + " 1")
public class UDFCos extends UDF {
private static Log LOG = LogFactory.getLog(UDFCos.class.getName());
DoubleWritable result = new DoubleWritable();
-
+
public UDFCos() {
}
/**
* Take Cosine of a
*/
- public DoubleWritable evaluate(DoubleWritable a) {
+ public DoubleWritable evaluate(DoubleWritable a) {
if (a == null) {
return null;
} else {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java Thu Jan 21 10:37:58 2010
@@ -20,7 +20,6 @@
import java.text.ParseException;
import java.text.SimpleDateFormat;
-import java.util.Calendar;
import java.util.Date;
import org.apache.commons.logging.Log;
@@ -29,36 +28,35 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.io.Text;
-@description(
- name = "to_date",
- value = "_FUNC_(expr) - Extracts the date part of the date or datetime " +
- "expression expr",
- extended = "Example:\n " +
- " > SELECT _FUNC_('2009-30-07 04:17:52') FROM src LIMIT 1;\n" +
- " '2009-30-07'"
- )
+@description(name = "to_date", value = "_FUNC_(expr) - Extracts the date part of the date or datetime "
+ + "expression expr", extended = "Example:\n "
+ + " > SELECT _FUNC_('2009-30-07 04:17:52') FROM src LIMIT 1;\n"
+ + " '2009-30-07'")
public class UDFDate extends UDF {
private static Log LOG = LogFactory.getLog(UDFDate.class.getName());
- private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+ private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
Text t = new Text();
+
public UDFDate() {
}
/**
* Get the date part of a date time string.
*
- * @param dateString the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param dateString
+ * the date string in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
* @return the date in the format of "yyyy-MM-dd".
*/
- public Text evaluate(Text dateString) {
-
+ public Text evaluate(Text dateString) {
+
if (dateString == null) {
return null;
}
-
+
try {
Date date = formatter.parse(dateString.toString());
t.set(formatter.format(date));
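
to_date leans on two SimpleDateFormat behaviors: parse(String) consumes only as much input as the pattern needs, so a trailing " HH:mm:ss" part is silently ignored, and the class is not thread-safe, which is why it is kept as an instance field rather than shared statically. A hypothetical standalone check of the parsing behavior:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class ToDateSketch {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        // parse() stops once the pattern is satisfied, so the time
        // part of a datetime string is ignored rather than rejected.
        System.out.println(f.format(f.parse("2009-07-30 04:17:52"))); // 2009-07-30
        System.out.println(f.format(f.parse("2009-07-30")));          // 2009-07-30
      }
    }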
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java Thu Jan 21 10:37:58 2010
@@ -31,46 +31,47 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "date_add",
- value = "_FUNC_(start_date, num_days) - Returns the date that is num_days" +
- " after start_date.",
- extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or" +
- " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is " +
- "ignored.\n" +
- "Example:\n " +
- " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" +
- " '2009-31-07'"
- )
+@description(name = "date_add", value = "_FUNC_(start_date, num_days) - Returns the date that is num_days"
+ + " after start_date.", extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+ + " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is "
+ + "ignored.\n"
+ + "Example:\n "
+ + " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n"
+ + " '2009-31-07'")
public class UDFDateAdd extends UDF {
private static Log LOG = LogFactory.getLog(UDFDateAdd.class.getName());
- private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+ private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+ private final Calendar calendar = Calendar.getInstance(TimeZone
+ .getTimeZone("UTC"));
Text result = new Text();
-
+
public UDFDateAdd() {
}
/**
- * Add a number of days to the date.
- * The time part of the string will be ignored.
+ * Add a number of days to the date. The time part of the string will be
+ * ignored.
*
* NOTE: This is a subset of what MySQL offers as:
- * http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-add
+   * http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-add
*
- * @param dateString1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
- * @param days The number of days to add.
+ * @param dateString1
+ * the date string in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
+ * @param days
+ * The number of days to add.
* @return the date in the format of "yyyy-MM-dd".
*/
- public Text evaluate(Text dateString1, IntWritable days) {
-
+ public Text evaluate(Text dateString1, IntWritable days) {
+
if (dateString1 == null || days == null) {
return null;
}
-
+
try {
calendar.setTime(formatter.parse(dateString1.toString()));
calendar.add(Calendar.DAY_OF_MONTH, days.get());
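
The Calendar does the real date arithmetic, so month and year boundaries roll over correctly instead of producing a day 32. A standalone sketch under the same UTC setup; note that pinning the formatter's time zone as well is an extra assumption here, since the UDF above pins only the Calendar:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.TimeZone;

    public class DateAddSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        f.setTimeZone(TimeZone.getTimeZone("UTC")); // assumption: parse in UTC too
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.setTime(f.parse("2009-07-31"));
        c.add(Calendar.DAY_OF_MONTH, 1);            // rolls into the next month
        System.out.println(f.format(c.getTime()));  // 2009-08-01
      }
    }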
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java Thu Jan 21 10:37:58 2010
@@ -29,51 +29,51 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "datediff",
- value = "_FUNC_(date1, date2) - Returns the number of days between date1 " +
- "and date2",
- extended = "date1 and date2 are strings in the format " +
- "'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time parts are ignored." +
- "If date1 is earlier than date2, the result is negative.\n" +
- "Example:\n " +
- " > SELECT _FUNC_('2009-30-07', '2009-31-07') FROM src LIMIT 1;\n" +
- " 1"
- )
+@description(name = "datediff", value = "_FUNC_(date1, date2) - Returns the number of days between date1 "
+ + "and date2", extended = "date1 and date2 are strings in the format "
+ + "'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time parts are ignored."
+ + "If date1 is earlier than date2, the result is negative.\n"
+ + "Example:\n "
+ + " > SELECT _FUNC_('2009-30-07', '2009-31-07') FROM src LIMIT 1;\n"
+ + " 1")
public class UDFDateDiff extends UDF {
private static Log LOG = LogFactory.getLog(UDFDateDiff.class.getName());
- private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+ private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
IntWritable result = new IntWritable();
-
+
public UDFDateDiff() {
formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
/**
- * Calculate the difference in the number of days.
- * The time part of the string will be ignored.
- * If dateString1 is earlier than dateString2, then the result can be negative.
+ * Calculate the difference in the number of days. The time part of the string
+ * will be ignored. If dateString1 is earlier than dateString2, then the
+ * result can be negative.
*
- * @param dateString1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
- * @param dateString2 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param dateString1
+ * the date string in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
+ * @param dateString2
+ * the date string in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
* @return the difference in days.
*/
- public IntWritable evaluate(Text dateString1, Text dateString2) {
-
+ public IntWritable evaluate(Text dateString1, Text dateString2) {
+
if (dateString1 == null || dateString2 == null) {
return null;
}
-
+
try {
// NOTE: This implementation avoids the extra-second problem
// by comparing with UTC epoch and integer division.
- long diffInMilliSeconds = (formatter.parse(dateString1.toString()).getTime()
- - formatter.parse(dateString2.toString()).getTime());
+ long diffInMilliSeconds = (formatter.parse(dateString1.toString())
+ .getTime() - formatter.parse(dateString2.toString()).getTime());
// 86400 is the number of seconds in a day
- result.set((int)(diffInMilliSeconds / (86400 * 1000)));
+ result.set((int) (diffInMilliSeconds / (86400 * 1000)));
return result;
} catch (ParseException e) {
return null;
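
Setting the formatter to UTC in the constructor is what makes the integer division safe: UTC has no daylight-saving transitions, so parsed midnights are exact multiples of 86400 seconds apart (java.util.Date does not represent leap seconds). A condensed sketch of the same computation:

    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class DateDiffSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        f.setTimeZone(TimeZone.getTimeZone("UTC")); // no DST: days are exactly 86400s
        long ms = f.parse("2009-08-01").getTime()
            - f.parse("2009-07-30").getTime();
        System.out.println(ms / (86400L * 1000));   // 2
      }
    }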
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java Thu Jan 21 10:37:58 2010
@@ -31,45 +31,47 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "date_sub",
- value = "_FUNC_(start_date, num_days) - Returns the date that is num_days" +
- " before start_date.",
- extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or" +
- " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is " +
- "ignored.\n" +
- "Example:\n " +
- " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" +
- " '2009-29-07'"
- )
+@description(name = "date_sub", value = "_FUNC_(start_date, num_days) - Returns the date that is num_days"
+ + " before start_date.", extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+ + " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is "
+ + "ignored.\n"
+ + "Example:\n "
+ + " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n"
+ + " '2009-29-07'")
public class UDFDateSub extends UDF {
private static Log LOG = LogFactory.getLog(UDFDateSub.class.getName());
- private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+ private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+ private final Calendar calendar = Calendar.getInstance(TimeZone
+ .getTimeZone("UTC"));
Text result = new Text();
+
public UDFDateSub() {
}
/**
- * Subtract a number of days to the date.
- * The time part of the string will be ignored.
+   * Subtract a number of days from the date. The time part of the string will be
+ * ignored.
*
* NOTE: This is a subset of what MySQL offers as:
- * http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-sub
+   * http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-sub
*
- * @param dateString1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
- * @param days the number of days to subtract.
+ * @param dateString1
+ * the date string in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
+ * @param days
+ * the number of days to subtract.
* @return the date in the format of "yyyy-MM-dd".
*/
- public Text evaluate(Text dateString1, IntWritable days) {
-
+ public Text evaluate(Text dateString1, IntWritable days) {
+
if (dateString1 == null || days == null) {
return null;
}
-
+
try {
calendar.setTime(formatter.parse(dateString1.toString()));
calendar.add(Calendar.DAY_OF_MONTH, -days.get());
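
date_sub is the date_add machinery with the sign flipped: Calendar.add accepts a negative amount, so no separate subtraction path is needed. Continuing from the f and c variables of the date_add sketch above:

    c.setTime(f.parse("2009-08-01"));
    c.add(Calendar.DAY_OF_MONTH, -1);           // a negative amount subtracts
    System.out.println(f.format(c.getTime()));  // 2009-07-31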
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java Thu Jan 21 10:37:58 2010
@@ -30,38 +30,37 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@description(
- name = "day,dayofmonth",
- value = "_FUNC_(date) - Returns the date of the month of date",
- extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or " +
- "'yyyy-MM-dd'.\n" +
- "Example:\n " +
- " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" +
- " 30"
- )
+@description(name = "day,dayofmonth", value = "_FUNC_(date) - Returns the date of the month of date", extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+ + "'yyyy-MM-dd'.\n"
+ + "Example:\n "
+ + " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" + " 30")
public class UDFDayOfMonth extends UDF {
private static Log LOG = LogFactory.getLog(UDFDayOfMonth.class.getName());
- private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private Calendar calendar = Calendar.getInstance();
+ private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+ private final Calendar calendar = Calendar.getInstance();
IntWritable result = new IntWritable();
+
public UDFDayOfMonth() {
}
/**
* Get the day of month from a date string.
*
- * @param dateString the dateString in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
- * @return an int from 1 to 31. null if the dateString is not a valid date string.
+ * @param dateString
+ * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+ * "yyyy-MM-dd".
+ * @return an int from 1 to 31. null if the dateString is not a valid date
+ * string.
*/
- public IntWritable evaluate(Text dateString) {
-
+ public IntWritable evaluate(Text dateString) {
+
if (dateString == null) {
return null;
}
-
+
try {
Date date = formatter.parse(dateString.toString());
calendar.setTime(date);
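
The hunk is cut off before the lookup itself, but the extraction the javadoc describes amounts to Calendar.get(Calendar.DAY_OF_MONTH); the sketch below is a hypothetical reconstruction, not the committed code:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    public class DayOfMonthSketch {
      public static void main(String[] args) throws Exception {
        Calendar c = Calendar.getInstance();
        c.setTime(new SimpleDateFormat("yyyy-MM-dd").parse("2009-07-30"));
        System.out.println(c.get(Calendar.DAY_OF_MONTH)); // 30
      }
    }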
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java Thu Jan 21 10:37:58 2010
@@ -24,26 +24,21 @@
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@description(
- name = "exp",
- value = "_FUNC_(x) - Returns e to the power of x",
- extended = "Example:\n " +
- " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
- " 1"
- )
+@description(name = "exp", value = "_FUNC_(x) - Returns e to the power of x", extended = "Example:\n "
+ + " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + " 1")
public class UDFExp extends UDF {
private static Log LOG = LogFactory.getLog(UDFExp.class.getName());
DoubleWritable result = new DoubleWritable();
-
+
public UDFExp() {
}
/**
- * Raise e (the base of natural logarithm) to the power of a.
+   * Raise e (the base of the natural logarithm) to the power of a.
*/
- public DoubleWritable evaluate(DoubleWritable a) {
+ public DoubleWritable evaluate(DoubleWritable a) {
if (a == null) {
return null;
} else {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java Thu Jan 21 10:37:58 2010
@@ -20,51 +20,46 @@
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.description;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
-@description(
- name = "find_in_set",
- value = "_FUNC_(str,str_array) - Returns the first occurrence " +
- " of str in str_array where str_array is a comma-delimited string." +
- " Returns null if either argument is null." +
- " Returns 0 if the first argument has any commas.",
- extended = "Example:\n" +
- " > SELECT _FUNC_('ab','abc,b,ab,c,def') FROM src LIMIT 1;\n" +
- " 3\n" +
- " > SELECT * FROM src1 WHERE NOT _FUNC_(key,'311,128,345,956')=0;\n" +
- " 311 val_311\n" +
- " 128"
-
- )
+@description(name = "find_in_set", value = "_FUNC_(str,str_array) - Returns the first occurrence "
+ + " of str in str_array where str_array is a comma-delimited string."
+ + " Returns null if either argument is null."
+ + " Returns 0 if the first argument has any commas.", extended = "Example:\n"
+ + " > SELECT _FUNC_('ab','abc,b,ab,c,def') FROM src LIMIT 1;\n"
+ + " 3\n"
+ + " > SELECT * FROM src1 WHERE NOT _FUNC_(key,'311,128,345,956')=0;\n"
+ + " 311 val_311\n" + " 128"
+)
public class UDFFindInSet extends UDF {
- private IntWritable result = new IntWritable();
-
+ private final IntWritable result = new IntWritable();
+
public IntWritable evaluate(Text s, Text txtarray) {
if (s == null || txtarray == null) {
return null;
}
-
+
byte[] search_bytes = s.getBytes();
-
- for(int i = 0; i < s.getLength(); i++) {
- if(search_bytes[i]==',') {
+
+ for (int i = 0; i < s.getLength(); i++) {
+ if (search_bytes[i] == ',') {
result.set(0);
return result;
- }
-
+ }
+
}
-
+
byte[] data = txtarray.getBytes();
int search_length = s.getLength();
-
+
int cur_pos_in_array = 0;
int cur_length = 0;
boolean matching = true;
-
- for(int i = 0; i < txtarray.getLength(); i++) {
- if(data[i] == ',') {
+
+ for (int i = 0; i < txtarray.getLength(); i++) {
+ if (data[i] == ',') {
cur_pos_in_array++;
if (matching && cur_length == search_length) {
result.set(cur_pos_in_array);
@@ -74,19 +69,19 @@
cur_length = 0;
}
} else {
- if (cur_length + 1 <= search_length){
- if(!matching || search_bytes[cur_length] != data[i]) {
+ if (cur_length + 1 <= search_length) {
+ if (!matching || search_bytes[cur_length] != data[i]) {
matching = false;
}
} else {
matching = false;
}
- cur_length++;
+ cur_length++;
}
-
+
}
-
- if(matching && cur_length == search_length) {
+
+ if (matching && cur_length == search_length) {
cur_pos_in_array++;
result.set(cur_pos_in_array);
return result;
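
The scan above walks the comma-delimited list exactly once at the byte level: cur_pos_in_array counts elements at each comma, cur_length tracks how far the current element has matched, and the matching flag is cleared on the first mismatch. A String-based restatement of the same contract (hypothetical names, equality via substring rather than byte comparison):

    public class FindInSetSketch {
      // 1-based index of the first element of the comma-delimited list
      // equal to s, or 0 if no element matches.
      static int findInSet(String s, String list) {
        int pos = 1;
        int start = 0;
        for (int i = 0; i <= list.length(); i++) {
          if (i == list.length() || list.charAt(i) == ',') {
            if (list.substring(start, i).equals(s)) {
              return pos;
            }
            pos++;
            start = i + 1;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        System.out.println(findInSet("ab", "abc,b,ab,c,def")); // 3
        System.out.println(findInSet("x", "abc,b,ab,c,def"));  // 0
      }
    }

The byte-level version avoids the substring allocations, which matters when the UDF runs once per row.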