Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/09 08:55:50 UTC
svn commit: r907950 [8/15] - in /hadoop/hive/trunk: ./ checkstyle/
cli/src/java/org/apache/hadoop/hive/cli/
common/src/java/org/apache/hadoop/hive/common/
common/src/java/org/apache/hadoop/hive/conf/
contrib/src/java/org/apache/hadoop/hive/contrib/file...
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java Tue Feb 9 07:55:30 2010
@@ -23,6 +23,10 @@
import org.apache.hadoop.hive.ql.exec.Operator;
+/**
+ * MapredLocalWork.
+ *
+ */
@Explain(displayName = "Map Reduce Local Work")
public class MapredLocalWork implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Tue Feb 9 07:55:30 2010
@@ -28,6 +28,10 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
+/**
+ * MapredWork.
+ *
+ */
@Explain(displayName = "Map Reduce")
public class MapredWork implements Serializable {
private static final long serialVersionUID = 1L;
@@ -178,7 +182,6 @@
public void setReducer(final Operator<?> reducer) {
this.reducer = reducer;
}
-
public Integer getNumMapTasks() {
return numMapTasks;
@@ -187,7 +190,7 @@
public void setNumMapTasks(Integer numMapTasks) {
this.numMapTasks = numMapTasks;
}
-
+
/**
* If the number of reducers is -1, the runtime will automatically figure it
* out by input data size.
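The Javadoc above documents MapredWork's -1 convention for the reducer count: the runtime derives a number of reducers from the total input size. As a rough sketch of that estimation, assuming a bytes-per-reducer budget and a reducer cap (Hive exposes these as hive.exec.reducers.bytes.per.reducer and hive.exec.reducers.max; the code below is an illustrative approximation, not the Hive implementation):

    // Illustrative approximation of deriving a reducer count from input
    // size when the plan leaves numReduceTasks at -1.
    public final class ReducerEstimator {
      private ReducerEstimator() {
        // prevent instantiation
      }

      public static int estimateReducers(long totalInputBytes,
          long bytesPerReducer, int maxReducers) {
        // One reducer per bytesPerReducer of input, rounded up...
        int reducers = (int) ((totalInputBytes + bytesPerReducer - 1) / bytesPerReducer);
        // ...but at least one, and never more than the configured maximum.
        return Math.max(1, Math.min(reducers, maxReducers));
      }

      public static void main(String[] args) {
        // 10 GB of input with a 1 GB budget, capped at 999 reducers -> 10.
        System.out.println(estimateReducers(10L << 30, 1L << 30, 999));
      }
    }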
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java Tue Feb 9 07:55:30 2010
@@ -24,6 +24,10 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+/**
+ * MoveWork.
+ *
+ */
@Explain(displayName = "Move Operator")
public class MoveWork implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java Tue Feb 9 07:55:30 2010
@@ -8,6 +8,10 @@
import org.apache.hadoop.fs.Path;
+/**
+ * MsckDesc.
+ *
+ */
public class MsckDesc extends DDLWork implements Serializable {
private String tableName;
@@ -33,7 +37,7 @@
this.tableName = tableName;
this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
for (int i = 0; i < partSpecs.size(); i++) {
- this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+ this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
}
this.resFile = resFile;
this.repairPartitions = repairPartitions;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Tue Feb 9 07:55:30 2010
@@ -31,6 +31,10 @@
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.InputFormat;
+/**
+ * PartitionDesc.
+ *
+ */
@Explain(displayName = "Partition")
public class PartitionDesc implements Serializable, Cloneable {
private static final long serialVersionUID = 2L;
@@ -123,7 +127,7 @@
}
/**
- * Return a deserializer object corresponding to the tableDesc
+ * Return a deserializer object corresponding to the tableDesc.
*/
public Deserializer getDeserializer() throws Exception {
Deserializer de = deserializerClass.newInstance();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Tue Feb 9 07:55:30 2010
@@ -48,11 +48,18 @@
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
-public class PlanUtils {
+/**
+ * PlanUtils.
+ *
+ */
+public final class PlanUtils {
- protected final static Log LOG = LogFactory
- .getLog("org.apache.hadoop.hive.ql.plan.PlanUtils");
+ protected static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.plan.PlanUtils");
+ /**
+ * ExpressionTypes.
+ *
+ */
public static enum ExpressionTypes {
FIELD, JEXL
};
@@ -234,9 +241,9 @@
public static TableDesc getDefaultTableDesc(String separatorCode) {
return new TableDesc(MetadataTypedColumnsetSerDe.class,
TextInputFormat.class, IgnoreKeyTextOutputFormat.class, Utilities
- .makeProperties(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
- separatorCode));
+ .makeProperties(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
+ separatorCode));
}
/**
@@ -247,10 +254,10 @@
return new TableDesc(BinarySortableSerDe.class,
SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
Utilities.makeProperties(Constants.LIST_COLUMNS, MetaStoreUtils
- .getColumnNamesFromFieldSchema(fieldSchemas),
- Constants.LIST_COLUMN_TYPES, MetaStoreUtils
- .getColumnTypesFromFieldSchema(fieldSchemas),
- Constants.SERIALIZATION_SORT_ORDER, order));
+ .getColumnNamesFromFieldSchema(fieldSchemas),
+ Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+ .getColumnTypesFromFieldSchema(fieldSchemas),
+ Constants.SERIALIZATION_SORT_ORDER, order));
}
/**
@@ -259,10 +266,10 @@
public static TableDesc getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas) {
return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
- MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
- "columns.types", MetaStoreUtils
- .getColumnTypesFromFieldSchema(fieldSchemas),
- Constants.ESCAPE_CHAR, "\\"));
+ MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
+ "columns.types", MetaStoreUtils
+ .getColumnTypesFromFieldSchema(fieldSchemas),
+ Constants.ESCAPE_CHAR, "\\"));
}
/**
@@ -272,10 +279,10 @@
List<FieldSchema> fieldSchemas) {
return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
- MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
- "columns.types", MetaStoreUtils
- .getColumnTypesFromFieldSchema(fieldSchemas),
- Constants.ESCAPE_CHAR, "\\"));
+ MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
+ "columns.types", MetaStoreUtils
+ .getColumnTypesFromFieldSchema(fieldSchemas),
+ Constants.ESCAPE_CHAR, "\\"));
}
/**
@@ -285,11 +292,11 @@
List<FieldSchema> fieldSchemas) {
return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
SequenceFileOutputFormat.class, Utilities.makeProperties(
- Constants.LIST_COLUMNS, MetaStoreUtils
- .getColumnNamesFromFieldSchema(fieldSchemas),
- Constants.LIST_COLUMN_TYPES, MetaStoreUtils
- .getColumnTypesFromFieldSchema(fieldSchemas),
- Constants.ESCAPE_CHAR, "\\"));
+ Constants.LIST_COLUMNS, MetaStoreUtils
+ .getColumnNamesFromFieldSchema(fieldSchemas),
+ Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+ .getColumnTypesFromFieldSchema(fieldSchemas),
+ Constants.ESCAPE_CHAR, "\\"));
}
/**
@@ -298,11 +305,11 @@
public static TableDesc getReduceValueTableDesc(List<FieldSchema> fieldSchemas) {
return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
SequenceFileOutputFormat.class, Utilities.makeProperties(
- Constants.LIST_COLUMNS, MetaStoreUtils
- .getColumnNamesFromFieldSchema(fieldSchemas),
- Constants.LIST_COLUMN_TYPES, MetaStoreUtils
- .getColumnTypesFromFieldSchema(fieldSchemas),
- Constants.ESCAPE_CHAR, "\\"));
+ Constants.LIST_COLUMNS, MetaStoreUtils
+ .getColumnNamesFromFieldSchema(fieldSchemas),
+ Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+ .getColumnTypesFromFieldSchema(fieldSchemas),
+ Constants.ESCAPE_CHAR, "\\"));
}
/**
@@ -420,7 +427,7 @@
}
return new ReduceSinkDesc(keyCols, valueCols, outputKeyCols, outputValCols,
tag, partitionCols, numReducers, keyTable,
- // Revert to DynamicSerDe:
+ // Revert to DynamicSerDe:
// getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols,
// "reducesinkvalue")));
valueTable);
@@ -473,4 +480,8 @@
tag, partitionCols, order.toString(), numReducers);
}
+ private PlanUtils() {
+ // prevent instantiation
+ }
+
}
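The private constructor added at the end of PlanUtils, together with the new final keyword on the class, is the standard utility-class idiom that several hunks in this commit apply (ExprWalkerProcFactory, OpProcFactory, and CommandProcessorFactory below get the same treatment): a class with only static members should be impossible to instantiate or subclass. A minimal sketch of the idiom, with an illustrative class name:

    // Minimal sketch of the utility-class pattern applied throughout
    // this commit.
    public final class StringUtilsExample {   // final: no subclassing

      private StringUtilsExample() {
        // prevent instantiation
      }

      public static boolean isEmpty(String s) {
        return s == null || s.length() == 0;
      }
    }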
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Tue Feb 9 07:55:30 2010
@@ -20,6 +20,10 @@
import java.io.Serializable;
+/**
+ * ReduceSinkDesc.
+ *
+ */
@Explain(displayName = "Reduce Output Operator")
public class ReduceSinkDesc implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java Tue Feb 9 07:55:30 2010
@@ -20,6 +20,10 @@
import java.io.Serializable;
+/**
+ * SchemaDesc.
+ *
+ */
public class SchemaDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String schema;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Tue Feb 9 07:55:30 2010
@@ -23,6 +23,10 @@
import org.apache.hadoop.hive.ql.exec.RecordReader;
import org.apache.hadoop.hive.ql.exec.RecordWriter;
+/**
+ * ScriptDesc.
+ *
+ */
@Explain(displayName = "Transform Operator")
public class ScriptDesc implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java Tue Feb 9 07:55:30 2010
@@ -20,6 +20,10 @@
import java.io.Serializable;
+/**
+ * SelectDesc.
+ *
+ */
@Explain(displayName = "Select Operator")
public class SelectDesc implements Serializable {
private static final long serialVersionUID = 1L;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java Tue Feb 9 07:55:30 2010
@@ -22,17 +22,21 @@
import org.apache.hadoop.fs.Path;
+/**
+ * ShowFunctionsDesc.
+ *
+ */
@Explain(displayName = "Show Functions")
public class ShowFunctionsDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
String pattern;
Path resFile;
/**
- * table name for the result of show tables
+ * table name for the result of show tables.
*/
private static final String table = "show";
/**
- * thrift ddl for the result of show tables
+ * thrift ddl for the result of show tables.
*/
private static final String schema = "tab_name#string";
@@ -43,10 +47,10 @@
public String getSchema() {
return schema;
}
-
+
public ShowFunctionsDesc() {
}
-
+
/**
* @param resFile
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java Tue Feb 9 07:55:30 2010
@@ -22,17 +22,21 @@
import org.apache.hadoop.fs.Path;
+/**
+ * ShowPartitionsDesc.
+ *
+ */
@Explain(displayName = "Show Partitions")
public class ShowPartitionsDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
String tabName;
Path resFile;
/**
- * table name for the result of show tables
+ * table name for the result of show tables.
*/
private static final String table = "showpartitions";
/**
- * thrift ddl for the result of show tables
+ * thrift ddl for the result of show tables.
*/
private static final String schema = "partition#string";
@@ -46,10 +50,10 @@
public ShowPartitionsDesc() {
}
-
+
/**
* @param tabName
- * Name of the table whose partitions need to be listed
+ * Name of the table whose partitions need to be listed.
* @param resFile
* File to store the results in
*/
@@ -59,7 +63,7 @@
}
/**
- * @return the name of the table
+ * @return the name of the table.
*/
@Explain(displayName = "table")
public String getTabName() {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java Tue Feb 9 07:55:30 2010
@@ -23,6 +23,10 @@
import org.apache.hadoop.fs.Path;
+/**
+ * ShowTableStatusDesc.
+ *
+ */
@Explain(displayName = "Show Table Status")
public class ShowTableStatusDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
@@ -32,11 +36,11 @@
HashMap<String, String> partSpec;
/**
- * table name for the result of show tables
+ * table name for the result of show tables.
*/
private static final String table = "show_tablestatus";
/**
- * thrift ddl for the result of show tables
+ * thrift ddl for the result of show tables.
*/
private static final String schema = "tab_name#string";
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java Tue Feb 9 07:55:30 2010
@@ -22,17 +22,21 @@
import org.apache.hadoop.fs.Path;
+/**
+ * ShowTablesDesc.
+ *
+ */
@Explain(displayName = "Show Tables")
public class ShowTablesDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
String pattern;
Path resFile;
/**
- * table name for the result of show tables
+ * table name for the result of show tables.
*/
private static final String table = "show";
/**
- * thrift ddl for the result of show tables
+ * thrift ddl for the result of show tables.
*/
private static final String schema = "tab_name#string";
@@ -46,7 +50,7 @@
public ShowTablesDesc() {
}
-
+
/**
* @param resFile
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Tue Feb 9 07:55:30 2010
@@ -27,6 +27,10 @@
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.InputFormat;
+/**
+ * TableDesc.
+ *
+ */
public class TableDesc implements Serializable, Cloneable {
private static final long serialVersionUID = 1L;
private Class<? extends Deserializer> deserializerClass;
@@ -65,7 +69,7 @@
}
/**
- * Return a deserializer object corresponding to the tableDesc
+ * Return a deserializer object corresponding to the tableDesc.
*/
public Deserializer getDeserializer() throws Exception {
Deserializer de = deserializerClass.newInstance();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java Tue Feb 9 07:55:30 2010
@@ -28,7 +28,7 @@
public class UnionDesc implements Serializable {
private static final long serialVersionUID = 1L;
- transient private int numInputs;
+ private transient int numInputs;
@SuppressWarnings("nls")
public UnionDesc() {
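The UnionDesc change reorders "transient private" to "private transient". Both compile, but the Java Language Specification recommends a canonical modifier order (access modifier first, then static, final, transient, and so on), and checkstyle's ModifierOrder rule enforces it. A compact illustration:

    // Both declarations are legal Java; only the first follows the
    // JLS-recommended modifier order that checkstyle enforces.
    public class ModifierOrderExample implements java.io.Serializable {
      private static final long serialVersionUID = 1L;

      private transient int numInputs;     // recommended order
      // transient private int numInputs;  // legal, but flagged by ModifierOrder
    }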
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Tue Feb 9 07:55:30 2010
@@ -38,14 +38,14 @@
*/
public class ExprWalkerInfo implements NodeProcessorCtx {
- /** Information maintained for an expr while walking an expr tree */
+ /** Information maintained for an expr while walking an expr tree. */
private static class ExprInfo {
/**
- * true if expr rooted at this node doesn't contain more than one table
+ * true if expr rooted at this node doesn't contain more than one table.
* alias
*/
public boolean isCandidate = false;
- /** alias that this expression refers to */
+ /** alias that this expression refers to. */
public String alias = null;
/** new expr for this expression. */
public ExprNodeDesc convertedExpr = null;
@@ -68,7 +68,7 @@
/**
* this map contains a expr infos. Each key is a node in the expression tree
* and the information for each node is the value which is used while walking
- * the tree by its parent
+ * the tree by its parent.
*/
private final Map<String, List<ExprNodeDesc>> pushdownPreds;
/**
@@ -94,14 +94,14 @@
}
/**
- * @return the op of this expression
+ * @return the op of this expression.
*/
public Operator<? extends Serializable> getOp() {
return op;
}
/**
- * @return the row resolver of the operator of this expression
+ * @return the row resolver of the operator of this expression.
*/
public RowResolver getToRR() {
return toRR;
@@ -120,7 +120,7 @@
}
/**
- * adds a replacement node for this expression
+ * adds a replacement node for this expression.
*
* @param oldNode
* original node
@@ -138,7 +138,7 @@
}
/**
- * Returns true if the specified expression is pushdown candidate else false
+ * Returns true if the specified expression is pushdown candidate else false.
*
* @param expr
* @return true or false
@@ -152,7 +152,7 @@
}
/**
- * Marks the specified expr to the specified value
+ * Marks the specified expr to the specified value.
*
* @param expr
* @param b
@@ -168,7 +168,7 @@
}
/**
- * Returns the alias of the specified expr
+ * Returns the alias of the specified expr.
*
* @param expr
* @return The alias of the expression
@@ -182,7 +182,7 @@
}
/**
- * Adds the specified alias to the specified expr
+ * Adds the specified alias to the specified expr.
*
* @param expr
* @param alias
@@ -201,7 +201,7 @@
/**
* Adds the specified expr as the top-most pushdown expr (ie all its children
- * can be pushed)
+ * can be pushed).
*
* @param expr
*/
@@ -216,7 +216,7 @@
/**
* Returns the list of pushdown expressions for each alias that appear in the
* current operator's RowResolver. The exprs in each list can be combined
- * using conjunction (AND)
+ * using conjunction (AND).
*
* @return the map of alias to a list of pushdown predicates
*/
@@ -225,7 +225,7 @@
}
/**
- * Merges the specified pushdown predicates with the current class
+ * Merges the specified pushdown predicates with the current class.
*
* @param ewi
* ExpressionWalkerInfo
@@ -246,7 +246,7 @@
}
/**
- * sets the deterministic flag for this expression
+ * sets the deterministic flag for this expression.
*
* @param b
* deterministic or not
@@ -256,7 +256,7 @@
}
/**
- * @return whether this expression is deterministic or not
+ * @return whether this expression is deterministic or not.
*/
public boolean isDeterministic() {
return isDeterministic;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Tue Feb 9 07:55:30 2010
@@ -47,12 +47,16 @@
* determines whether the expression is a possible candidate for predicate
* pushdown optimization for the given operator
*/
-public class ExprWalkerProcFactory {
+public final class ExprWalkerProcFactory {
+ /**
+ * ColumnExprProcessor.
+ *
+ */
public static class ColumnExprProcessor implements NodeProcessor {
/**
- * Converts the reference from child row resolver to current row resolver
+ * Converts the reference from child row resolver to current row resolver.
*/
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
@@ -88,6 +92,10 @@
}
+ /**
+ * FieldExprProcessor.
+ *
+ */
public static class FieldExprProcessor implements NodeProcessor {
@Override
@@ -129,7 +137,7 @@
/**
* If all children are candidates and refer only to one table alias then this
* expr is a candidate else it is not a candidate but its children could be
- * final candidates
+ * final candidates.
*/
public static class GenericFuncExprProcessor implements NodeProcessor {
@@ -181,7 +189,7 @@
}
/**
- * For constants and null expressions
+ * For constants and null expressions.
*/
public static class DefaultExprProcessor implements NodeProcessor {
@@ -219,7 +227,7 @@
}
/**
- * Extracts pushdown predicates from the given list of predicate expression
+ * Extracts pushdown predicates from the given list of predicate expression.
*
* @param opContext
* operator context used for resolving column references
@@ -273,7 +281,7 @@
/**
* Walks through the top AND nodes and determine which of them are final
- * candidates
+ * candidates.
*/
private static void extractFinalCandidates(ExprNodeDesc expr,
ExprWalkerInfo ctx) {
@@ -289,6 +297,9 @@
extractFinalCandidates((ExprNodeDesc) ch, ctx);
}
}
+ }
+ private ExprWalkerProcFactory() {
+ // prevent instantiation
}
}
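The processors in ExprWalkerProcFactory implement the candidacy rule spelled out in the GenericFuncExprProcessor Javadoc above: an expression node is a pushdown candidate only if it is deterministic, every child is a candidate, and the whole subtree refers to at most one table alias. A simplified, self-contained sketch of that rule (the ExprNode type and its fields are illustrative stand-ins for Hive's ExprNodeDesc):

    import java.util.List;
    import java.util.Set;

    // Simplified sketch of the pushdown-candidate rule used by the
    // processors above.
    final class ExprNode {
      final String alias;            // table alias referenced directly, or null
      final List<ExprNode> children; // empty for leaf nodes
      final boolean deterministic;

      ExprNode(String alias, List<ExprNode> children, boolean deterministic) {
        this.alias = alias;
        this.children = children;
        this.deterministic = deterministic;
      }

      /**
       * Candidate iff deterministic, all children are candidates, and the
       * subtree touches at most one alias (accumulated in seenAliases).
       */
      boolean isPushdownCandidate(Set<String> seenAliases) {
        if (!deterministic) {
          return false;
        }
        if (alias != null) {
          seenAliases.add(alias);
        }
        for (ExprNode child : children) {
          if (!child.isPushdownCandidate(seenAliases)) {
            return false;
          }
        }
        return seenAliases.size() <= 1;
      }
    }

Hive's real walker also rewrites column references against the parent RowResolver, which this sketch omits.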
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Tue Feb 9 07:55:30 2010
@@ -61,10 +61,10 @@
* from filter operators that have been pushed. Currently these pushed
* predicates are evaluated twice.
*/
-public class OpProcFactory {
+public final class OpProcFactory {
/**
- * Processor for Script Operator Prevents any predicates being pushed
+ * Processor for Script Operator Prevents any predicates being pushed.
*/
public static class ScriptPPD extends DefaultPPD implements NodeProcessor {
@@ -83,7 +83,7 @@
/**
* Combines predicates of its child into a single expression and adds a filter
- * op as new child
+ * op as new child.
*/
public static class TableScanPPD extends DefaultPPD implements NodeProcessor {
@@ -103,7 +103,7 @@
/**
* Determines the push down predicates in its where expression and then
- * combines it with the push down predicates that are passed from its children
+ * combines it with the push down predicates that are passed from its children.
*/
public static class FilterPPD extends DefaultPPD implements NodeProcessor {
@@ -123,7 +123,7 @@
if (op.getChildren() != null && op.getChildren().size() == 1) {
createFilter(op, owi
.getPrunedPreds((Operator<? extends Serializable>) (op
- .getChildren().get(0))), owi);
+ .getChildren().get(0))), owi);
}
return null;
@@ -140,7 +140,7 @@
/**
* Determines predicates for which alias can be pushed to it's parents. See
- * the comments for getQualifiedAliases function
+ * the comments for getQualifiedAliases function.
*/
public static class JoinPPD extends DefaultPPD implements NodeProcessor {
@Override
@@ -243,7 +243,7 @@
}
/**
- * Default processor which just merges its children
+ * Default processor which just merges its children.
*/
public static class DefaultPPD implements NodeProcessor {
@@ -276,7 +276,7 @@
/**
* Take current operators pushdown predicates and merges them with
- * children's pushdown predicates
+ * children's pushdown predicates.
*
* @param nd
* current operator
@@ -301,7 +301,7 @@
Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
ExprWalkerInfo childPreds = owi
.getPrunedPreds((Operator<? extends Serializable>) nd.getChildren()
- .get(0));
+ .get(0));
if (childPreds == null) {
return;
}
@@ -408,4 +408,7 @@
return new ScriptPPD();
}
+ private OpProcFactory() {
+ // prevent instantiation
+ }
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java Tue Feb 9 07:55:30 2010
@@ -24,6 +24,10 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+/**
+ * AddResourceProcessor.
+ *
+ */
public class AddResourceProcessor implements CommandProcessor {
public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Tue Feb 9 07:55:30 2010
@@ -18,25 +18,34 @@
package org.apache.hadoop.hive.ql.processors;
-import org.apache.commons.lang.StringUtils;
+import static org.apache.commons.lang.StringUtils.isBlank;
+
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
-public class CommandProcessorFactory {
+/**
+ * CommandProcessorFactory.
+ *
+ */
+public final class CommandProcessorFactory {
+
+ private CommandProcessorFactory() {
+ // prevent instantiation
+ }
public static CommandProcessor get(String cmd) {
String cmdl = cmd.toLowerCase();
- if (cmdl.equals("set")) {
+ if ("set".equals(cmdl)) {
return new SetProcessor();
- } else if (cmdl.equals("dfs")) {
+ } else if ("dfs".equals(cmdl)) {
SessionState ss = SessionState.get();
return new DfsProcessor(ss.getConf());
- } else if (cmdl.equals("add")) {
+ } else if ("add".equals(cmdl)) {
return new AddResourceProcessor();
- } else if (cmdl.equals("delete")) {
+ } else if ("delete".equals(cmdl)) {
return new DeleteResourceProcessor();
- } else if (!StringUtils.isBlank(cmd)) {
+ } else if (!isBlank(cmd)) {
return new Driver();
}
return null;
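Two things change in CommandProcessorFactory besides the utility-class constructor: StringUtils.isBlank becomes a static import, and every comparison is flipped to the constant-first form. The latter is a null-safety idiom: a string literal is never null, so "set".equals(cmdl) returns false for a null cmdl where cmdl.equals("set") would throw. A small demonstration:

    public final class YodaEqualsDemo {
      private YodaEqualsDemo() {
        // prevent instantiation
      }

      public static void main(String[] args) {
        String cmdl = null;

        // Constant-first comparison: simply false for null input.
        System.out.println("set".equals(cmdl));   // false

        try {
          // Variable-first comparison: dereferences null.
          System.out.println(cmdl.equals("set"));
        } catch (NullPointerException e) {
          System.out.println("NPE from variable-first equals");
        }
      }
    }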
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java Tue Feb 9 07:55:30 2010
@@ -24,10 +24,13 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+/**
+ * DeleteResourceProcessor.
+ *
+ */
public class DeleteResourceProcessor implements CommandProcessor {
- public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class
- .getName());
+ public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName());
public static final LogHelper console = new LogHelper(LOG);
public void init() {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java Tue Feb 9 07:55:30 2010
@@ -27,6 +27,10 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+/**
+ * DfsProcessor.
+ *
+ */
public class DfsProcessor implements CommandProcessor {
public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName());
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Tue Feb 9 07:55:30 2010
@@ -22,6 +22,10 @@
import org.apache.hadoop.hive.ql.session.SessionState;
+/**
+ * SetProcessor.
+ *
+ */
public class SetProcessor implements CommandProcessor {
private static String prefix = "set: ";
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue Feb 9 07:55:30 2010
@@ -40,7 +40,7 @@
import org.apache.log4j.PropertyConfigurator;
/**
- * SessionState encapsulates common data associated with a session
+ * SessionState encapsulates common data associated with a session.
*
* Also provides support for a thread static session object that can be accessed
* from any point in the code to interact with the user and to retrieve
@@ -49,12 +49,12 @@
public class SessionState {
/**
- * current configuration
+ * current configuration.
*/
protected HiveConf conf;
/**
- * silent mode
+ * silent mode.
*/
protected boolean isSilent;
@@ -63,14 +63,14 @@
*/
protected HiveHistory hiveHist;
/**
- * Streams to read/write from
+ * Streams to read/write from.
*/
public PrintStream out;
public InputStream in;
public PrintStream err;
/**
- * type of the command
+ * type of the command.
*/
private String commandType;
@@ -122,7 +122,7 @@
private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>();
/**
- * start a new session and set it to current session
+ * start a new session and set it to current session.
*/
public static SessionState start(HiveConf conf) {
SessionState ss = new SessionState(conf);
@@ -135,7 +135,7 @@
/**
* set current session to existing session object if a thread is running
* multiple sessions - it must call this method with the new session object
- * when switching from one session to another
+ * when switching from one session to another.
*/
public static SessionState start(SessionState startSs) {
@@ -153,14 +153,14 @@
}
/**
- * get the current session
+ * get the current session.
*/
public static SessionState get() {
return tss.get();
}
/**
- * get hiveHitsory object which does structured logging
+ * get the hiveHistory object which does structured logging.
*
* @return The hive history object
*/
@@ -175,8 +175,8 @@
return userid
+ "_"
+ String.format("%1$4d%2$02d%3$02d%4$02d%5$02d", gc.get(Calendar.YEAR),
- gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
- .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
+ gc.get(Calendar.MONTH) + 1, gc.get(Calendar.DAY_OF_MONTH), gc
+ .get(Calendar.HOUR_OF_DAY), gc.get(Calendar.MINUTE));
}
public static final String HIVE_L4J = "hive-log4j.properties";
@@ -199,7 +199,7 @@
*
* NEVER write directly to the SessionStates standard output other than to
* emit result data DO use printInfo and printError provided by LogHelper to
- * emit non result data strings
+ * emit non result data strings.
*
* It is perfectly acceptable to have global static LogHelper objects (for
* example - once per module) LogHelper always emits info/error to current
@@ -259,7 +259,7 @@
private static LogHelper _console;
/**
- * initialize or retrieve console object for SessionState
+ * initialize or retrieve console object for SessionState.
*/
public static LogHelper getConsole() {
if (_console == null) {
@@ -319,12 +319,20 @@
}
}
+ /**
+ * ResourceHook.
+ *
+ */
public static interface ResourceHook {
- public String preHook(Set<String> cur, String s);
+ String preHook(Set<String> cur, String s);
- public boolean postHook(Set<String> cur, String s);
+ boolean postHook(Set<String> cur, String s);
}
+ /**
+ * ResourceType.
+ *
+ */
public static enum ResourceType {
FILE(new ResourceHook() {
public String preHook(Set<String> cur, String s) {
@@ -391,7 +399,8 @@
return null;
}
- private final HashMap<ResourceType, HashSet<String>> resource_map = new HashMap<ResourceType, HashSet<String>>();
+ private final HashMap<ResourceType, HashSet<String>> resource_map =
+ new HashMap<ResourceType, HashSet<String>>();
public void add_resource(ResourceType t, String value) {
if (resource_map.get(t) == null) {
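The ResourceHook hunk above drops the public keyword from the interface methods. Interface members are implicitly public and abstract, so the modifiers were redundant and checkstyle's RedundantModifier rule flags them; the change is purely cosmetic. For illustration:

    import java.util.Set;

    // Interface methods are implicitly public and abstract, so these two
    // declarations are identical to the pre-change
    // "public String preHook(...)" form.
    public interface ResourceHookSketch {
      String preHook(Set<String> cur, String s);
      boolean postHook(Set<String> cur, String s);
    }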
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java Tue Feb 9 07:55:30 2010
@@ -49,11 +49,11 @@
public class LineageInfo implements NodeProcessor {
/**
- * Stores input tables in sql
+ * Stores input tables in sql.
*/
TreeSet<String> inputTableList = new TreeSet<String>();
/**
- * Stores output tables in sql
+ * Stores output tables in sql.
*/
TreeSet<String> OutputTableList = new TreeSet<String>();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFWrongArgLengthForTestCase.java Tue Feb 9 07:55:30 2010
@@ -23,9 +23,17 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+/**
+ * UDAFWrongArgLengthForTestCase.
+ *
+ */
public class UDAFWrongArgLengthForTestCase extends UDAF {
- static public class UDAFWrongArgLengthForTestCaseEvaluator implements
+ /**
+ * UDAFWrongArgLengthForTestCaseEvaluator.
+ *
+ */
+ public static class UDAFWrongArgLengthForTestCaseEvaluator implements
UDAFEvaluator {
private long mCount;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java Tue Feb 9 07:55:30 2010
@@ -18,18 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
-@Description(name = "abs", value = "_FUNC_(x) - returns the absolute value of x", extended = "Example:\n"
+/**
+ * UDFAbs.
+ *
+ */
+@Description(name = "abs",
+ value = "_FUNC_(x) - returns the absolute value of x",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(0) FROM src LIMIT 1;\n"
+ " 0\n"
+ " > SELECT _FUNC_(-5) FROM src LIMIT 1;\n" + " 5")
public class UDFAbs extends UDF {
-
private final DoubleWritable resultDouble = new DoubleWritable();
private final LongWritable resultLong = new LongWritable();
private final IntWritable resultInt = new IntWritable();
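The UDFAbs hunk above and the UDF hunks that follow all reshape the @Description annotation onto one attribute per line and make the cached Writable fields private. A skeletal UDF in the same style, under the same conventions (the class and its behavior are invented for illustration; @Description, UDF, and the Writable types are the Hive/Hadoop classes already used above):

    import org.apache.hadoop.hive.ql.exec.Description;
    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.io.IntWritable;

    /**
     * UDFIdentityExample.
     *
     */
    @Description(name = "identity_example",
        value = "_FUNC_(x) - returns x unchanged",
        extended = "Example:\n"
        + "  > SELECT _FUNC_(3) FROM src LIMIT 1;\n" + "  3")
    public class UDFIdentityExample extends UDF {
      // Reuse one Writable across calls, as the UDFs in this commit do.
      private final IntWritable result = new IntWritable();

      public IntWritable evaluate(IntWritable x) {
        if (x == null) {
          return null;
        }
        result.set(x.get());
        return result;
      }
    }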
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java Tue Feb 9 07:55:30 2010
@@ -18,21 +18,20 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+/**
+ * UDFAcos.
+ *
+ */
@Description(name = "acos", value = "_FUNC_(x) - returns the arc cosine of x if -1<=x<=1 or "
+ "NULL otherwise", extended = "Example:\n"
+ " > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + " 0\n"
+ " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " NULL")
public class UDFAcos extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFAcos.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFAcos() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAscii.java Tue Feb 9 07:55:30 2010
@@ -18,19 +18,24 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "ascii", value = "_FUNC_(str) - returns the numeric value of the first character"
- + " of str", extended = "Returns 0 if str is empty or NULL if str is NULL\n"
+/**
+ * UDFAscii.
+ *
+ */
+@Description(name = "ascii",
+ value = "_FUNC_(str) - returns the numeric value of the first character"
+ + " of str",
+ extended = "Returns 0 if str is empty or NULL if str is NULL\n"
+ "Example:\n"
+ " > SELECT _FUNC_('222') FROM src LIMIT 1;"
+ " 50\n"
+ " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " 50")
public class UDFAscii extends UDF {
-
private final IntWritable result = new IntWritable();
public IntWritable evaluate(Text s) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java Tue Feb 9 07:55:30 2010
@@ -18,21 +18,22 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "asin", value = "_FUNC_(x) - returns the arc sine of x if -1<=x<=1 or NULL otherwise", extended = "Example:\n"
+/**
+ * UDFAsin.
+ *
+ */
+@Description(name = "asin",
+ value = "_FUNC_(x) - returns the arc sine of x if -1<=x<=1 or NULL otherwise",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(0) FROM src LIMIT 1;\n"
+ " 0\n"
+ " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " NULL")
public class UDFAsin extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFAsin.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFAsin() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java Tue Feb 9 07:55:30 2010
@@ -23,6 +23,10 @@
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.BooleanWritable;
+/**
+ * UDFBaseCompare.
+ *
+ */
public abstract class UDFBaseCompare extends UDF {
/**
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java Tue Feb 9 07:55:30 2010
@@ -18,16 +18,22 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "bin", value = "_FUNC_(n) - returns n in binary", extended = "n is a BIGINT. Returns NULL if n is NULL.\n"
+/**
+ * UDFBin.
+ *
+ */
+@Description(name = "bin",
+ value = "_FUNC_(n) - returns n in binary",
+ extended = "n is a BIGINT. Returns NULL if n is NULL.\n"
+ "Example:\n" + " > SELECT _FUNC_(13) FROM src LIMIT 1\n" + " '1101'")
public class UDFBin extends UDF {
private final Text result = new Text();
- byte[] value = new byte[64];
+ private byte[] value = new byte[64];
public Text evaluate(LongWritable n) {
if (n == null) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java Tue Feb 9 07:55:30 2010
@@ -18,22 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
-@Description(name = "ceil,ceiling", value = "_FUNC_(x) - Find the smallest integer not smaller than x", extended = "Example:\n"
+/**
+ * UDFCeil.
+ *
+ */
+@Description(name = "ceil,ceiling",
+ value = "_FUNC_(x) - Find the smallest integer not smaller than x",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
+ " 0\n"
+ " > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + " 5")
public class UDFCeil extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFCeil.class.getName());
-
- LongWritable longWritable = new LongWritable();
+ private LongWritable longWritable = new LongWritable();
public UDFCeil() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java Tue Feb 9 07:55:30 2010
@@ -18,11 +18,17 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
-@Description(name = "concat", value = "_FUNC_(str1, str2, ... strN) - returns the concatenation of str1, str2, ... strN", extended = "Returns NULL if any argument is NULL.\n"
+/**
+ * UDFConcat.
+ *
+ */
+@Description(name = "concat",
+ value = "_FUNC_(str1, str2, ... strN) - returns the concatenation of str1, str2, ... strN",
+ extended = "Returns NULL if any argument is NULL.\n"
+ "Example:\n"
+ " > SELECT _FUNC_('abc', 'def') FROM src LIMIT 1;\n"
+ " 'abcdef'")
@@ -31,7 +37,7 @@
public UDFConcat() {
}
- Text text = new Text();
+ private Text text = new Text();
public Text evaluate(Text... args) {
text.clear();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java Tue Feb 9 07:55:30 2010
@@ -19,13 +19,19 @@
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "conv", value = "_FUNC_(num, from_base, to_base) - convert num from from_base to"
- + " to_base", extended = "If to_base is negative, treat num as a signed integer,"
+/**
+ * UDFConv.
+ *
+ */
+@Description(name = "conv",
+ value = "_FUNC_(num, from_base, to_base) - convert num from from_base to"
+ + " to_base",
+ extended = "If to_base is negative, treat num as a signed integer,"
+ "otherwise, treat it as an unsigned integer.\n"
+ "Example:\n"
+ " > SELECT _FUNC_('100', 2, 10) FROM src LIMIT 1;\n"
@@ -59,7 +65,7 @@
}
/**
- * Decode val into value[]
+ * Decode val into value[].
*
* @param val
* is treated as an unsigned 64-bit integer
@@ -86,7 +92,7 @@
private long encode(int radix) {
long val = 0;
long bound = unsignedLongDiv(-1 - radix, radix); // Possible overflow once
- // val
+ // val
// exceeds this value
for (int i = 0; i < value.length && value[i] >= 0; i++) {
if (val >= bound) {
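The comment realigned in the UDFConv hunk belongs to its overflow guard: encode() accumulates digits as val = val * radix + digit over the unsigned 64-bit range, and must stop once val reaches bound = unsignedLongDiv(-1 - radix, radix), i.e. (2^64 - 1 - radix) / radix, since any larger val could wrap on the next step. A sketch of the same guard written with Java 8's unsigned helpers (the 2010 code predates them and ships its own unsignedLongDiv):

    // Sketch of UDFConv's overflow guard using Java 8's unsigned helpers.
    public final class UnsignedAccumulate {
      private UnsignedAccumulate() {
        // prevent instantiation
      }

      /**
       * Accumulate digits in the given radix as an unsigned 64-bit value,
       * saturating when the next step could overflow.
       */
      public static long encode(int[] digits, int radix) {
        // Largest val for which val * radix + digit cannot wrap:
        // (2^64 - 1 - radix) / radix in unsigned arithmetic
        // (-1L is all ones, i.e. 2^64 - 1, when treated as unsigned).
        long bound = Long.divideUnsigned(-1L - radix, radix);
        long val = 0;
        for (int digit : digits) {
          if (Long.compareUnsigned(val, bound) >= 0) {
            return -1L;  // saturate: treat as 2^64 - 1 on potential overflow
          }
          val = val * radix + digit;
        }
        return val;
      }

      public static void main(String[] args) {
        // "100" in base 2 -> 4.
        System.out.println(encode(new int[] {1, 0, 0}, 2));
      }
    }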
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java Tue Feb 9 07:55:30 2010
@@ -18,25 +18,26 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "cos", value = "_FUNC_(x) - returns the cosine of x (x is in radians)", extended = "Example:\n "
+/**
+ * UDFCos.
+ *
+ */
+@Description(name = "cos",
+ value = "_FUNC_(x) - returns the cosine of x (x is in radians)",
+ extended = "Example:\n "
+ " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + " 1")
public class UDFCos extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFCos.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFCos() {
}
/**
- * Take Cosine of a
+ * Take Cosine of a.
*/
public DoubleWritable evaluate(DoubleWritable a) {
if (a == null) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java Tue Feb 9 07:55:30 2010
@@ -22,23 +22,23 @@
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
-@Description(name = "to_date", value = "_FUNC_(expr) - Extracts the date part of the date or datetime "
- + "expression expr", extended = "Example:\n "
+/**
+ * UDFDate.
+ *
+ */
+@Description(name = "to_date",
+ value = "_FUNC_(expr) - Extracts the date part of the date or datetime expression expr",
+ extended = "Example:\n "
+ " > SELECT _FUNC_('2009-30-07 04:17:52') FROM src LIMIT 1;\n"
+ " '2009-30-07'")
public class UDFDate extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFDate.class.getName());
-
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- Text t = new Text();
+ private Text t = new Text();
public UDFDate() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java Tue Feb 9 07:55:30 2010
@@ -24,29 +24,28 @@
import java.util.Date;
import java.util.TimeZone;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "date_add", value = "_FUNC_(start_date, num_days) - Returns the date that is num_days"
- + " after start_date.", extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+/**
+ * UDFDateAdd.
+ *
+ */
+@Description(name = "date_add",
+ value = "_FUNC_(start_date, num_days) - Returns the date that is num_days after start_date.",
+ extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+ " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is "
+ "ignored.\n"
+ "Example:\n "
+ " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n"
+ " '2009-31-07'")
public class UDFDateAdd extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFDateAdd.class.getName());
-
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance(TimeZone
- .getTimeZone("UTC"));
+ private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
- Text result = new Text();
+ private Text result = new Text();
public UDFDateAdd() {
}
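[Editor's sketch] A sketch of how the reused formatter/calendar fields above are typically employed; the evaluate() body below is assumed from the annotation's contract, not shown in the hunk:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.TimeZone;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    public class DateAddSketch {  // hypothetical; illustrates UDFDateAdd's contract
      private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
      private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
      private final Text result = new Text();

      public DateAddSketch() {
        formatter.setTimeZone(TimeZone.getTimeZone("UTC")); // keep parse/format in UTC too
      }

      public Text evaluate(Text startDate, IntWritable numDays) {
        if (startDate == null || numDays == null) {
          return null;
        }
        try {
          // parse() stops at the first unparsable character, which is why the
          // time part of a 'yyyy-MM-dd HH:mm:ss' input is ignored.
          calendar.setTime(formatter.parse(startDate.toString()));
          calendar.add(Calendar.DAY_OF_MONTH, numDays.get());
          result.set(formatter.format(calendar.getTime()));
          return result;
        } catch (ParseException e) {
          return null; // an unparsable start_date yields NULL
        }
      }
    }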
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java Tue Feb 9 07:55:30 2010
@@ -22,27 +22,27 @@
import java.text.SimpleDateFormat;
import java.util.TimeZone;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "datediff", value = "_FUNC_(date1, date2) - Returns the number of days between date1 "
- + "and date2", extended = "date1 and date2 are strings in the format "
+/**
+ * UDFDateDiff.
+ *
+ */
+@Description(name = "datediff",
+ value = "_FUNC_(date1, date2) - Returns the number of days between date1 and date2",
+ extended = "date1 and date2 are strings in the format "
+ "'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time parts are ignored."
+ "If date1 is earlier than date2, the result is negative.\n"
+ "Example:\n "
+ " > SELECT _FUNC_('2009-30-07', '2009-31-07') FROM src LIMIT 1;\n"
+ " 1")
public class UDFDateDiff extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFDateDiff.class.getName());
-
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- IntWritable result = new IntWritable();
+ private IntWritable result = new IntWritable();
public UDFDateDiff() {
formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
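[Editor's sketch] The constructor pins the formatter to UTC, and that is what makes the day count a plain division: UTC has no daylight saving, so every day is exactly 86,400,000 ms. A worked sketch of the arithmetic (helper names are hypothetical):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class DateDiffSketch {  // hypothetical; shows the arithmetic only
      private static final long MILLIS_PER_DAY = 86400000L; // 24 * 60 * 60 * 1000

      public static long daysBetween(String date1, String date2) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC")); // no DST, so the division is exact
        long diff = fmt.parse(date1).getTime() - fmt.parse(date2).getTime();
        return diff / MILLIS_PER_DAY;
      }

      public static void main(String[] args) throws ParseException {
        System.out.println(daysBetween("2009-07-31", "2009-07-30")); //  1
        System.out.println(daysBetween("2009-07-30", "2009-07-31")); // -1
      }
    }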
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java Tue Feb 9 07:55:30 2010
@@ -24,29 +24,29 @@
import java.util.Date;
import java.util.TimeZone;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "date_sub", value = "_FUNC_(start_date, num_days) - Returns the date that is num_days"
- + " before start_date.", extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+/**
+ * UDFDateSub.
+ *
+ */
+@Description(name = "date_sub",
+ value = "_FUNC_(start_date, num_days) - Returns the date that is num_days before start_date.",
+ extended = "start_date is a string in the format 'yyyy-MM-dd HH:mm:ss' or"
+ " 'yyyy-MM-dd'. num_days is a number. The time part of start_date is "
+ "ignored.\n"
+ "Example:\n "
+ " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n"
+ " '2009-29-07'")
public class UDFDateSub extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFDateSub.class.getName());
-
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
private final Calendar calendar = Calendar.getInstance(TimeZone
.getTimeZone("UTC"));
- Text result = new Text();
+ private Text result = new Text();
public UDFDateSub() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java Tue Feb 9 07:55:30 2010
@@ -23,25 +23,26 @@
import java.util.Calendar;
import java.util.Date;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "day,dayofmonth", value = "_FUNC_(date) - Returns the date of the month of date", extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+/**
+ * UDFDayOfMonth.
+ *
+ */
+@Description(name = "day,dayofmonth",
+ value = "_FUNC_(date) - Returns the date of the month of date",
+ extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+ "'yyyy-MM-dd'.\n"
+ "Example:\n "
+ " > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" + " 30")
public class UDFDayOfMonth extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFDayOfMonth.class.getName());
-
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
private final Calendar calendar = Calendar.getInstance();
- IntWritable result = new IntWritable();
+ private IntWritable result = new IntWritable();
public UDFDayOfMonth() {
}
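[Editor's sketch] A sketch of the parse-then-Calendar lookup the fields above support; it also shows why a trailing time part is harmless, since parse() only consumes the 'yyyy-MM-dd' prefix (class and method names are hypothetical):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    public class DayOfMonthSketch {  // hypothetical; mirrors UDFDayOfMonth's contract
      private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
      private final Calendar calendar = Calendar.getInstance();

      public Integer dayOfMonth(String date) {
        try {
          // "2009-07-30 04:17:52" parses as 2009-07-30; the rest is ignored.
          calendar.setTime(formatter.parse(date));
          return calendar.get(Calendar.DAY_OF_MONTH); // 30
        } catch (ParseException e) {
          return null;
        }
      }
    }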
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java Tue Feb 9 07:55:30 2010
@@ -18,19 +18,20 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "exp", value = "_FUNC_(x) - Returns e to the power of x", extended = "Example:\n "
+/**
+ * UDFExp.
+ *
+ */
+@Description(name = "exp",
+ value = "_FUNC_(x) - Returns e to the power of x",
+ extended = "Example:\n "
+ " > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + " 1")
public class UDFExp extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFExp.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFExp() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFindInSet.java Tue Feb 9 07:55:30 2010
@@ -18,11 +18,15 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+/**
+ * UDFFindInSet.
+ *
+ */
@Description(name = "find_in_set", value = "_FUNC_(str,str_array) - Returns the first occurrence "
+ " of str in str_array where str_array is a comma-delimited string."
+ " Returns null if either argument is null."
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java Tue Feb 9 07:55:30 2010
@@ -18,22 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
-@Description(name = "floor", value = "_FUNC_(x) - Find the largest integer not greater than x", extended = "Example:\n"
+/**
+ * UDFFloor.
+ *
+ */
+@Description(name = "floor",
+ value = "_FUNC_(x) - Find the largest integer not greater than x",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
+ " -1\n"
+ " > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + " 5")
public class UDFFloor extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFFloor.class.getName());
-
- LongWritable result = new LongWritable();
+ private LongWritable result = new LongWritable();
public UDFFloor() {
}
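[Editor's sketch] Unlike the double-valued UDFs above, floor returns a LongWritable; a sketch of the conversion (the body is assumed, not part of the hunk):

    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.io.LongWritable;

    public class FloorSketch {  // hypothetical; mirrors UDFFloor's contract
      private final LongWritable result = new LongWritable();

      public LongWritable evaluate(DoubleWritable x) {
        if (x == null) {
          return null;
        }
        // Math.floor(-0.1) is -1.0; the cast to long is exact for
        // values that fit in 64 bits.
        result.set((long) Math.floor(x.get()));
        return result;
      }
    }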
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java Tue Feb 9 07:55:30 2010
@@ -21,31 +21,31 @@
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "from_unixtime", value = "_FUNC_(unix_time, format) - returns unix_time in the specified "
- + "format", extended = "Example:\n"
+/**
+ * UDFFromUnixTime.
+ *
+ */
+@Description(name = "from_unixtime",
+ value = "_FUNC_(unix_time, format) - returns unix_time in the specified format",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(0, 'yyyy-MM-dd HH:mm:ss') FROM src LIMIT 1;\n"
+ " '1970-01-01 00:00:00'")
public class UDFFromUnixTime extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFFromUnixTime.class.getName());
-
private SimpleDateFormat formatter;
- Text result = new Text();
- Text lastFormat = new Text();
+ private Text result = new Text();
+ private Text lastFormat = new Text();
public UDFFromUnixTime() {
}
- Text defaultFormat = new Text("yyyy-MM-dd HH:mm:ss");
+ private Text defaultFormat = new Text("yyyy-MM-dd HH:mm:ss");
public Text evaluate(IntWritable unixtime) {
return evaluate(unixtime, defaultFormat);
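[Editor's sketch] lastFormat plus a mutable formatter amount to a one-entry cache: the SimpleDateFormat is rebuilt only when the format argument changes between rows. A sketch of how these fields plausibly interact (the body is assumed from the field names):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    public class FromUnixTimeSketch {  // hypothetical; mirrors UDFFromUnixTime
      private SimpleDateFormat formatter;       // rebuilt only on a format change
      private final Text lastFormat = new Text();
      private final Text result = new Text();

      public Text evaluate(IntWritable unixtime, Text format) {
        if (unixtime == null || format == null) {
          return null;
        }
        if (formatter == null || !format.equals(lastFormat)) {
          formatter = new SimpleDateFormat(format.toString());
          lastFormat.set(format);
        }
        // Unix time is in seconds; java.util.Date expects milliseconds.
        result.set(formatter.format(new Date(unixtime.get() * 1000L)));
        return result;
      }
    }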
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java Tue Feb 9 07:55:30 2010
@@ -18,13 +18,19 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "hex", value = "_FUNC_(n or str) - Convert the argument to hexadecimal ", extended = "If the argument is a string, returns two hex digits for each "
+/**
+ * UDFHex.
+ *
+ */
+@Description(name = "hex",
+ value = "_FUNC_(n or str) - Convert the argument to hexadecimal ",
+ extended = "If the argument is a string, returns two hex digits for each "
+ "character in the string.\n"
+ "If the argument is a number, returns the hexadecimal representation.\n"
+ "Example:\n"
@@ -34,7 +40,7 @@
+ " '46616365626F6F6B'")
public class UDFHex extends UDF {
private final Text result = new Text();
- byte[] value = new byte[16];
+ private byte[] value = new byte[16];
/**
* Convert num to hex.
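[Editor's sketch] The 16-byte scratch buffer is sized to the worst case: a 64-bit long prints as at most 16 hex digits. A sketch of the numeric branch, filling the buffer from the end (the loop body is an assumption, not the committed code):

    import org.apache.hadoop.io.Text;

    public class HexSketch {  // hypothetical; mirrors UDFHex's numeric branch
      private final Text result = new Text();
      private final byte[] value = new byte[16]; // 64 bits = at most 16 hex digits

      public Text evaluate(long num) {
        int len = 0;
        do {
          len++;
          // Emit the low nibble, then shift; digits land right-to-left.
          value[value.length - len] = (byte) Character.toUpperCase(
              Character.forDigit((int) (num & 0xF), 16));
          num >>>= 4;
        } while (num != 0);
        result.set(value, value.length - len, len);
        return result;
      }
    }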
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java Tue Feb 9 07:55:30 2010
@@ -23,29 +23,29 @@
import java.util.Calendar;
import java.util.Date;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "hour", value = "_FUNC_(date) - Returns the hour of date", extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+/**
+ * UDFHour.
+ *
+ */
+@Description(name = "hour",
+ value = "_FUNC_(date) - Returns the hour of date",
+ extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+ "'HH:mm:ss'.\n"
+ "Example:\n "
+ " > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
+ " 12\n"
+ " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 12")
public class UDFHour extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFHour.class.getName());
-
- private final SimpleDateFormat formatter1 = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
+ private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
private final Calendar calendar = Calendar.getInstance();
- IntWritable result = new IntWritable();
+ private IntWritable result = new IntWritable();
public UDFHour() {
}
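[Editor's sketch] Two formatters are kept because the input may be a full timestamp or a bare time of day; parsing tries the longer format first and falls back. A sketch of that fallback (the method is hypothetical; only the fields appear in the hunk):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;

    public class HourSketch {  // hypothetical; mirrors UDFHour's contract
      private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
      private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
      private final Calendar calendar = Calendar.getInstance();

      public Integer hour(String dateString) {
        Date date;
        try {
          date = formatter1.parse(dateString);   // try the full timestamp first
        } catch (ParseException e1) {
          try {
            date = formatter2.parse(dateString); // fall back to time-of-day only
          } catch (ParseException e2) {
            return null;                         // neither format matched
          }
        }
        calendar.setTime(date);
        return calendar.get(Calendar.HOUR_OF_DAY); // 12 for '12:58:59'
      }
    }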
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java Tue Feb 9 07:55:30 2010
@@ -25,16 +25,20 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
-@Description(name = "get_json_object", value = "_FUNC_(json_txt, path) - Extract a json object from path ", extended = "Extract json object from a json string based on json path "
+/**
+ * UDFJson.
+ *
+ */
+@Description(name = "get_json_object",
+ value = "_FUNC_(json_txt, path) - Extract a json object from path ",
+ extended = "Extract json object from a json string based on json path "
+ "specified, and return json string of the extracted json object. It "
+ "will return null if the input json string is invalid.\n"
+ "A limited version of JSONPath supported:\n"
@@ -51,7 +55,6 @@
+ " [,] : Union operator\n"
+ " [start:end:step] : array slice operator\n")
public class UDFJson extends UDF {
- private static Log LOG = LogFactory.getLog(UDFJson.class.getName());
private final Pattern patternKey = Pattern.compile("^([a-zA-Z0-9_\\-]+).*");
private final Pattern patternIndex = Pattern.compile("\\[([0-9]+|\\*)\\]");
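[Editor's sketch] patternKey and patternIndex are compiled once per instance because Pattern.compile() is the costly step; the per-row work is only Matcher creation. A sketch of how such patterns tokenize one path component (the helper names are hypothetical):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class JsonPathTokenSketch {  // hypothetical; reuses UDFJson's patterns
      private final Pattern patternKey = Pattern.compile("^([a-zA-Z0-9_\\-]+).*");
      private final Pattern patternIndex = Pattern.compile("\\[([0-9]+|\\*)\\]");

      /** Returns the leading key of one path component, e.g. "store" from "store[0]". */
      public String leadingKey(String pathComponent) {
        Matcher m = patternKey.matcher(pathComponent);
        return m.matches() ? m.group(1) : null;
      }

      /** Returns the first bracketed index, e.g. "0" from "store[0]", or null. */
      public String firstIndex(String pathComponent) {
        Matcher m = patternIndex.matcher(pathComponent);
        return m.find() ? m.group(1) : null;
      }
    }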
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java Tue Feb 9 07:55:30 2010
@@ -19,15 +19,20 @@
package org.apache.hadoop.hive.ql.udf;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
-@Description(name = "ltrim", value = "_FUNC_(str) - Removes the leading space characters from str ", extended = "Example:\n"
+/**
+ * UDFLTrim.
+ *
+ */
+@Description(name = "ltrim",
+ value = "_FUNC_(str) - Removes the leading space characters from str ",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(' facebook') FROM src LIMIT 1;\n" + " 'facebook'")
public class UDFLTrim extends UDF {
-
- Text result = new Text();
+ private Text result = new Text();
public UDFLTrim() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java Tue Feb 9 07:55:30 2010
@@ -17,13 +17,19 @@
*/
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "length", value = "_FUNC_(str) - Returns the length of str ", extended = "Example:\n"
+/**
+ * UDFLength.
+ *
+ */
+@Description(name = "length",
+ value = "_FUNC_(str) - Returns the length of str ",
+ extended = "Example:\n"
+ " > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + " 8")
public class UDFLength extends UDF {
private final IntWritable result = new IntWritable();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java Tue Feb 9 07:55:30 2010
@@ -21,19 +21,21 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Text;
-@Description(name = "like", value = "_FUNC_(str, pattern) - Checks if str matches pattern", extended = "Example:\n"
+/**
+ * UDFLike.
+ *
+ */
+@Description(name = "like",
+ value = "_FUNC_(str, pattern) - Checks if str matches pattern",
+ extended = "Example:\n"
+ " > SELECT a.* FROM srcpart a WHERE a.hr _FUNC_ '%2' LIMIT 1;\n"
+ " 27 val_27 2008-04-08 12")
public class UDFLike extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFLike.class.getName());
private final Text lastLikePattern = new Text();
private Pattern p = null;
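[Editor's sketch] lastLikePattern and the compiled Pattern p form the same one-entry cache seen in UDFFromUnixTime; the translation itself maps SQL LIKE wildcards onto a regex. A simplified sketch of that mapping (not the committed implementation; '%' becomes ".*", '_' becomes ".", and everything else is quoted so regex metacharacters stay literal):

    import java.util.regex.Pattern;

    public class LikeToRegexSketch {  // hypothetical; illustrates the idea only
      public static Pattern likeToRegex(String likePattern) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < likePattern.length(); i++) {
          char c = likePattern.charAt(i);
          if (c == '%') {
            sb.append(".*");                           // any sequence
          } else if (c == '_') {
            sb.append(".");                            // exactly one character
          } else {
            sb.append(Pattern.quote(String.valueOf(c))); // literal
          }
        }
        return Pattern.compile(sb.toString());
      }

      public static void main(String[] args) {
        System.out.println(likeToRegex("%2").matcher("12").matches());   // true
        System.out.println(likeToRegex("a_c").matcher("abc").matches()); // true
      }
    }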
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java Tue Feb 9 07:55:30 2010
@@ -18,19 +18,20 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "ln", value = "_FUNC_(x) - Returns the natural logarithm of x", extended = "Example:\n"
+/**
+ * UDFLn.
+ *
+ */
+@Description(name = "ln",
+ value = "_FUNC_(x) - Returns the natural logarithm of x",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + " 0")
public class UDFLn extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFLn.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFLn() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java Tue Feb 9 07:55:30 2010
@@ -18,19 +18,20 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "log", value = "_FUNC_([b], x) - Returns the logarithm of x with base b", extended = "Example:\n"
+/**
+ * UDFLog.
+ *
+ */
+@Description(name = "log",
+ value = "_FUNC_([b], x) - Returns the logarithm of x with base b",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(13, 13) FROM src LIMIT 1;\n" + " 1")
public class UDFLog extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFLog.class.getName());
-
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFLog() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java Tue Feb 9 07:55:30 2010
@@ -18,21 +18,22 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "log10", value = "_FUNC_(x) - Returns the logarithm of x with base 10", extended = "Example:\n"
+/**
+ * UDFLog10.
+ *
+ */
+@Description(name = "log10",
+ value = "_FUNC_(x) - Returns the logarithm of x with base 10",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(10) FROM src LIMIT 1;\n" + " 1")
public class UDFLog10 extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFLog10.class.getName());
-
private static double log10 = Math.log(10.0);
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFLog10() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java Tue Feb 9 07:55:30 2010
@@ -18,21 +18,22 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-@Description(name = "log2", value = "_FUNC_(x) - Returns the logarithm of x with base 2", extended = "Example:\n"
+/**
+ * UDFLog2.
+ *
+ */
+@Description(name = "log2",
+ value = "_FUNC_(x) - Returns the logarithm of x with base 2",
+ extended = "Example:\n"
+ " > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + " 1")
public class UDFLog2 extends UDF {
-
- private static Log LOG = LogFactory.getLog(UDFLog2.class.getName());
-
private static double log2 = Math.log(2.0);
- DoubleWritable result = new DoubleWritable();
+ private DoubleWritable result = new DoubleWritable();
public UDFLog2() {
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java Tue Feb 9 07:55:30 2010
@@ -18,15 +18,20 @@
package org.apache.hadoop.hive.ql.udf;
-import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
-@Description(name = "lower,lcase", value = "_FUNC_(str) - Returns str with all characters changed to lowercase", extended = "Example:\n"
+/**
+ * UDFLower.
+ *
+ */
+@Description(name = "lower,lcase",
+ value = "_FUNC_(str) - Returns str with all characters changed to lowercase",
+ extended = "Example:\n"
+ " > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + " 'facebook'")
public class UDFLower extends UDF {
-
- Text t = new Text();
+ private Text t = new Text();
public UDFLower() {
}