You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2010/02/02 02:12:25 UTC
svn commit: r905488 - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/metadata/
ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/
Author: namit
Date: Tue Feb 2 01:12:24 2010
New Revision: 905488
URL: http://svn.apache.org/viewvc?rev=905488&view=rev
Log:
HIVE-1119. Serialize all tasks (Zheng Shao via namit)
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Feb 2 01:12:24 2010
@@ -107,6 +107,8 @@
HIVE-1092. Add job counter for conditional tasks
(He Yongqiang via namit)
+ HIVE-1119. Serialize all tasks (Zheng Shao via namit)
+
Release 0.5.0 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java Tue Feb 2 01:12:24 2010
@@ -60,7 +60,7 @@
static final private Log LOG = LogFactory.getLog(QueryPlan.class.getName());
- private final String queryString;
+ private String queryString;
private ArrayList<Task<? extends Serializable>> rootTasks;
private FetchTask fetchTask;
@@ -69,12 +69,15 @@
private HashMap<String, String> idToTableNameMap;
- private final String queryId;
- private final org.apache.hadoop.hive.ql.plan.api.Query query;
- private final HashMap<String, HashMap<String, Long>> counters;
- private final HashSet<String> done;
- private final HashSet<String> started;
+ private String queryId;
+ private org.apache.hadoop.hive.ql.plan.api.Query query;
+ private HashMap<String, HashMap<String, Long>> counters;
+ private HashSet<String> done;
+ private HashSet<String> started;
+ public QueryPlan() {
+ }
+
public QueryPlan(String queryString, BaseSemanticAnalyzer sem) {
this.queryString = queryString;
@@ -612,11 +615,11 @@
done.add(queryId);
}
- public Set<String> getStarted() {
+ public HashSet<String> getStarted() {
return started;
}
- public Set<String> getDone() {
+ public HashSet<String> getDone() {
return done;
}
@@ -660,4 +663,40 @@
this.idToTableNameMap = idToTableNameMap;
}
+ public String getQueryString() {
+ return queryString;
+ }
+
+ public void setQueryString(String queryString) {
+ this.queryString = queryString;
+ }
+
+ public org.apache.hadoop.hive.ql.plan.api.Query getQuery() {
+ return query;
+ }
+
+ public void setQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
+ this.query = query;
+ }
+
+ public HashMap<String, HashMap<String, Long>> getCounters() {
+ return counters;
+ }
+
+ public void setCounters(HashMap<String, HashMap<String, Long>> counters) {
+ this.counters = counters;
+ }
+
+ public void setQueryId(String queryId) {
+ this.queryId = queryId;
+ }
+
+ public void setDone(HashSet<String> done) {
+ this.done = done;
+ }
+
+ public void setStarted(HashSet<String> started) {
+ this.started = started;
+ }
+
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Feb 2 01:12:24 2010
@@ -260,7 +260,7 @@
try {
HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
checker.checkMetastore(MetaStoreUtils.DEFAULT_DATABASE_NAME, msckDesc
- .getTableName(), msckDesc.getPartitionSpec(), result);
+ .getTableName(), msckDesc.getPartSpecs(), result);
if (msckDesc.isRepairPartitions()) {
Table table = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
msckDesc.getTableName());
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Tue Feb 2 01:12:24 2010
@@ -31,6 +31,7 @@
import java.util.Set;
import java.util.Map.Entry;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -51,8 +52,8 @@
public int execute() {
try {
- OutputStream outS = work.getResFile().getFileSystem(conf).create(
- work.getResFile());
+ Path resFile = new Path(work.getResFile());
+ OutputStream outS = resFile.getFileSystem(conf).create(resFile);
PrintStream out = new PrintStream(outS);
// Print out the parse AST
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java Tue Feb 2 01:12:24 2010
@@ -59,7 +59,7 @@
* Most likely filesystem related
*/
public void checkMetastore(String dbName, String tableName,
- List<Map<String, String>> partitions, CheckResult result)
+ List<? extends Map<String, String>> partitions, CheckResult result)
throws HiveException, IOException {
if (dbName == null || "".equalsIgnoreCase(dbName)) {
@@ -161,7 +161,7 @@
* Failed to get required information from the metastore.
*/
void checkTable(String dbName, String tableName,
- List<Map<String, String>> partitions, CheckResult result)
+ List<? extends Map<String, String>> partitions, CheckResult result)
throws MetaException, IOException, HiveException {
Table table = null;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue Feb 2 01:12:24 2010
@@ -237,7 +237,7 @@
String tableName = unescapeIdentifier(ast.getChild(0).getText());
ASTNode buckets = (ASTNode) ast.getChild(1);
List<String> bucketCols = getColumnNames((ASTNode) buckets.getChild(0));
- List<Order> sortCols = null;
+ List<Order> sortCols = new ArrayList<Order>();
int numBuckets = -1;
if (buckets.getChildCount() == 2) {
numBuckets = (Integer.valueOf(buckets.getChild(1).getText())).intValue();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Tue Feb 2 01:12:24 2010
@@ -60,7 +60,7 @@
tasks.add(fetchTask);
}
- rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile(), tasks,
- ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
+ rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
+ tasks, ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Feb 2 01:12:24 2010
@@ -3104,7 +3104,7 @@
// update the create table descriptor with the resulting schema.
if (tblDesc != null) {
- tblDesc.setCols(field_schemas);
+ tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
}
if (!ctx.isMRTmpFileURI(destStr)) {
@@ -5884,10 +5884,10 @@
throws SemanticException {
String tableName = unescapeIdentifier(ast.getChild(0).getText());
String likeTableName = null;
- List<FieldSchema> cols = null;
- List<FieldSchema> partCols = null;
- List<String> bucketCols = null;
- List<Order> sortCols = null;
+ List<FieldSchema> cols = new ArrayList<FieldSchema>();
+ List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+ List<String> bucketCols = new ArrayList<String>();
+ List<Order> sortCols = new ArrayList<Order>();
int numBuckets = -1;
String fieldDelim = null;
String fieldEscape = null;
@@ -5899,7 +5899,7 @@
String outputFormat = null;
String location = null;
String serde = null;
- Map<String, String> mapProp = null;
+ Map<String, String> mapProp = new HashMap<String, String>();
boolean ifNotExists = false;
boolean isExt = false;
ASTNode selectStmt = null;
@@ -5935,7 +5935,7 @@
throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE
.getMsg());
}
- if (cols != null) {
+ if (cols.size() != 0) {
throw new SemanticException(ErrorMsg.CTLT_COLLST_COEXISTENCE
.getMsg());
}
@@ -5947,11 +5947,11 @@
if (command_type == CTLT) {
throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
}
- if (cols != null) {
+ if (cols.size() != 0) {
throw new SemanticException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
}
// TODO: support partition for CTAS?
- if (partCols != null || bucketCols != null) {
+ if (partCols.size() != 0 || bucketCols.size() != 0) {
throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
}
if (isExt) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Tue Feb 2 01:12:24 2010
@@ -19,6 +19,8 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -37,14 +39,14 @@
alterTableTypes op;
String oldName;
String newName;
- List<FieldSchema> newCols;
+ ArrayList<FieldSchema> newCols;
String serdeName;
- Map<String, String> props;
+ HashMap<String, String> props;
String inputFormat;
String outputFormat;
int numberBuckets;
- List<String> bucketColumns;
- List<Order> sortColumns;
+ ArrayList<String> bucketColumns;
+ ArrayList<Order> sortColumns;
String oldColName;
String newColName;
@@ -53,6 +55,9 @@
boolean first;
String afterCol;
+ public AlterTableDesc() {
+ }
+
/**
* @param tblName
* table name
@@ -98,7 +103,7 @@
alterTableTypes alterType) {
op = alterType;
oldName = name;
- this.newCols = newCols;
+ this.newCols = new ArrayList<FieldSchema>(newCols);
}
/**
@@ -133,10 +138,29 @@
oldName = tableName;
op = alterTableTypes.ADDCLUSTERSORTCOLUMN;
numberBuckets = numBuckets;
- bucketColumns = bucketCols;
- sortColumns = sortCols;
+ bucketColumns = new ArrayList<String>(bucketCols);
+ sortColumns = new ArrayList<Order>(sortCols);
}
+ @Explain(displayName = "new columns")
+ public List<String> getNewColsString() {
+ return Utilities.getFieldSchemaString(getNewCols());
+ }
+
+ @Explain(displayName = "type")
+ public String getAlterTableTypeString() {
+ switch (op) {
+ case RENAME:
+ return "rename";
+ case ADDCOLS:
+ return "add columns";
+ case REPLACECOLS:
+ return "replace columns";
+ }
+
+ return "unknown";
+ }
+
/**
* @return the old name of the table
*/
@@ -176,19 +200,6 @@
return op;
}
- @Explain(displayName = "type")
- public String getAlterTableTypeString() {
- switch (op) {
- case RENAME:
- return "rename";
- case ADDCOLS:
- return "add columns";
- case REPLACECOLS:
- return "replace columns";
- }
-
- return "unknown";
- }
/**
* @param op
@@ -201,20 +212,15 @@
/**
* @return the newCols
*/
- public List<FieldSchema> getNewCols() {
+ public ArrayList<FieldSchema> getNewCols() {
return newCols;
}
- @Explain(displayName = "new columns")
- public List<String> getNewColsString() {
- return Utilities.getFieldSchemaString(getNewCols());
- }
-
/**
* @param newCols
* the newCols to set
*/
- public void setNewCols(List<FieldSchema> newCols) {
+ public void setNewCols(ArrayList<FieldSchema> newCols) {
this.newCols = newCols;
}
@@ -238,7 +244,7 @@
* @return the props
*/
@Explain(displayName = "properties")
- public Map<String, String> getProps() {
+ public HashMap<String, String> getProps() {
return props;
}
@@ -246,7 +252,7 @@
* @param props
* the props to set
*/
- public void setProps(Map<String, String> props) {
+ public void setProps(HashMap<String, String> props) {
this.props = props;
}
@@ -300,7 +306,7 @@
/**
* @return the bucket columns
*/
- public List<String> getBucketColumns() {
+ public ArrayList<String> getBucketColumns() {
return bucketColumns;
}
@@ -308,14 +314,14 @@
* @param bucketColumns
* the bucket columns to set
*/
- public void setBucketColumns(List<String> bucketColumns) {
+ public void setBucketColumns(ArrayList<String> bucketColumns) {
this.bucketColumns = bucketColumns;
}
/**
* @return the sort columns
*/
- public List<Order> getSortColumns() {
+ public ArrayList<Order> getSortColumns() {
return sortColumns;
}
@@ -323,7 +329,7 @@
* @param sortColumns
* the sort columns to set
*/
- public void setSortColumns(List<Order> sortColumns) {
+ public void setSortColumns(ArrayList<Order> sortColumns) {
this.sortColumns = sortColumns;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Tue Feb 2 01:12:24 2010
@@ -19,6 +19,8 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -31,10 +33,10 @@
private static final long serialVersionUID = 1L;
String tableName;
boolean isExternal;
- List<FieldSchema> cols;
- List<FieldSchema> partCols;
- List<String> bucketCols;
- List<Order> sortCols;
+ ArrayList<FieldSchema> cols;
+ ArrayList<FieldSchema> partCols;
+ ArrayList<String> bucketCols;
+ ArrayList<Order> sortCols;
int numBuckets;
String fieldDelim;
String fieldEscape;
@@ -46,9 +48,12 @@
String outputFormat;
String location;
String serName;
- Map<String, String> mapProp;
+ HashMap<String, String> mapProp;
boolean ifNotExists;
+ public CreateTableDesc() {
+ }
+
public CreateTableDesc(String tableName, boolean isExternal,
List<FieldSchema> cols, List<FieldSchema> partCols,
List<String> bucketCols, List<Order> sortCols, int numBuckets,
@@ -58,10 +63,10 @@
Map<String, String> mapProp, boolean ifNotExists) {
this.tableName = tableName;
this.isExternal = isExternal;
- this.bucketCols = bucketCols;
- this.sortCols = sortCols;
+ this.bucketCols = new ArrayList<String>(bucketCols);
+ this.sortCols = new ArrayList<Order>(sortCols);
this.collItemDelim = collItemDelim;
- this.cols = cols;
+ this.cols = new ArrayList<FieldSchema>(cols);
this.comment = comment;
this.fieldDelim = fieldDelim;
this.fieldEscape = fieldEscape;
@@ -71,12 +76,25 @@
this.location = location;
this.mapKeyDelim = mapKeyDelim;
this.numBuckets = numBuckets;
- this.partCols = partCols;
+ this.partCols = new ArrayList<FieldSchema>(partCols);
this.serName = serName;
- this.mapProp = mapProp;
+ this.mapProp = new HashMap<String, String>(mapProp);
this.ifNotExists = ifNotExists;
}
+
+ @Explain(displayName = "columns")
+ public List<String> getColsString() {
+ return Utilities.getFieldSchemaString(getCols());
+ }
+
+ @Explain(displayName = "partition columns")
+ public List<String> getPartColsString() {
+ return Utilities.getFieldSchemaString(getPartCols());
+ }
+
+
+
@Explain(displayName = "if not exists")
public boolean getIfNotExists() {
return ifNotExists;
@@ -95,38 +113,28 @@
this.tableName = tableName;
}
- public List<FieldSchema> getCols() {
+ public ArrayList<FieldSchema> getCols() {
return cols;
}
- @Explain(displayName = "columns")
- public List<String> getColsString() {
- return Utilities.getFieldSchemaString(getCols());
- }
-
- public void setCols(List<FieldSchema> cols) {
+ public void setCols(ArrayList<FieldSchema> cols) {
this.cols = cols;
}
- public List<FieldSchema> getPartCols() {
+ public ArrayList<FieldSchema> getPartCols() {
return partCols;
}
- @Explain(displayName = "partition columns")
- public List<String> getPartColsString() {
- return Utilities.getFieldSchemaString(getPartCols());
- }
-
- public void setPartCols(List<FieldSchema> partCols) {
+ public void setPartCols(ArrayList<FieldSchema> partCols) {
this.partCols = partCols;
}
@Explain(displayName = "bucket columns")
- public List<String> getBucketCols() {
+ public ArrayList<String> getBucketCols() {
return bucketCols;
}
- public void setBucketCols(List<String> bucketCols) {
+ public void setBucketCols(ArrayList<String> bucketCols) {
this.bucketCols = bucketCols;
}
@@ -233,7 +241,7 @@
* @return the sortCols
*/
@Explain(displayName = "sort columns")
- public List<Order> getSortCols() {
+ public ArrayList<Order> getSortCols() {
return sortCols;
}
@@ -241,7 +249,7 @@
* @param sortCols
* the sortCols to set
*/
- public void setSortCols(List<Order> sortCols) {
+ public void setSortCols(ArrayList<Order> sortCols) {
this.sortCols = sortCols;
}
@@ -265,7 +273,7 @@
* @return the serDe properties
*/
@Explain(displayName = "serde properties")
- public Map<String, String> getMapProp() {
+ public HashMap<String, String> getMapProp() {
return mapProp;
}
@@ -273,7 +281,7 @@
* @param mapProp
* the map properties to set
*/
- public void setMapProp(Map<String, String> mapProp) {
+ public void setMapProp(HashMap<String, String> mapProp) {
this.mapProp = mapProp;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java Tue Feb 2 01:12:24 2010
@@ -29,6 +29,9 @@
boolean ifNotExists;
String likeTableName;
+ public CreateTableLikeDesc() {
+ }
+
public CreateTableLikeDesc(String tableName, boolean isExternal,
String location, boolean ifNotExists, String likeTableName) {
this.tableName = tableName;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Tue Feb 2 01:12:24 2010
@@ -26,6 +26,7 @@
public class DDLWork implements Serializable {
private static final long serialVersionUID = 1L;
+
private CreateTableDesc createTblDesc;
private CreateTableLikeDesc createTblLikeDesc;
private CreateViewDesc createVwDesc;
@@ -395,6 +396,18 @@
this.showTblStatusDesc = showTblStatusDesc;
}
+ public CreateViewDesc getCreateVwDesc() {
+ return createVwDesc;
+ }
+
+ public void setCreateVwDesc(CreateViewDesc createVwDesc) {
+ this.createVwDesc = createVwDesc;
+ }
+
+ public void setDescFunctionDesc(DescFunctionDesc descFunctionDesc) {
+ this.descFunctionDesc = descFunctionDesc;
+ }
+
public Set<ReadEntity> getInputs() {
return inputs;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java Tue Feb 2 01:12:24 2010
@@ -40,11 +40,11 @@
/**
* table name for the result of show tables
*/
- private final String table = "show";
+ private static final String table = "show";
/**
* thrift ddl for the result of show tables
*/
- private final String schema = "tab_name#string";
+ private static final String schema = "tab_name#string";
public String getTable() {
return table;
@@ -54,6 +54,9 @@
return schema;
}
+ public DescFunctionDesc() {
+ }
+
/**
* @param resFile
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java Tue Feb 2 01:12:24 2010
@@ -25,6 +25,10 @@
@Explain(displayName = "Describe Table")
public class DescTableDesc extends DDLDesc implements Serializable {
+ public void setPartSpec(HashMap<String, String> partSpec) {
+ this.partSpec = partSpec;
+ }
+
private static final long serialVersionUID = 1L;
String tableName;
@@ -34,20 +38,15 @@
/**
* table name for the result of describe table
*/
- private final String table = "describe";
+ private static final String table = "describe";
/**
* thrift ddl for the result of describe table
*/
- private final String schema = "col_name,data_type,comment#string:string:string";
+ private static final String schema = "col_name,data_type,comment#string:string:string";
- public String getTable() {
- return table;
- }
-
- public String getSchema() {
- return schema;
+ public DescTableDesc() {
}
-
+
/**
* @param isExt
* @param partSpec
@@ -62,6 +61,14 @@
this.tableName = tableName;
}
+ public String getTable() {
+ return table;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
/**
* @return the isExt
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java Tue Feb 2 01:12:24 2010
@@ -19,6 +19,9 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -27,9 +30,13 @@
private static final long serialVersionUID = 1L;
String tableName;
- List<Map<String, String>> partSpecs;
+ ArrayList<LinkedHashMap<String, String>> partSpecs;
boolean expectView;
+
+ public DropTableDesc() {
+ }
+
/**
* @param tableName
*/
@@ -39,9 +46,12 @@
this.expectView = expectView;
}
- public DropTableDesc(String tableName, List<Map<String, String>> partSpecs) {
+ public DropTableDesc(String tableName, List<? extends Map<String, String>> partSpecs) {
this.tableName = tableName;
- this.partSpecs = partSpecs;
+ this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
+ for (int i = 0; i < partSpecs.size(); i++) {
+ this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+ }
expectView = false;
}
@@ -64,7 +74,7 @@
/**
* @return the partSpecs
*/
- public List<Map<String, String>> getPartSpecs() {
+ public ArrayList<LinkedHashMap<String, String>> getPartSpecs() {
return partSpecs;
}
@@ -72,7 +82,7 @@
* @param partSpecs
* the partSpecs to set
*/
- public void setPartSpecs(List<Map<String, String>> partSpecs) {
+ public void setPartSpecs(ArrayList<LinkedHashMap<String, String>> partSpecs) {
this.partSpecs = partSpecs;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java Tue Feb 2 01:12:24 2010
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.fs.Path;
@@ -27,36 +28,36 @@
public class ExplainWork implements Serializable {
private static final long serialVersionUID = 1L;
- private Path resFile;
- private List<Task<? extends Serializable>> rootTasks;
+ private String resFile;
+ private ArrayList<Task<? extends Serializable>> rootTasks;
private String astStringTree;
boolean extended;
public ExplainWork() {
}
- public ExplainWork(Path resFile,
+ public ExplainWork(String resFile,
List<Task<? extends Serializable>> rootTasks, String astStringTree,
boolean extended) {
this.resFile = resFile;
- this.rootTasks = rootTasks;
+ this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
this.astStringTree = astStringTree;
this.extended = extended;
}
- public Path getResFile() {
+ public String getResFile() {
return resFile;
}
- public void setResFile(Path resFile) {
+ public void setResFile(String resFile) {
this.resFile = resFile;
}
- public List<Task<? extends Serializable>> getRootTasks() {
+ public ArrayList<Task<? extends Serializable>> getRootTasks() {
return rootTasks;
}
- public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+ public void setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) {
this.rootTasks = rootTasks;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java Tue Feb 2 01:12:24 2010
@@ -31,8 +31,8 @@
private String tblDir;
private TableDesc tblDesc;
- private List<String> partDir;
- private List<PartitionDesc> partDesc;
+ private ArrayList<String> partDir;
+ private ArrayList<PartitionDesc> partDesc;
private int limit;
@@ -59,8 +59,8 @@
}
public FetchWork(List<String> partDir, List<PartitionDesc> partDesc, int limit) {
- this.partDir = partDir;
- this.partDesc = partDesc;
+ this.partDir = new ArrayList<String>(partDir);
+ this.partDesc = new ArrayList<PartitionDesc>(partDesc);
this.limit = limit;
}
@@ -112,7 +112,7 @@
/**
* @return the partDir
*/
- public List<String> getPartDir() {
+ public ArrayList<String> getPartDir() {
return partDir;
}
@@ -150,14 +150,14 @@
* @param partDir
* the partDir to set
*/
- public void setPartDir(List<String> partDir) {
+ public void setPartDir(ArrayList<String> partDir) {
this.partDir = partDir;
}
/**
* @return the partDesc
*/
- public List<PartitionDesc> getPartDesc() {
+ public ArrayList<PartitionDesc> getPartDesc() {
return partDesc;
}
@@ -165,7 +165,7 @@
* @param partDesc
* the partDesc to set
*/
- public void setPartDesc(List<PartitionDesc> partDesc) {
+ public void setPartDesc(ArrayList<PartitionDesc> partDesc) {
this.partDesc = partDesc;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java Tue Feb 2 01:12:24 2010
@@ -1,14 +1,17 @@
package org.apache.hadoop.hive.ql.plan;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.Path;
-public class MsckDesc {
+public class MsckDesc extends DDLWork implements Serializable {
private String tableName;
- private List<Map<String, String>> partitionSpec;
+ private ArrayList<LinkedHashMap<String, String>> partSpecs;
private Path resFile;
private boolean repairPartitions;
@@ -24,11 +27,14 @@
* @param repairPartitions
* remove stale / add new partitions found during the check
*/
- public MsckDesc(String tableName, List<Map<String, String>> partSpecs,
+ public MsckDesc(String tableName, List<? extends Map<String, String>> partSpecs,
Path resFile, boolean repairPartitions) {
super();
this.tableName = tableName;
- partitionSpec = partSpecs;
+ this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
+ for (int i = 0; i < partSpecs.size(); i++) {
+ this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+ }
this.resFile = resFile;
this.repairPartitions = repairPartitions;
}
@@ -51,16 +57,16 @@
/**
* @return partitions to check.
*/
- public List<Map<String, String>> getPartitionSpec() {
- return partitionSpec;
+ public ArrayList<LinkedHashMap<String, String>> getPartSpecs() {
+ return partSpecs;
}
/**
* @param partitionSpec
* partitions to check.
*/
- public void setPartitionSpec(List<Map<String, String>> partitionSpec) {
- this.partitionSpec = partitionSpec;
+ public void setPartSpecs(ArrayList<LinkedHashMap<String, String>> partSpecs) {
+ this.partSpecs = partSpecs;
}
/**
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java Tue Feb 2 01:12:24 2010
@@ -30,11 +30,11 @@
/**
* table name for the result of show tables
*/
- private final String table = "show";
+ private static final String table = "show";
/**
* thrift ddl for the result of show tables
*/
- private final String schema = "tab_name#string";
+ private static final String schema = "tab_name#string";
public String getTable() {
return table;
@@ -43,7 +43,10 @@
public String getSchema() {
return schema;
}
-
+
+ public ShowFunctionsDesc() {
+ }
+
/**
* @param resFile
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java Tue Feb 2 01:12:24 2010
@@ -30,11 +30,11 @@
/**
* table name for the result of show tables
*/
- private final String table = "showpartitions";
+ private static final String table = "showpartitions";
/**
* thrift ddl for the result of show tables
*/
- private final String schema = "partition#string";
+ private static final String schema = "partition#string";
public String getTable() {
return table;
@@ -44,6 +44,9 @@
return schema;
}
+ public ShowPartitionsDesc() {
+ }
+
/**
* @param tabName
* Name of the table whose partitions need to be listed
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java Tue Feb 2 01:12:24 2010
@@ -34,11 +34,11 @@
/**
* table name for the result of show tables
*/
- private final String table = "show_tablestatus";
+ private static final String table = "show_tablestatus";
/**
* thrift ddl for the result of show tables
*/
- private final String schema = "tab_name#string";
+ private static final String schema = "tab_name#string";
public String getTable() {
return table;
@@ -139,7 +139,7 @@
* @param partSpec
* the partSpec to set
*/
- public void setPartSpecs(HashMap<String, String> partSpec) {
+ public void setPartSpec(HashMap<String, String> partSpec) {
this.partSpec = partSpec;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java?rev=905488&r1=905487&r2=905488&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java Tue Feb 2 01:12:24 2010
@@ -30,11 +30,11 @@
/**
* table name for the result of show tables
*/
- private final String table = "show";
+ private static final String table = "show";
/**
* thrift ddl for the result of show tables
*/
- private final String schema = "tab_name#string";
+ private static final String schema = "tab_name#string";
public String getTable() {
return table;
@@ -44,6 +44,9 @@
return schema;
}
+ public ShowTablesDesc() {
+ }
+
/**
* @param resFile
*/