Posted to commits@hive.apache.org by at...@apache.org on 2009/06/03 22:41:01 UTC
svn commit: r781536 - in /hadoop/hive/trunk: ./
metastore/src/java/org/apache/hadoop/hive/metastore/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/metadata/
ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/org/apac...
Author: athusoo
Date: Wed Jun 3 20:40:59 2009
New Revision: 781536
URL: http://svn.apache.org/viewvc?rev=781536&view=rev
Log:
HIVE-443. Remove deprecated functions from metastore.
(Prasad Chakka via athusoo)
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
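
Editor's note on the API change: this commit removes the one-argument table helpers from org.apache.hadoop.hive.ql.metadata.Hive, so callers must now name the database explicitly, as the test updates in the diffs below show. The following is a minimal illustrative sketch of that call-site migration, not code from the commit; the class name, the HiveConf construction, and the use of Hive.get() are assumptions for the example, while the two-, three-, and four-argument overloads and MetaStoreUtils.DEFAULT_DATABASE_NAME come from the changed files themselves.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.MetaStoreUtils;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class DropTableMigrationSketch {
      public static void main(String[] args) throws HiveException {
        // Hypothetical setup: obtain a metastore handle the same way the
        // updated tests do (QTestUtil and TestExecDriver use a Hive instance "db").
        Hive db = Hive.get(new HiveConf(DropTableMigrationSketch.class));

        // Before this commit (overloads now removed from Hive.java):
        //   Table t = db.getTable("src");
        //   db.dropTable("src", true, true);

        // After: every call is qualified with the database name.
        Table t = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src");
        System.out.println("src lives at " + t.getDataLocation());

        // The surviving three-argument getTable can serve as an existence
        // probe: passing throwException=false returns null instead of
        // raising HiveException.
        Table maybe = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2", false);
        if (maybe == null) {
          System.out.println("src2 does not exist");
        }

        // deleteData=true, ignoreUnknownTab=true, matching the test cleanup calls.
        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src", true, true);
      }
    }
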
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Jun 3 20:40:59 2009
@@ -209,6 +209,9 @@
HIVE-534. Fix in the cli so that it does not ignore the first
newline character. (Raghotham Murthy via athusoo)
+ HIVE-443. Remove deprecated functions from metastore.
+ (Prasad Chakka via athusoo)
+
Release 0.3.1 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Wed Jun 3 20:40:59 2009
@@ -21,7 +21,6 @@
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
@@ -42,18 +41,16 @@
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.util.StringUtils;
public class MetaStoreUtils {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Wed Jun 3 20:40:59 2009
@@ -18,26 +18,22 @@
package org.apache.hadoop.hive.ql.exec;
-import java.io.Serializable;
import java.io.IOException;
+import java.io.Serializable;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Arrays;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.loadFileDesc;
import org.apache.hadoop.hive.ql.plan.loadTableDesc;
import org.apache.hadoop.hive.ql.plan.moveWork;
-import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.StringUtils;
/**
@@ -112,7 +108,7 @@
FileSystem fs;
try {
fs = FileSystem.get
- (db.getTable(tbd.getTable().getTableName()).getDataLocation(),conf);
+ (db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tbd.getTable().getTableName()).getDataLocation(),conf);
dirs = fs.globStatus(new Path(tbd.getSourceDir()));
files = new ArrayList<FileStatus>();
for (int i=0; (dirs != null && i<dirs.length); i++) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Wed Jun 3 20:40:59 2009
@@ -259,17 +259,6 @@
}
/**
- * Drops table along with the data in it. If the table doesn't exist then it is a no-op
- * @param tableName
- * @throws HiveException
- * @deprecated Use {@link #dropTable(String, String)} instead
- */
- public void dropTable(String tableName) throws HiveException {
- dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, true, true);
- }
-
-
- /**
* Drops table along with the data in it. If the table doesn't exist
* then it is a no-op
* @param dbName database where the table lives
@@ -284,22 +273,6 @@
* Drops the table.
* @param tableName
* @param deleteData deletes the underlying data along with metadata
- * @param ignoreUnknownTab an exception if thrown if this is falser
- * and table doesn't exist
- * @throws HiveException
- * @deprecated Use {@link #dropTable(String, String, boolean, boolean)} instead
- */
- public void dropTable(String tableName, boolean deleteData,
- boolean ignoreUnknownTab) throws HiveException {
-
- dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- deleteData, ignoreUnknownTab);
- }
-
- /**
- * Drops the table.
- * @param tableName
- * @param deleteData deletes the underlying data along with metadata
* @param ignoreUnknownTab an exception if thrown if this is falser and
* table doesn't exist
* @throws HiveException
@@ -324,18 +297,6 @@
/**
* Returns metadata of the table.
- * @param tableName the name of the table
- * @return the table
- * @exception HiveException if there's an internal error or if the
- * table doesn't exist
- * @deprecated Use {@link #getTable(String, String)} instead
- */
- public Table getTable(final String tableName) throws HiveException {
- return this.getTable(tableName, true);
- }
-
- /**
- * Returns metadata of the table.
* @param dbName the name of the database
* @param tableName the name of the table
* @return the table
@@ -350,22 +311,6 @@
/**
* Returns metadata of the table
- * @param tableName the name of the table
- * @param throwException controls whether an exception is
- * thrown or a null returned
- * @return the table or if something false and
- * throwException is false a null value.
- * @throws HiveException
- * @deprecated Use {@link #getTable(String, String, boolean)} instead
- */
- public Table getTable(final String tableName, boolean throwException)
- throws HiveException {
- return getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- throwException);
- }
-
- /**
- * Returns metadata of the table
* @param dbName the name of the database
* @param tableName the name of the table
* @param throwException controls whether an exception is thrown
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Wed Jun 3 20:40:59 2009
@@ -44,6 +44,7 @@
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
@@ -264,7 +265,7 @@
"srcpart", "srcbucket", "dest1", "dest2",
"dest3", "dest4", "dest4_sequencefile",
"dest_j1", "dest_j2", "dest_g1", "dest_g2"}) {
- db.dropTable(s);
+ db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
}
for(String s: new String [] {"dest4.out", "union.out"}) {
deleteDirectory(new File(warehousePath, s));
@@ -293,12 +294,10 @@
part_cols.add("hr");
db.createTable("srcpart", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
srcTables.add("srcpart");
- Table srcpart = db.getTable("srcpart");
Path fpath;
Path newfpath;
HashMap<String, String> part_spec = new HashMap<String, String>();
- String loadCmd;
for (String ds: new String[]{"2008-04-08", "2008-04-09"}) {
for (String hr: new String[]{"11", "12"}) {
part_spec.clear();
@@ -413,7 +412,7 @@
db.createTable("dest2", cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
db.createTable("dest3", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
- Table dest3 = db.getTable("dest3");
+ Table dest3 = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "dest3");
HashMap<String, String> part_spec = new HashMap<String, String>();
part_spec.put("ds", "2008-04-08");
@@ -485,7 +484,7 @@
drv.run("FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");
// Drop dest4_sequencefile
- db.dropTable("dest4_sequencefile", true, true);
+ db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "dest4_sequencefile", true, true);
}
public int checkNegativeResults(String tname, Exception e) throws Exception {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Wed Jun 3 20:40:59 2009
@@ -18,32 +18,39 @@
package org.apache.hadoop.hive.ql.exec;
-import junit.framework.TestCase;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.LinkedList;
-import java.io.*;
-import java.util.*;
+import junit.framework.TestCase;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.*;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
+import org.apache.hadoop.hive.ql.plan.extractDesc;
+import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.filterDesc;
+import org.apache.hadoop.hive.ql.plan.mapredWork;
+import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.scriptDesc;
+import org.apache.hadoop.hive.ql.plan.selectDesc;
import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.mapred.TextInputFormat;
/**
@@ -103,7 +110,7 @@
cols.add("key");
cols.add("value");
for(String src: srctables) {
- db.dropTable(src, true, true);
+ db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
db.createTable(src, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
db.loadTable(hadoopDataFile[i], src, false, null);
i++;
@@ -465,7 +472,7 @@
System.out.println("Beginning testMapPlan1");
try {
- populateMapPlan1(db.getTable("src"));
+ populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("lt100.txt.deflate", "mapplan1.out");
@@ -480,7 +487,7 @@
System.out.println("Beginning testMapPlan2");
try {
- populateMapPlan2(db.getTable("src"));
+ populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("lt100.txt", "mapplan2.out");
@@ -495,7 +502,7 @@
System.out.println("Beginning testMapRedPlan1");
try {
- populateMapRedPlan1(db.getTable("src"));
+ populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
@@ -510,7 +517,7 @@
System.out.println("Beginning testMapPlan2");
try {
- populateMapRedPlan2(db.getTable("src"));
+ populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("lt100.sorted.txt", "mapredplan2.out");
@@ -525,7 +532,8 @@
System.out.println("Beginning testMapPlan3");
try {
- populateMapRedPlan3(db.getTable("src"), db.getTable("src2"));
+ populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"),
+ db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
@@ -541,7 +549,7 @@
System.out.println("Beginning testMapPlan4");
try {
- populateMapRedPlan4(db.getTable("src"));
+ populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
@@ -556,7 +564,7 @@
System.out.println("Beginning testMapPlan5");
try {
- populateMapRedPlan5(db.getTable("src"));
+ populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
@@ -571,7 +579,7 @@
System.out.println("Beginning testMapPlan6");
try {
- populateMapRedPlan6(db.getTable("src"));
+ populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
File planFile = generatePlanFile();
executePlan(planFile);
fileDiff("lt100.sorted.txt", "mapredplan6.out");
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=781536&r1=781535&r2=781536&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Wed Jun 3 20:40:59 2009
@@ -22,26 +22,24 @@
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
import java.util.Map;
-import java.util.TreeSet;
-import java.util.Map.Entry;
+
+import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
import org.apache.hadoop.hive.ql.history.HiveHistory.TaskInfo;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.tools.LineageInfo;
-import org.apache.hadoop.hive.service.HiveInterface;
import org.apache.hadoop.mapred.TextInputFormat;
-import junit.framework.TestCase;
-
public class TestHiveHistory extends TestCase {
static HiveConf conf;
@@ -93,7 +91,7 @@
cols.add("key");
cols.add("value");
for (String src : srctables) {
- db.dropTable(src, true, true);
+ db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
db.createTable(src, cols, null, TextInputFormat.class,
IgnoreKeyTextOutputFormat.class);
db.loadTable(hadoopDataFile[i], src, false, null);