Posted to commits@hive.apache.org by da...@apache.org on 2015/10/01 19:31:20 UTC

hive git commit: HIVE-10752: Revert HIVE-5193 (Aihua Xu via Chaoyu Tang)

Repository: hive
Updated Branches:
  refs/heads/branch-1 f953fc3cf -> fda7c5175


HIVE-10752: Revert HIVE-5193 (Aihua Xu via Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fda7c517
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fda7c517
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fda7c517

Branch: refs/heads/branch-1
Commit: fda7c517586621c0319f86e691cdfc3520f984ed
Parents: f953fc3
Author: Daniel Dai <da...@hortonworks.com>
Authored: Thu Oct 1 10:31:02 2015 -0700
Committer: Daniel Dai <da...@hortonworks.com>
Committed: Thu Oct 1 10:31:02 2015 -0700

----------------------------------------------------------------------
 .../apache/hive/hcatalog/pig/HCatLoader.java    |  8 ----
 .../hive/hcatalog/pig/TestHCatLoader.java       | 44 --------------------
 2 files changed, 52 deletions(-)
----------------------------------------------------------------------
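For context, the change being reverted (HIVE-5193) made HCatLoader append the column indexes requested by Pig to the job configuration so that columnar readers (RCFile, ORC) could skip unrequested columns. Below is a minimal, self-contained sketch of that pushdown, reconstructed from the removed lines in the diff that follows; the class and method names here are illustrative only, and RequiredField/RequiredFieldList are assumed to be the Pig LoadPushDown types HCatLoader already works with.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.mapreduce.Job;
import org.apache.pig.LoadPushDown.RequiredField;
import org.apache.pig.LoadPushDown.RequiredFieldList;

public class ProjectionPushdownSketch {
  // Record the columns Pig actually asked for so columnar storage formats
  // can prune everything else at read time (sketch, not the committed code).
  static void pushReadColumns(Job job, RequiredFieldList requiredFieldsInfo) {
    List<Integer> readColumnIds =
        new ArrayList<Integer>(requiredFieldsInfo.getFields().size());
    for (RequiredField rf : requiredFieldsInfo.getFields()) {
      readColumnIds.add(rf.getIndex());
    }
    // Same call that this revert removes from HCatLoader.
    ColumnProjectionUtils.appendReadColumns(job.getConfiguration(), readColumnIds);
  }
}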


http://git-wip-us.apache.org/repos/asf/hive/blob/fda7c517/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
index c951847..0685790 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
@@ -19,7 +19,6 @@
 package org.apache.hive.hcatalog.pig;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
@@ -32,7 +31,6 @@ import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.security.Credentials;
@@ -163,12 +161,6 @@ public class HCatLoader extends HCatBaseLoader {
     if (requiredFieldsInfo != null) {
       // convert to hcatschema and pass to HCatInputFormat
       try {
-        //push down projections to columnar store works for RCFile and ORCFile
-        ArrayList<Integer> list = new ArrayList<Integer>(requiredFieldsInfo.getFields().size());
-        for (RequiredField rf : requiredFieldsInfo.getFields()) {
-          list.add(rf.getIndex());
-        }
-        ColumnProjectionUtils.appendReadColumns(job.getConfiguration(), list);
         outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(), signature, this.getClass());
         HCatInputFormat.setOutputSchema(job, outputSchema);
       } catch (Exception e) {

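The projection written by appendReadColumns ends up in the job configuration under ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR as a comma-separated index list, which is what the test removed below asserted ("0,1" for the two generated columns). A minimal sketch of observing that key, assuming the Hive serde2 and Hadoop mapreduce classes are on the classpath:

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.mapreduce.Job;

public class ReadColumnIdsSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration());
    // Push two column indexes, as HCatLoader did before this revert.
    ColumnProjectionUtils.appendReadColumns(job.getConfiguration(), Arrays.asList(0, 1));
    // Prints the projected column ids, e.g. "0,1".
    System.out.println(
        job.getConfiguration().get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
  }
}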
http://git-wip-us.apache.org/repos/asf/hive/blob/fda7c517/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
index fc18a3b..45a219c 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
@@ -19,8 +19,6 @@
 package org.apache.hive.hcatalog.pig;
 
 import java.io.File;
-import java.io.FileWriter;
-import java.io.PrintWriter;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.sql.Date;
@@ -36,10 +34,7 @@ import java.util.Properties;
 import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
-
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -49,7 +44,6 @@ import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
 
 import org.apache.hadoop.util.Shell;
@@ -66,10 +60,6 @@ import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-import org.apache.pig.PigRunner;
-import org.apache.pig.tools.pigstats.OutputStats;
-import org.apache.pig.tools.pigstats.PigStats;
-
 import org.joda.time.DateTime;
 
 import org.junit.After;
@@ -492,40 +482,6 @@ public class TestHCatLoader {
   }
 
   @Test
-  public void testColumnarStorePushdown() throws Exception {
-    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
-    String PIGOUTPUT_DIR = TEST_DATA_DIR+ "/colpushdownop";
-    String PIG_FILE = "test.pig";
-    String expectedCols = "0,1";
-    PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
-    w.println("A = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
-    w.println("B = foreach A generate name,studentid;");
-    w.println("C = filter B by name is not null;");
-    w.println("store C into '" + PIGOUTPUT_DIR + "' using PigStorage();");
-    w.close();
-
-    try {
-      String[] args = { "-x", "local", PIG_FILE };
-      PigStats stats = PigRunner.run(args, null);
-      //Pig script was successful
-      assertTrue(stats.isSuccessful());
-      //Single MapReduce job is launched
-      OutputStats outstats = stats.getOutputStats().get(0);
-      assertTrue(outstats!= null);
-      assertEquals(expectedCols,outstats.getConf()
-        .get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
-      //delete output file on exit
-      FileSystem fs = FileSystem.get(outstats.getConf());
-      if (fs.exists(new Path(PIGOUTPUT_DIR)))
-      {
-        fs.delete(new Path(PIGOUTPUT_DIR), true);
-      }
-    } finally {
-      new File(PIG_FILE).delete();
-    }
-  }
-
-  @Test
   public void testGetInputBytes() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");