Posted to commits@phoenix.apache.org by ss...@apache.org on 2016/11/17 20:51:42 UTC

[2/3] phoenix git commit: PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

Signed-off-by: Sergey Soldatov <ss...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/af564008
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/af564008
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/af564008

Branch: refs/heads/4.x-HBase-0.98
Commit: af564008bf8e4853cef1aea85a2e117d6756d8c0
Parents: 3f1be1a
Author: Jeongdae Kim <kj...@gmail.com>
Authored: Tue Nov 8 11:26:29 2016 +0900
Committer: Sergey Soldatov <ss...@apache.org>
Committed: Thu Nov 17 12:51:10 2016 -0800

----------------------------------------------------------------------
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java      |  5 ++---
 .../phoenix/hive/util/PhoenixStorageHandlerUtil.java    | 12 ++++++++++++
 2 files changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
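The gist of the change: PhoenixInputFormat no longer calls Hive's internal ColumnProjectionUtils.getReadColumnNames(jobConf) directly; it now goes through a new PhoenixStorageHandlerUtil.getReadColumnNames(conf) helper that parses the comma-separated projection list itself, presumably because the signature or behaviour of that ColumnProjectionUtils method differs in Hive 2.1.0. Below is a minimal standalone sketch of the helper's behaviour (not the committed code; it assumes Hive's hive.io.file.readcolumn.names property key and a plain "," value for PhoenixStorageHandlerConstants.COMMA):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;

    public final class ReadColumnNamesSketch {

        // Hive publishes the projected column names under this key
        // (ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR).
        private static final String READ_COLUMN_NAMES_CONF_STR =
                "hive.io.file.readcolumn.names";

        // Mirrors the new PhoenixStorageHandlerUtil.getReadColumnNames() helper:
        // split the comma-separated projection list from the job configuration,
        // or return an empty list when no projection is set.
        public static List<String> getReadColumnNames(Configuration conf) {
            String colNames = conf.get(READ_COLUMN_NAMES_CONF_STR);
            if (colNames != null && !colNames.isEmpty()) {
                return Arrays.asList(colNames.split(","));
            }
            return Collections.emptyList();
        }

        public static void main(String[] args) {
            Configuration conf = new Configuration(false);
            conf.set(READ_COLUMN_NAMES_CONF_STR, "id,name,created_at");
            System.out.println(getReadColumnNames(conf));  // [id, name, created_at]
        }
    }

Parsing the property locally keeps the storage handler independent of ColumnProjectionUtils method signatures that may vary across Hive releases.
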


http://git-wip-us.apache.org/repos/asf/phoenix/blob/af564008/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index e3d0212..7e2f3d1 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.*;
@@ -110,7 +109,7 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
             }
 
             query = PhoenixQueryBuilder.getInstance().buildQuery(jobConf, tableName,
-                    ColumnProjectionUtils.getReadColumnNames(jobConf), conditionList);
+                    PhoenixStorageHandlerUtil.getReadColumnNames(jobConf), conditionList);
         } else if (PhoenixStorageHandlerConstants.TEZ.equals(executionEngine)) {
             Map<String, String> columnTypeMap = PhoenixStorageHandlerUtil.createColumnTypeMap
                     (jobConf);
@@ -120,7 +119,7 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
 
             String whereClause = jobConf.get(TableScanDesc.FILTER_TEXT_CONF_STR);
             query = PhoenixQueryBuilder.getInstance().buildQuery(jobConf, tableName,
-                    ColumnProjectionUtils.getReadColumnNames(jobConf), whereClause, columnTypeMap);
+                    PhoenixStorageHandlerUtil.getReadColumnNames(jobConf), whereClause, columnTypeMap);
         } else {
             throw new IOException(executionEngine + " execution engine unsupported yet.");
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/af564008/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 0dd1134..18799a5 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.net.DNS;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
@@ -46,7 +47,10 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -205,6 +209,14 @@ public class PhoenixStorageHandlerUtil {
         return columnTypeMap;
     }
 
+    public static List<String> getReadColumnNames(Configuration conf) {
+        String colNames = conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
+        if (colNames != null && !colNames.isEmpty()) {
+            return Arrays.asList(colNames.split(PhoenixStorageHandlerConstants.COMMA));
+        }
+        return Collections.EMPTY_LIST;
+    }
+
     public static boolean isTransactionalTable(Properties tableProperties) {
         String tableIsTransactional = tableProperties.getProperty(hive_metastoreConstants
                 .TABLE_IS_TRANSACTIONAL);