Posted to commits@linkis.apache.org by pe...@apache.org on 2023/02/23 13:18:03 UTC

[linkis] branch dev-1.3.3 updated: [feat] support different hive version (#4255)

This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.3.3
in repository https://gitbox.apache.org/repos/asf/linkis.git


The following commit(s) were added to refs/heads/dev-1.3.3 by this push:
     new ee29de943 [feat] support different hive version (#4255)
ee29de943 is described below

commit ee29de94374b6fb59285a0bf6b0a3710a3c6e430
Author: GuoPhilipse <46...@users.noreply.github.com>
AuthorDate: Thu Feb 23 21:17:57 2023 +0800

    [feat] support different hive version (#4255)
    
    * support different hive version
    
    * fix code style
    
    * log exception
---
 linkis-engineconn-plugins/flink/pom.xml            |  1 -
 linkis-engineconn-plugins/hive/pom.xml             |  4 --
 .../hive/serde/CustomerDelimitedJSONSerDe.java     | 54 ++++++++++++++++------
 .../linkis-metadata-query/service/hive/pom.xml     |  1 -
 pom.xml                                            |  1 +
 5 files changed, 42 insertions(+), 19 deletions(-)
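
The per-module <hive.version> properties are removed in favor of a single definition in the root pom, and the serde no longer references the interval object inspectors at compile time; instead it checks at runtime whether the running Hive defines those primitive categories. A minimal sketch of that detection step, assuming only the Hive serde2 API already used in the diff (the helper class and method name below are illustrative, not part of the commit):

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

    /** Illustrative helper, not part of the commit. */
    public final class HivePrimitiveSupport {

      private HivePrimitiveSupport() {}

      /** True if the Hive version on the classpath defines the named primitive category. */
      public static boolean hasPrimitiveCategory(String name) {
        // Scan the enum by name rather than referencing the constants directly,
        // so this compiles and runs against a Hive build that may not define
        // INTERVAL_YEAR_MONTH or INTERVAL_DAY_TIME.
        for (PrimitiveObjectInspector.PrimitiveCategory category :
            PrimitiveObjectInspector.PrimitiveCategory.values()) {
          if (category.name().equals(name)) {
            return true;
          }
        }
        return false;
      }
    }

Usage mirrors the default branch of the switch in the serde diff below, e.g. hasPrimitiveCategory("INTERVAL_YEAR_MONTH").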

diff --git a/linkis-engineconn-plugins/flink/pom.xml b/linkis-engineconn-plugins/flink/pom.xml
index a6c0894a5..7fe87fb2d 100644
--- a/linkis-engineconn-plugins/flink/pom.xml
+++ b/linkis-engineconn-plugins/flink/pom.xml
@@ -27,7 +27,6 @@
   <artifactId>linkis-engineconn-plugin-flink</artifactId>
   <properties>
     <flink.version>1.12.2</flink.version>
-    <hive.version>2.3.3</hive.version>
     <commons-cli.version>1.3.1</commons-cli.version>
   </properties>
 
diff --git a/linkis-engineconn-plugins/hive/pom.xml b/linkis-engineconn-plugins/hive/pom.xml
index 8fe446167..28b60fff0 100644
--- a/linkis-engineconn-plugins/hive/pom.xml
+++ b/linkis-engineconn-plugins/hive/pom.xml
@@ -26,10 +26,6 @@
 
   <artifactId>linkis-engineplugin-hive</artifactId>
 
-  <properties>
-    <hive.version>2.3.3</hive.version>
-  </properties>
-
   <dependencies>
 
     <dependency>
diff --git a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
index 671b0c1d1..9c425ff05 100644
--- a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
+++ b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
@@ -17,6 +17,8 @@
 
 package org.apache.linkis.engineplugin.hive.serde;
 
+import org.apache.linkis.common.utils.ClassUtils;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -33,6 +35,7 @@ import org.apache.hadoop.io.WritableComparable;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.lang.reflect.InvocationTargetException;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -309,18 +312,6 @@ public class CustomerDelimitedJSONSerDe extends LazySimpleSerDe {
           binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
           break;
         }
-      case INTERVAL_YEAR_MONTH:
-        {
-          wc = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o);
-          binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
-          break;
-        }
-      case INTERVAL_DAY_TIME:
-        {
-          wc = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o);
-          binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
-          break;
-        }
       case DECIMAL:
         {
           HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) oi;
@@ -329,7 +320,44 @@ public class CustomerDelimitedJSONSerDe extends LazySimpleSerDe {
         }
       default:
         {
-          throw new RuntimeException("Unknown primitive type: " + category);
+          boolean containsIntervalYearMonth = false;
+          boolean containsIntervalDayTime = false;
+          for (PrimitiveObjectInspector.PrimitiveCategory primitiveCategory :
+              PrimitiveObjectInspector.PrimitiveCategory.values()) {
+            containsIntervalYearMonth = "INTERVAL_YEAR_MONTH".equals(primitiveCategory.name());
+            containsIntervalDayTime = "INTERVAL_DAY_TIME".equals(primitiveCategory.name());
+            try {
+              if (containsIntervalYearMonth) {
+                wc =
+                    (WritableComparable)
+                        ClassUtils.getClassInstance(
+                                "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector")
+                            .getClass()
+                            .getMethod("getPrimitiveWritableObject", Object.class)
+                            .invoke(oi, o);
+                binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
+                break;
+              }
+              if (containsIntervalDayTime) {
+                wc =
+                    (WritableComparable)
+                        ClassUtils.getClassInstance(
+                                "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector")
+                            .getClass()
+                            .getMethod("getPrimitiveWritableObject", Object.class)
+                            .invoke(oi, o);
+                binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
+                break;
+              }
+            } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
+              LOG.error("Fail to invoke method:[getPrimitiveWritableObject]!", e);
+            }
+          }
+          if (containsIntervalYearMonth || containsIntervalDayTime) {
+            break;
+          } else {
+            throw new RuntimeException("Unknown primitive type: " + category);
+          }
         }
     }
     if (binaryData == null) {
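
With the compile-time cases removed, the default branch above resolves the interval object inspectors reflectively, so the same serde binary works whether or not the running Hive ships those classes. A minimal sketch of that reflective call, assuming the inspector class is present at runtime (the wrapper below is illustrative; the class name and the getPrimitiveWritableObject(Object) signature come from the Hive serde2 API shown in the diff):

    import java.lang.reflect.Method;

    import org.apache.hadoop.io.WritableComparable;

    /** Illustrative wrapper, not part of the commit. */
    public final class IntervalWritableResolver {

      private IntervalWritableResolver() {}

      public static WritableComparable<?> primitiveWritable(
          Object objectInspector, Object value, String inspectorClassName)
          throws ReflectiveOperationException {
        // Look up getPrimitiveWritableObject(Object) on the version-specific class, e.g.
        // org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector,
        // and invoke it on the inspector instance handed in by the serializer.
        Class<?> inspectorClass = Class.forName(inspectorClassName);
        Method getter = inspectorClass.getMethod("getPrimitiveWritableObject", Object.class);
        return (WritableComparable<?>) getter.invoke(objectInspector, value);
      }
    }

In the committed code the reflective failures are caught and logged rather than propagated, matching the "log exception" note in the commit message.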
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
index 127913944..b0c9cdef3 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
@@ -26,7 +26,6 @@
   <artifactId>linkis-metadata-query-service-hive</artifactId>
 
   <properties>
-    <hive.version>2.3.3</hive.version>
     <hadoop.version>2.7.2</hadoop.version>
     <datanucleus-api-jdo.version>4.2.4</datanucleus-api-jdo.version>
   </properties>
diff --git a/pom.xml b/pom.xml
index 6e42ea39d..030b00f87 100644
--- a/pom.xml
+++ b/pom.xml
@@ -106,6 +106,7 @@
     <revision>1.3.2-SNAPSHOT</revision>
     <jedis.version>2.9.2</jedis.version>
     <spark.version>2.4.3</spark.version>
+    <hive.version>2.3.3</hive.version>
     <hadoop.version>2.7.2</hadoop.version>
     <hadoop-hdfs-client.artifact>hadoop-hdfs</hadoop-hdfs-client.artifact>
     <hadoop-hdfs-client-shade.version>2.7.2</hadoop-hdfs-client-shade.version>

