Posted to commits@kylin.apache.org by sh...@apache.org on 2016/06/07 08:31:45 UTC

[4/8] kylin git commit: KYLIN-1758 add the hive set statements

KYLIN-1758 add the hive set statements


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a1d17e21
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a1d17e21
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a1d17e21

Branch: refs/heads/1.5.x-HBase1.x
Commit: a1d17e215561089bb5958b84e876a74fc60f092f
Parents: ef1d13f
Author: shaofengshi <sh...@apache.org>
Authored: Sat Jun 4 16:22:38 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Jun 4 16:22:38 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/job/JoinedFlatTable.java   | 56 +++++++++++---------
 .../apache/kylin/source/hive/HiveMRInput.java   | 14 +++++
 2 files changed, 44 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
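For context: the patch extracts the per-job "SET key=value;" generation out of JoinedFlatTable.generateInsertDataStatement into a reusable helper, generateHiveSetStatements(JobEngineConfig), and prepends its output in the two HiveMRInput steps. A minimal usage sketch of the new helper, mirroring createFlatHiveTableStep below; all types and method signatures come from the patched files, but the no-arg HiveCmdBuilder constructor and the storageDfsDir argument are assumptions here, not part of this diff:

    // Sketch only: assemble the flat-table HQL with the hive SET statements injected
    // between CREATE TABLE and INSERT OVERWRITE, as the patched createFlatHiveTableStep does.
    static String buildFlatTableCmd(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc,
            String storageDfsDir) throws IOException {
        String setHql = JoinedFlatTable.generateHiveSetStatements(conf); // new in this commit
        String useDatabaseHql = "USE " + conf.getConfig().getHiveDatabaseForIntermediateTable() + ";";
        String dropTableHql = JoinedFlatTable.generateDropTableStatement(flatTableDesc);
        String createTableHql = JoinedFlatTable.generateCreateTableStatement(flatTableDesc, storageDfsDir);
        String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(flatTableDesc, conf);

        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder(); // assumed no-arg constructor
        hiveCmdBuilder.addStatement(useDatabaseHql);
        hiveCmdBuilder.addStatement(dropTableHql);
        hiveCmdBuilder.addStatement(createTableHql);
        hiveCmdBuilder.addStatement(setHql); // SET statements take effect before the INSERT
        hiveCmdBuilder.addStatement(insertDataHqls);
        return hiveCmdBuilder.build();
    }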


http://git-wip-us.apache.org/repos/asf/kylin/blob/a1d17e21/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index 5886325..f13a48c 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -55,6 +55,36 @@ public class JoinedFlatTable {
         return storageDfsDir + "/" + intermediateTableDesc.getTableName();
     }
 
+    public static String generateHiveSetStatements(JobEngineConfig engineConfig) throws IOException {
+        StringBuilder buffer = new StringBuilder();
+        File hadoopPropertiesFile = new File(engineConfig.getHiveConfFilePath());
+
+        if (hadoopPropertiesFile.exists()) {
+            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+            DocumentBuilder builder;
+            Document doc;
+            try {
+                builder = factory.newDocumentBuilder();
+                doc = builder.parse(hadoopPropertiesFile);
+                NodeList nl = doc.getElementsByTagName("property");
+                for (int i = 0; i < nl.getLength(); i++) {
+                    String name = doc.getElementsByTagName("name").item(i).getFirstChild().getNodeValue();
+                    String value = doc.getElementsByTagName("value").item(i).getFirstChild().getNodeValue();
+                    if (!name.equals("tmpjars")) {
+                        buffer.append("SET " + name + "=" + value + ";\n");
+                    }
+                }
+
+            } catch (ParserConfigurationException e) {
+                throw new IOException(e);
+            } catch (SAXException e) {
+                throw new IOException(e);
+            }
+        }
+
+        return buffer.toString();
+    }
+
     public static String generateCreateTableStatement(IJoinedFlatTableDesc intermediateTableDesc, String storageDfsDir) {
         StringBuilder ddl = new StringBuilder();
 
@@ -86,32 +116,6 @@ public class JoinedFlatTable {
 
     public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, JobEngineConfig engineConfig) throws IOException {
         StringBuilder sql = new StringBuilder();
-
-        File hadoopPropertiesFile = new File(engineConfig.getHiveConfFilePath());
-
-        if (hadoopPropertiesFile.exists()) {
-            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
-            DocumentBuilder builder;
-            Document doc;
-            try {
-                builder = factory.newDocumentBuilder();
-                doc = builder.parse(hadoopPropertiesFile);
-                NodeList nl = doc.getElementsByTagName("property");
-                for (int i = 0; i < nl.getLength(); i++) {
-                    String name = doc.getElementsByTagName("name").item(i).getFirstChild().getNodeValue();
-                    String value = doc.getElementsByTagName("value").item(i).getFirstChild().getNodeValue();
-                    if (name.equals("tmpjars") == false) {
-                        sql.append("SET " + name + "=" + value + ";").append("\n");
-                    }
-                }
-
-            } catch (ParserConfigurationException e) {
-                throw new IOException(e);
-            } catch (SAXException e) {
-                throw new IOException(e);
-            }
-        }
-
         sql.append("INSERT OVERWRITE TABLE " + intermediateTableDesc.getTableName() + " " + generateSelectDataStatement(intermediateTableDesc) + ";").append("\n");
 
         return sql.toString();
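
To see the new generateHiveSetStatements in isolation: a self-contained, JDK-only sketch of the same idea, i.e. parsing a Hadoop/Hive style XML conf file and emitting one "SET name=value;" line per <property>, skipping tmpjars. The class name and the per-<property> child lookup are illustrative; the patch itself keeps the original index-based lookup over the global <name>/<value> node lists.

    import java.io.File;
    import java.io.IOException;

    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;

    import org.w3c.dom.Document;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;
    import org.xml.sax.SAXException;

    public class HiveSetStatementSketch {

        // Turn each <property><name>k</name><value>v</value></property> into "SET k=v;\n".
        public static String toSetStatements(File hiveConfXml) throws IOException {
            StringBuilder buffer = new StringBuilder();
            if (!hiveConfXml.exists()) {
                return buffer.toString(); // no conf file means no SET statements
            }
            try {
                Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(hiveConfXml);
                NodeList properties = doc.getElementsByTagName("property");
                for (int i = 0; i < properties.getLength(); i++) {
                    Element property = (Element) properties.item(i);
                    String name = property.getElementsByTagName("name").item(0).getTextContent().trim();
                    String value = property.getElementsByTagName("value").item(0).getTextContent().trim();
                    if (!"tmpjars".equals(name)) { // tmpjars is excluded, as in the patch
                        buffer.append("SET ").append(name).append("=").append(value).append(";\n");
                    }
                }
            } catch (ParserConfigurationException e) {
                throw new IOException(e); // surface parser setup problems as IOException, like the patch
            } catch (SAXException e) {
                throw new IOException(e); // surface malformed XML as IOException, like the patch
            }
            return buffer.toString();
        }

        public static void main(String[] args) throws IOException {
            System.out.println(toSetStatements(new File(args[0])));
        }
    }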

http://git-wip-us.apache.org/repos/asf/kylin/blob/a1d17e21/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index 16642fa..4026006 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -117,6 +117,12 @@ public class HiveMRInput implements IMRInput {
         }
 
         public static AbstractExecutable createFlatHiveTableStep(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc, String jobId) {
+            String setHql = "";
+            try {
+                setHql = JoinedFlatTable.generateHiveSetStatements(conf);
+            } catch (IOException e) {
+                throw new RuntimeException("Failed to generate hive set statements for createFlatHiveTableStep", e);
+            }
 
             final String useDatabaseHql = "USE " + conf.getConfig().getHiveDatabaseForIntermediateTable() + ";";
             final String dropTableHql = JoinedFlatTable.generateDropTableStatement(flatTableDesc);
@@ -134,6 +140,7 @@ public class HiveMRInput implements IMRInput {
             hiveCmdBuilder.addStatement(useDatabaseHql);
             hiveCmdBuilder.addStatement(dropTableHql);
             hiveCmdBuilder.addStatement(createTableHql);
+            hiveCmdBuilder.addStatement(setHql);
             hiveCmdBuilder.addStatement(insertDataHqls);
 
             step.setCmd(hiveCmdBuilder.build());
@@ -164,8 +171,15 @@ public class HiveMRInput implements IMRInput {
             if(lookupViewsTables.size() == 0) {
                 return null;
             }
+            String setHql = "";
+            try {
+                setHql = JoinedFlatTable.generateHiveSetStatements(conf);
+            } catch (IOException e) {
+                throw new RuntimeException("Failed to generate hive set statements for the lookup view materialization step", e);
+            }
             final String useDatabaseHql = "USE " + conf.getConfig().getHiveDatabaseForIntermediateTable() + ";";
             hiveCmdBuilder.addStatement(useDatabaseHql);
+            hiveCmdBuilder.addStatement(setHql);
             for(TableDesc lookUpTableDesc : lookupViewsTables) {
                 if (TableDesc.TABLE_TYPE_VIRTUAL_VIEW.equalsIgnoreCase(lookUpTableDesc.getTableType())) {
                     StringBuilder createIntermediateTableHql = new StringBuilder();