Posted to commits@kylin.apache.org by sh...@apache.org on 2018/09/19 01:45:32 UTC

[kylin] branch master updated: KYLIN-3543 Export HBase configuration only for Spark engine

This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/master by this push:
     new 6dcd795  KYLIN-3543 Export HBase configuration only for Spark engine
6dcd795 is described below

commit 6dcd79566e3a8a65650dfde3a3962bf51d52c4b6
Author: Yichen Zhou <zh...@gmail.com>
AuthorDate: Tue Sep 18 10:30:02 2018 +0800

    KYLIN-3543 Export HBase configuration only for Spark engine
---
 .../java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 93c1296..7205802 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -53,6 +53,7 @@ import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.engine.mr.common.CubeStatsReader;
 import org.apache.kylin.engine.mr.common.CuboidShardUtil;
 import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.metadata.model.IEngineAware;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -119,7 +120,11 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 partitionFilePath.getParent());
 
         CubeHTableUtil.createHTable(cubeSegment, splitKeys);
-        exportHBaseConfiguration(cubeSegment.getStorageLocationIdentifier());
+
+        // Export the HBase configuration in advance to avoid connecting to HBase from Spark
+        if (cubeDesc.getEngineType() == IEngineAware.ID_SPARK) {
+            exportHBaseConfiguration(cubeSegment.getStorageLocationIdentifier());
+        }
         return 0;
     }
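
For readers following along, the change above boils down to a simple engine-type gate: the HBase configuration is exported ahead of time only when the cube is built with the Spark engine, so the Spark job can read it from the exported copy instead of opening its own HBase connection. The sketch below illustrates that pattern only; IEngineAware.ID_SPARK, getEngineType(), and the exportHBaseConfiguration(...) idea come from the diff, while the surrounding class, fields, and ConfigExporter interface are hypothetical stand-ins rather than Kylin's actual CreateHTableJob code.

    // Illustrative sketch of the engine-type gate introduced in this commit.
    // Only IEngineAware.ID_SPARK and the "export HBase config for Spark" idea are
    // taken from the diff; everything else here is a hypothetical stand-in.
    import org.apache.kylin.metadata.model.IEngineAware;

    public class EngineGateSketch {

        /** Hypothetical stand-in for CreateHTableJob.exportHBaseConfiguration(...). */
        public interface ConfigExporter {
            void export(String hbaseTableName);
        }

        private final int engineType;           // e.g. cubeDesc.getEngineType()
        private final ConfigExporter exporter;   // writes the HBase config where Spark can read it

        public EngineGateSketch(int engineType, ConfigExporter exporter) {
            this.engineType = engineType;
            this.exporter = exporter;
        }

        /** Export the HBase configuration ahead of time only for the Spark engine,
         *  so the Spark build job never needs to connect to HBase itself. */
        public void maybeExportFor(String hbaseTableName) {
            if (engineType == IEngineAware.ID_SPARK) {
                exporter.export(hbaseTableName);
            }
        }
    }

As the in-code comment in the diff states, the point of exporting the configuration in advance is that the Spark engine does not have to connect to HBase during the build; other engines keep the previous behavior and skip the export.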