Posted to commits@kylin.apache.org by ni...@apache.org on 2019/12/23 03:14:35 UTC

[kylin] 22/30: KYLIN-4071 Fix segments merge with different building algorithms

This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 61a08150b8f415d9d2225e6a9a618e9167a41010
Author: nichunen <ni...@apache.org>
AuthorDate: Thu Jul 4 14:50:22 2019 +0800

    KYLIN-4071 Fix segments merge with different building algorithms
---
 .../src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java       | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
index 4ea5fc2..73f532a 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.kylin.common.util.Bytes;
@@ -89,7 +90,9 @@ public class FlinkUtil {
             return env.createInput(HadoopInputs.readSequenceFile(keyClass, valueClass, inputHDFSPath.toString()));
         }
 
-        return env.createInput(HadoopInputs.readSequenceFile(keyClass, valueClass, StringUtil.join(inputFolders, ",")));
+        Job job = Job.getInstance();
+        FileInputFormat.setInputPaths(job, StringUtil.join(inputFolders, ","));
+        return env.createInput(HadoopInputs.createHadoopInput(new SequenceFileInputFormat(), keyClass, valueClass, job));
     }
 
     public static void setHadoopConfForCuboid(Job job, CubeSegment segment, String metaUrl) throws Exception {
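
Note on the change: the old code handed a single comma-joined string of all
segment folders directly to HadoopInputs.readSequenceFile(), while the new code
registers the folders on a Hadoop Job via FileInputFormat.setInputPaths() and
builds the input through HadoopInputs.createHadoopInput() with an explicit
SequenceFileInputFormat, so each path is resolved by the MapReduce input format
itself. The sketch below is a minimal, self-contained illustration of that
pattern; the folder paths, key/value classes (Text/Text), and class name are
hypothetical stand-ins, not Kylin's actual values, and the Flink DataSet API
shown matches the flink-hadoop-compatibility module this code depends on.

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.hadoopcompatibility.HadoopInputs;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

    public class MultiFolderSequenceFileRead {

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            // Hypothetical segment output folders; in Kylin these would be the
            // cuboid root paths of the segments being merged.
            String[] inputFolders = {
                    "hdfs:///kylin/cuboid/segment_1",
                    "hdfs:///kylin/cuboid/segment_2"
            };

            // Register every folder on a Job instance so the MapReduce
            // FileInputFormat resolves each path itself, rather than passing
            // one comma-joined string straight to readSequenceFile().
            Job job = Job.getInstance();
            FileInputFormat.setInputPaths(job, String.join(",", inputFolders));

            // Wrap the Hadoop input format for Flink; keys and values arrive
            // as Tuple2<K, V> records in the resulting DataSet.
            DataSet<Tuple2<Text, Text>> cuboidData = env.createInput(
                    HadoopInputs.createHadoopInput(
                            new SequenceFileInputFormat<Text, Text>(),
                            Text.class, Text.class, job));

            System.out.println("Records read: " + cuboidData.count());
        }
    }

Routing the paths through the Job, as the commit does, means folders produced
by different building algorithms are enumerated independently by the input
format, which is presumably what the single joined-string call mishandled
during segment merge.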