You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by ni...@apache.org on 2019/12/23 03:14:30 UTC
[kylin] 17/30: Less array copy
This is an automated email from the ASF dual-hosted git repository.
nic pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit 0f258e66c1d830180cc99838f74e2d3be1b25803
Author: shaofengshi <sh...@apache.org>
AuthorDate: Sun Mar 31 10:40:05 2019 +0800
Less array copy
---
.../main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java | 2 +-
.../main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java | 6 +++---
.../src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java | 2 --
.../main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java | 2 +-
4 files changed, 5 insertions(+), 7 deletions(-)
diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
index ba1f233..b8ddf95 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingByLayer.java
@@ -70,7 +70,7 @@ import java.util.List;
import java.util.Locale;
/**
- * Flink application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
+ * Flink application to build cube with the "by-layer" algorithm.
*/
public class FlinkCubingByLayer extends AbstractApplication implements Serializable {
diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
index c51d69c..fcf8d6c 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkCubingMerge.java
@@ -299,9 +299,9 @@ public class FlinkCubingMerge extends AbstractApplication implements Serializable {
@Override
public Tuple2<Text, Text> map(Tuple2<Text, Object[]> tuple2) throws Exception {
ByteBuffer valueBuf = codec.encode(tuple2.f1);
- byte[] encodedBytes = new byte[valueBuf.position()];
- System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
- return new Tuple2<>(tuple2.f0, new Text(encodedBytes));
+ Text result = new Text();
+ result.set(valueBuf.array(), 0, valueBuf.position());
+ return new Tuple2<>(tuple2.f0, result);
}
}
diff --git a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
index 4473a44..e7b1a49 100644
--- a/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
+++ b/engine-flink/src/main/java/org/apache/kylin/engine/flink/FlinkUtil.java
@@ -140,8 +140,6 @@ public class FlinkUtil {
@Override
public String[] map(Tuple2<BytesWritable, Text> tuple2) throws Exception {
- System.out.println("read records from hive.");
-
String s = Bytes.toString(tuple2.f1.getBytes(), 0, tuple2.f1.getLength());
return s.split(BatchConstants.SEQUENCE_FILE_DEFAULT_DELIMITER);
}
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index 33f3e51..1c0c18f 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -73,7 +73,7 @@ import org.slf4j.LoggerFactory;
import scala.Tuple2;
/**
- * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
+ * Spark application to build cube with the "by-layer" algorithm.
*/
public class SparkCubingByLayer extends AbstractApplication implements Serializable {