You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by ni...@apache.org on 2019/05/31 14:20:25 UTC

[kylin] branch master updated (cfce8bb -> d533d18)

This is an automated email from the ASF dual-hosted git repository.

nic pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git.


    from cfce8bb  KYLIN-3994: StorageCleanupJob may delete cube id data of new built segment because of cube cache in CubeManager (#633)
     new 21045e8  Rename some not friendly variable name and method.
     new d533d18  Remove -p parameter when check port availability, which will output warning info if not running as root user.

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 build/bin/check-port-availability.sh                                  | 2 +-
 engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java    | 4 ++--
 .../src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java   | 2 +-
 .../main/java/org/apache/kylin/engine/mr/common/BatchConstants.java   | 2 +-
 .../src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java     | 4 ++--
 .../main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java    | 2 +-
 .../main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java    | 2 +-
 .../main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java | 2 +-
 .../org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java    | 2 +-
 .../main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java   | 2 +-
 .../src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java  | 4 ++--
 .../main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java  | 2 +-
 .../main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java    | 4 ++--
 .../apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java    | 4 ++--
 .../java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java    | 2 +-
 .../java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java     | 4 ++--
 16 files changed, 22 insertions(+), 22 deletions(-)


[kylin] 01/02: Rename some not friendly variable name and method.

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 21045e85ff83674b735bc8d9a7640668d8f385c1
Author: 凡梦星尘 <el...@gmail.com>
AuthorDate: Mon May 20 17:59:57 2019 +0800

    Rename some not friendly variable name and method.
---
 engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java    | 4 ++--
 .../src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java   | 2 +-
 .../main/java/org/apache/kylin/engine/mr/common/BatchConstants.java   | 2 +-
 .../src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java     | 4 ++--
 .../main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java    | 2 +-
 .../main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java    | 2 +-
 .../main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java | 2 +-
 .../org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java    | 2 +-
 .../main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java   | 2 +-
 .../src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java  | 4 ++--
 .../main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java  | 2 +-
 .../main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java    | 4 ++--
 .../apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java    | 4 ++--
 .../java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java    | 2 +-
 .../java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java     | 4 ++--
 15 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
index e70b497..2334f5e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
@@ -58,7 +58,7 @@ public interface IMROutput2 {
         /** Add step that does any necessary clean up. */
         public void addStepPhase4_Cleanup(DefaultChainedExecutable jobFlow);
 
-        public IMROutputFormat getOuputFormat();
+        public IMROutputFormat getOutputFormat();
 
     }
 
@@ -100,7 +100,7 @@ public interface IMROutput2 {
         /** Add step that does any necessary clean up. */
         public void addStepPhase3_Cleanup(DefaultChainedExecutable jobFlow);
 
-        public IMRMergeOutputFormat getOuputFormat();
+        public IMRMergeOutputFormat getOutputFormat();
     }
 
     public interface IMRMergeOutputFormat {
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
index 4a83dea..74d3535 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
@@ -344,7 +344,7 @@ public class JobBuilderSupport {
         return getJobWorkingDir(jobId) + "/hbase-conf.xml";
     }
 
-    public String getCounterOuputPath(String jobId) {
+    public String getCounterOutputPath(String jobId) {
         return getRealizationRootPath(jobId) + "/counter";
     }
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
index af11bb6..d20de2c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
@@ -108,7 +108,7 @@ public interface BatchConstants {
     String ARG_META_URL = "metadataUrl";
     String ARG_HBASE_CONF_PATH = "hbaseConfPath";
     String ARG_SHRUNKEN_DICT_PATH = "shrunkenDictPath";
-    String ARG_COUNTER_OUPUT = "counterOutput";
+    String ARG_COUNTER_OUTPUT = "counterOutput";
 
     /**
      * logger and counter
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
index 0e93ee7..d35f6b6 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
@@ -148,7 +148,7 @@ public class CuboidJob extends AbstractHadoopJob {
             configureMapperInputFormat(segment);
 
             // set output
-            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(segment).getOuputFormat();
+            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(segment).getOutputFormat();
             outputFormat.configureJobOutput(job, output, segment, cuboidScheduler, nCuboidLevel);
 
             // set job configuration
@@ -174,7 +174,7 @@ public class CuboidJob extends AbstractHadoopJob {
             flatTableInputFormat.configureJob(job);
         } else {
             // n-dimension cuboid case
-            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(cubeSeg).getOuputFormat();
+            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(cubeSeg).getOutputFormat();
             outputFormat.configureJobInput(job, input);
             FileInputFormat.setInputPaths(job, new Path(input));
         }
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
index 02dc71c..eac29c2 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
@@ -121,7 +121,7 @@ public class InMemCuboidJob extends AbstractHadoopJob {
             flatTableInputFormat.configureJob(job);
 
             // set output
-            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(segment).getOuputFormat();
+            IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(segment).getOutputFormat();
             outputFormat.configureJobOutput(job, output, segment, segment.getCuboidScheduler(), 0);
 
             return waitForCompletion(job);
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
index 026fdbc..311ec4f 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
@@ -77,7 +77,7 @@ public class MergeCuboidJob extends CuboidJob {
             job.setOutputValueClass(Text.class);
 
             // set inputs
-            IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(cubeSeg).getOuputFormat();
+            IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(cubeSeg).getOutputFormat();
             outputFormat.configureJobInput(job, input);
             addInputDirs(input, job);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
index d63c8b8..3619d87 100755
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
@@ -60,7 +60,7 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
         // decide which source segment
         FileSplit fileSplit = (FileSplit) context.getInputSplit();
         IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(mergedCubeSegment)
-                .getOuputFormat();
+                .getOutputFormat();
         CubeSegment sourceCubeSegment = outputFormat.findSourceSegment(fileSplit, cube);
         reEncoder = new SegmentReEncoder(cubeDesc, sourceCubeSegment, mergedCubeSegment, config);
     }
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
index 1d2e78e..feac787 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
@@ -116,7 +116,7 @@ public class SparkBatchCubingJobBuilder2 extends JobBuilderSupport {
 
         sparkExecutable.setJobId(jobId);
         sparkExecutable.setName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
-        sparkExecutable.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES, getCounterOuputPath(jobId));
+        sparkExecutable.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES, getCounterOutputPath(jobId));
 
         StringBuilder jars = new StringBuilder();
 
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index f3b0a13..232a1c7 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -220,7 +220,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         final String cuboidOutputPath = BatchCubingJobBuilder2.getCuboidOutputPathsByLevel(hdfsBaseLocation, level);
         final SerializableConfiguration sConf = new SerializableConfiguration(job.getConfiguration());
 
-        IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(cubeSeg).getOuputFormat();
+        IMROutput2.IMROutputFormat outputFormat = MRUtil.getBatchCubingOutputSide2(cubeSeg).getOutputFormat();
         outputFormat.configureJobOutput(job, cuboidOutputPath, cubeSeg, cubeSeg.getCuboidScheduler(), level);
 
         prepareOutput(rdd, kylinConfig, cubeSeg, level).mapToPair(
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
index 74213ec..b5e8cce 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
@@ -88,7 +88,7 @@ public class SparkExecutable extends AbstractExecutable {
 
     public void setCounterSaveAs(String value, String counterOutputPath) {
         this.setParam(COUNTER_SAVE_AS, value);
-        this.setParam(BatchConstants.ARG_COUNTER_OUPUT, counterOutputPath);
+        this.setParam(BatchConstants.ARG_COUNTER_OUTPUT, counterOutputPath);
     }
 
     public String getCounterSaveAs() {
@@ -326,7 +326,7 @@ public class SparkExecutable extends AbstractExecutable {
                     // done, update all properties
                     Map<String, String> joblogInfo = patternedLogger.getInfo();
                     // read counter from hdfs
-                    String counterOutput = getParam(BatchConstants.ARG_COUNTER_OUPUT);
+                    String counterOutput = getParam(BatchConstants.ARG_COUNTER_OUTPUT);
                     if (counterOutput != null) {
                         if (HadoopUtil.getWorkingFileSystem().exists(new Path(counterOutput))) {
                             Map<String, String> counterMap = HadoopUtil.readFromSequenceFile(counterOutput);
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java
index d512104..709c1b7 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutableLivy.java
@@ -206,7 +206,7 @@ public class SparkExecutableLivy extends SparkExecutable {
                 // done, update all properties
                 Map<String, String> joblogInfo = patternedLogger.getInfo();
                 // read counter from hdfs
-                String counterOutput = getParam(BatchConstants.ARG_COUNTER_OUPUT);
+                String counterOutput = getParam(BatchConstants.ARG_COUNTER_OUTPUT);
                 if (counterOutput != null) {
                     if (HadoopUtil.getWorkingFileSystem().exists(new Path(counterOutput))) {
                         Map<String, String> counterMap = HadoopUtil.readFromSequenceFile(counterOutput);
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
index 53b8a4d..6defcc8 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
@@ -122,8 +122,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
             .withDescription("Hive Intermediate Table").create("hiveTable");
     public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
             .isRequired(true).withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
-    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT)
-            .hasArg().isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
+    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUTPUT)
+            .hasArg().isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUTPUT);
 
     private Options options;
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
index 5739166..fcc3754 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
@@ -87,7 +87,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
             }
 
             @Override
-            public IMROutputFormat getOuputFormat() {
+            public IMROutputFormat getOutputFormat() {
                 return new HBaseMROutputFormat();
             }
         };
@@ -152,7 +152,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
             }
 
             @Override
-            public IMRMergeOutputFormat getOuputFormat() {
+            public IMRMergeOutputFormat getOutputFormat() {
                 return new HBaseMergeMROutputFormat();
             }
         };
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java
index 1d480c3..d636e7d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseSparkSteps.java
@@ -77,7 +77,7 @@ public class HBaseSparkSteps extends HBaseJobSteps {
         sparkExecutable.setJars(jars.toString());
 
         sparkExecutable.setName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE);
-        sparkExecutable.setCounterSaveAs(",," + CubingJob.CUBE_SIZE_BYTES, getCounterOuputPath(jobId));
+        sparkExecutable.setCounterSaveAs(",," + CubingJob.CUBE_SIZE_BYTES, getCounterOutputPath(jobId));
 
         return sparkExecutable;
     }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
index ed05cba..a8a7877 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
@@ -91,8 +91,8 @@ public class SparkCubeHFile extends AbstractApplication implements Serializable
             .isRequired(true).withDescription("Cuboid files PATH").create(BatchConstants.ARG_INPUT);
     public static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName(BatchConstants.ARG_PARTITION)
             .hasArg().isRequired(true).withDescription("Partition file path.").create(BatchConstants.ARG_PARTITION);
-    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT).hasArg()
-            .isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
+    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUTPUT).hasArg()
+            .isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUTPUT);
 
     private Options options;
 


[kylin] 02/02: Remove -p parameter when check port availability, which will output warning info if not running as root user.

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit d533d186494e9e5ec23f8e6576d2a15004aed7a3
Author: 凡梦星尘 <el...@gmail.com>
AuthorDate: Mon May 20 18:13:23 2019 +0800

    Remove -p parameter when check port availability, which will output warning info if not running as root user.
---
 build/bin/check-port-availability.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build/bin/check-port-availability.sh b/build/bin/check-port-availability.sh
index e27b893..1208269 100644
--- a/build/bin/check-port-availability.sh
+++ b/build/bin/check-port-availability.sh
@@ -27,7 +27,7 @@ kylin_port=`grep "<Connector port=" ${KYLIN_HOME}/tomcat/conf/server.xml |grep p
 if isMacosX; then
     kylin_port_in_use=`lsof -nP -iTCP:"${kylin_port}" | grep LISTEN`
 else
-    kylin_port_in_use=`netstat -tlpn | grep "\b${kylin_port}\b"`
+    kylin_port_in_use=`netstat -tln | grep "\b${kylin_port}\b"`
 fi
 
 # if not available, prompt error messeage