You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by ni...@apache.org on 2020/02/07 14:25:55 UTC

[kylin] branch 3.0.x updated (62fe6f9 -> ef1e63c)

This is an automated email from the ASF dual-hosted git repository.

nic pushed a change to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git.


    from 62fe6f9  [maven-release-plugin] prepare for next development iteration
     new 46001fc  KYLIN-4292 Use HFileOutputFormat3 in all places to replace HFileOutputFormat2
     new d30ac84  Minor, fix typo in KylinHealthCheckJob
     new f888372  #4238 Fix kylin_streaming_model broken when changing kylin.source.hive.database-for-flattable to non-default value
     new 4300fa5  KYLIN-4280 SegmentPruner support 'OR' and 'NOT'
     new c581826  KYLIN-4287  SegmentPruner satisfy "IN" filter bug
     new a233418  KYLIN-4166 Case when return null when SQL no GROUP BY
     new 6635c04  KYLIN-4225 close hive session manually
     new 753e1a8  KYLIN-4197 Set deadline for ClientEnvExtractor (#994)
     new a2e3bcc  KYLIN-4304 Project list cannot be correctly sorted by 'Create Time'
     new 14bad16  KYLIN-4252 Fix the error "Cannot read property 'index' of null" in visualization page
     new cb6e44c  KYLIN-4235 Failed to load table metadata from JDBC data source
     new a9945ca  KYLIN-4237 Return error when execute "explain plan for SQL" to get the execution plan of SQL
     new b764400  KYLIN-4250 FetcherRunner should skip the job to process other jobs instead of throwing exception when the job section metadata is not found
     new 8e2bc1f  KYLIN-3609 KYLIN-3409 KYLIN-4161 CubeSegmentStatisticsMap exist only on OLAPQuery.EnumeratorTypeEnum.OLAP query type. (#942)
     new e8e1a20  KYLIN-4260 When using server side PreparedStatement cache, the query result are not match on TopN scenario
     new 21241e8  fix bug KYLIN-4300.
     new 892d68b  KYLIN-4297 Build cube throw NPE when partition column is not set in JDBC Data Source
     new b8c5252  fix bug in KYLIN-4243
     new a877fee  KYLIN-4295 Instances displayed on Query Node are inconsistent with Job Node
     new 466a7a3  KYLIN-4080 Project schema update event causes error reload NEW DataModelDesc
     new 00559cd  KYLIN-4306: Delete data model desc when encountering WriteConflictException
     new 393bc1f  KYLIN-3956 Segments of not only streaming cube but also batch cube need to show their status
     new 988b428  Set repository sites to https
     new ebfc745  KYLIN-4333 Build Server OOM
     new e373c64  Fix sql injection issue
     new 2ee8e24  Prevent uncontrolled data used in path expression
     new a1bd108  Encrypt response output for BroadcasterReceiveServlet
     new 17777e9  Fix "equals" on incomparable types
     new 0d83874  Fix type mismatch on container access
     new e683fff  Fix not thread-safe double-checked locking
     new 228f848  Fix potential possibility of array index out of bounds error
     new abd29e4  Remove container never accessed
     new 7395b46  Ensure the validity of http header from concatenated string
     new bb03e27  Fix synchronization on boxed types or strings
     new 7ea101d  KYLIN-4327 TOPN Comparator may violate its general contract
     new ac69061  Prevent uncontrolled data used in path expression
     new ceefc82  Fix equals method does not inspect argument type
     new 2e0d58a  Fix queries built from user-controlled sources
     new 173d88b  Prevent uncontrolled data used in path expression
     new ec7558e  Prevent uncontrolled data used in path expression
     new 5016842  Minor, throw exception in case of illegal project name
     new 3b55848  Minor, add class TableDesc.TableProject to avoid the use of Pair
     new cff4593  Fix sql starts with "with" cannot be queried on web
     new ef1e63c  statement cache eviction invalidation base on time

The 44 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 build/bin/kylin-port-replace-util.sh               |   2 +-
 build/bin/sample.sh                                |  13 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |   4 +
 .../common/metrics/metrics2/CodahaleMetrics.java   |  13 +-
 .../common/persistence/JDBCResourceStore.java      |  47 ++--
 .../kylin/common/persistence/ResourceStore.java    |   3 +-
 .../apache/kylin/cube/common/SegmentPruner.java    |  56 ++---
 .../apache/kylin/cube/common/TupleFilterNode.java  |  76 +++++++
 .../model/validation/rule/StreamingCubeRule.java   |   3 +-
 .../kylin/cube/common/SegmentPrunerTest.java       | 176 +++++++++++++++
 .../apache/kylin/dict/lookup/SnapshotManager.java  |  44 ++--
 .../kylin/job/execution/ExecutableManager.java     |   4 +
 .../job/impl/threadpool/DefaultFetcherRunner.java  |  14 +-
 .../job/impl/threadpool/PriorityFetcherRunner.java |  14 +-
 .../measure/percentile/PercentileSerializer.java   |   5 +-
 .../kylin/measure/topn/DoubleDeltaSerializer.java  |   6 +-
 .../org/apache/kylin/measure/topn/TopNCounter.java |   8 +-
 .../apache/kylin/measure/topn/TopNMeasureType.java |   7 +-
 .../kylin/metadata/TableMetadataManager.java       |  15 +-
 .../metadata/badquery/BadQueryHistoryManager.java  |   2 +
 .../metadata/datatype/DataTypeSerializer.java      |   5 +-
 .../apache/kylin/metadata/model/DataModelDesc.java |  12 +
 .../kylin/metadata/model/DataModelManager.java     |  38 ++--
 .../org/apache/kylin/metadata/model/TableDesc.java |  31 ++-
 .../apache/kylin/metadata/model/TableExtDesc.java  | 137 ++++++-----
 .../kylin/metadata/realization/SQLDigest.java      |   4 +-
 .../kylin/measure/topn/TopNCounterBasicTest.java   |  47 ++++
 .../kylin/storage/hybrid/HybridInstanceTest.java   |   1 +
 .../apache/kylin/storage/hbase/ITStorageTest.java  |   2 +-
 .../kylin/metrics/lib/impl/hive/HiveProducer.java  |  12 +-
 pom.xml                                            |   6 +-
 .../kylin/query/enumerator/OLAPEnumerator.java     |   5 +-
 .../apache/kylin/query/relnode/OLAPContext.java    |   3 +-
 .../apache/kylin/query/relnode/OLAPProjectRel.java |  32 +--
 .../org/apache/kylin/query/util/QueryUtil.java     |  55 ++---
 .../org/apache/kylin/query/util/QueryUtilTest.java |  35 +++
 .../broadcaster/BroadcasterReceiveServlet.java     |  24 +-
 .../kylin/rest/controller/CubeController.java      |   3 +-
 .../kylin/rest/controller/DashboardController.java |  17 +-
 .../rest/controller/ExternalFilterController.java  |  12 +-
 .../kylin/rest/controller/ModelController.java     |   1 +
 .../kylin/rest/controller/QueryController.java     |  10 +-
 .../ServiceDiscoveryStateController.java           |  29 ++-
 .../rest/controller/StreamingV2Controller.java     |  11 +-
 .../apache/kylin/rest/job/KylinHealthCheckJob.java |   4 +-
 .../kylin/rest/metrics/QueryMetricsFacade.java     |  47 ++--
 .../org/apache/kylin/rest/service/CubeService.java |  81 ++++---
 .../kylin/rest/service/DashboardService.java       | 250 +++++++++++----------
 .../apache/kylin/rest/service/ModelService.java    |  17 +-
 .../apache/kylin/rest/service/QueryService.java    |  12 +
 .../org/apache/kylin/rest/util/ValidateUtil.java   |   8 +-
 .../kylin/source/hive/BeelineHiveClient.java       |   2 +-
 .../org/apache/kylin/source/jdbc/JdbcExplorer.java |  21 +-
 .../kylin/source/jdbc/JdbcHiveInputBase.java       |   8 +-
 .../org/apache/kylin/source/jdbc/JdbcSource.java   |   6 +-
 .../kylin/storage/hbase/HBaseResourceStore.java    |   5 +-
 .../hbase/lookup/LookupTableToHFileJob.java        |   4 +-
 .../kylin/storage/hbase/steps/CreateHTableJob.java |   3 +-
 .../storage/hbase/steps/HFileOutputFormat3.java    |   3 +-
 .../assign/CubePartitionRoundRobinAssigner.java    |   4 +-
 .../coordinate/StreamingCoordinator.java           |   2 +-
 .../storage/columnar/ColumnarStoreDimDesc.java     |   5 +-
 .../storage/columnar/ColumnarStoreMetricsDesc.java |   6 +-
 .../org/apache/kylin/tool/ClientEnvExtractor.java  |  52 +++--
 .../org/apache/kylin/tool/CubeMigrationCLI.java    |   4 +-
 .../apache/kylin/tool/ClientEnvExtractorTest.java  |  68 ++++++
 webapp/app/js/controllers/instances.js             |   2 +-
 webapp/app/js/controllers/query.js                 |   4 +-
 webapp/app/js/model/projectConfig.js               |   2 +-
 webapp/app/partials/cubes/cube_detail.html         |   2 +-
 70 files changed, 1143 insertions(+), 523 deletions(-)
 create mode 100644 core-cube/src/main/java/org/apache/kylin/cube/common/TupleFilterNode.java
 create mode 100644 tool/src/test/java/org/apache/kylin/tool/ClientEnvExtractorTest.java


[kylin] 01/44: KYLIN-4292 Use HFileOutputFormat3 in all places to replace HFileOutputFormat2

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 46001fc2deffc16248da40944ce90bcbd912525a
Author: shaofengshi <sh...@apache.org>
AuthorDate: Tue Dec 10 22:27:05 2019 +0800

    KYLIN-4292 Use HFileOutputFormat3 in all places to replace HFileOutputFormat2
---
 .../org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java  | 4 ++--
 .../java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java    | 3 +--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
index 2a12575..4f6dcb8 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.mapreduce.Job;
@@ -65,6 +64,7 @@ import org.apache.kylin.source.IReadableTable.TableSignature;
 import org.apache.kylin.source.SourceManager;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.steps.CubeHTableUtil;
+import org.apache.kylin.storage.hbase.steps.HFileOutputFormat3;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -137,7 +137,7 @@ public class LookupTableToHFileJob extends AbstractHadoopJob {
             HTable htable = (HTable) conn.getTable(TableName.valueOf(hTableNameAndShard.getFirst()));
 
             // Automatic config !
-            HFileOutputFormat2.configureIncrementalLoad(job, htable, htable.getRegionLocator());
+            HFileOutputFormat3.configureIncrementalLoad(job, htable, htable.getRegionLocator());
 
             job.setReducerClass(KVSortReducerWithDupKeyCheck.class);
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 23a865d..4b2218b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapreduce.Job;
@@ -133,7 +132,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
         HadoopUtil.healSickConfig(hbaseConf);
         Job job = Job.getInstance(hbaseConf, hbaseTableName);
         HTable table = new HTable(hbaseConf, hbaseTableName);
-        HFileOutputFormat2.configureIncrementalLoadMap(job, table);
+        HFileOutputFormat3.configureIncrementalLoadMap(job, table);
 
         logger.info("Saving HBase configuration to {}", hbaseConfPath);
         FileSystem fs = HadoopUtil.getWorkingFileSystem();


[kylin] 02/44: Minor, fix typo in KylinHealthCheckJob

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit d30ac84a0b827e247776f5963adaeb903c14ccb3
Author: Zhou Kang <zh...@xiaomi.com>
AuthorDate: Tue Dec 10 14:51:01 2019 +0800

    Minor, fix typo in KylinHealthCheckJob
---
 .../src/main/java/org/apache/kylin/rest/job/KylinHealthCheckJob.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/job/KylinHealthCheckJob.java b/server-base/src/main/java/org/apache/kylin/rest/job/KylinHealthCheckJob.java
index ecca373..0e25117 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/job/KylinHealthCheckJob.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/job/KylinHealthCheckJob.java
@@ -58,7 +58,7 @@ public class KylinHealthCheckJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false)
-            .withDescription("Fix the unhealth cube").create("fix");
+            .withDescription("Fix the unhealthy cube").create("fix");
 
     public static void main(String[] args) throws Exception {
         new KylinHealthCheckJob().execute(args);
@@ -130,7 +130,7 @@ public class KylinHealthCheckJob extends AbstractApplication {
 
     private void sendMail(String content) {
         logger.info("Send Kylin cluster report");
-        String subject = "Kylin Cluster Health Resport of " + config.getClusterName() + " on "
+        String subject = "Kylin Cluster Health Report of " + config.getClusterName() + " on "
                 + new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT).format(new Date());
         List<String> users = Lists.newArrayList(config.getAdminDls());
         new MailService(config).sendMail(users, subject, content, false);


[kylin] 31/44: Fix potential possibility of array index out of bounds error

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 228f848d4f1a78b51ff4a779cbc1705345bbb562
Author: nichunen <ni...@apache.org>
AuthorDate: Sun Jan 12 19:37:14 2020 +0800

    Fix potential possibility of array index out of bounds error
---
 .../src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
index 7f1f61d..29538aa 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
@@ -55,7 +55,7 @@ public class BeelineHiveClient implements IHiveClient {
         }
         String[] splits = StringUtils.split(beelineParams);
         String url = "", username = "", password = "";
-        for (int i = 0; i < splits.length; i++) {
+        for (int i = 0; i < splits.length - 1; i++) {
             if ("-u".equals(splits[i])) {
                 url = stripQuotes(splits[i + 1]);
             }


[kylin] 23/44: Set repository sites to https

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 988b4280727ceceef348a3f71d82384e80865d77
Author: nichunen <ni...@apache.org>
AuthorDate: Fri Feb 7 19:06:57 2020 +0800

    Set repository sites to https
---
 pom.xml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pom.xml b/pom.xml
index b631c51..fb040f4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1069,7 +1069,7 @@
     <repository>
       <id>central</id>
       <name>Central Repository</name>
-      <url>http://repo.maven.apache.org/maven2</url>
+      <url>https://repo.maven.apache.org/maven2</url>
       <layout>default</layout>
       <snapshots>
         <enabled>false</enabled>
@@ -1078,7 +1078,7 @@
 
     <repository>
       <id>conjars</id>
-      <url>http://conjars.org/repo/</url>
+      <url>https://conjars.org/repo/</url>
     </repository>
 
     <repository>
@@ -1094,7 +1094,7 @@
     <repository>
       <id>kyligence</id>
       <name>Kyligence Repository</name>
-      <url>http://repository.kyligence.io:8081/repository/maven-public/
+      <url>https://repository.kyligence.io/repository/maven-public/
       </url>
       <releases>
         <enabled>true</enabled>


[kylin] 11/44: KYLIN-4235 Failed to load table metadata from JDBC data source

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit cb6e44cf5355f99364938a437b84b8e56345c2ea
Author: Guangxu Cheng <gx...@apache.org>
AuthorDate: Sat Nov 2 00:13:18 2019 +0800

    KYLIN-4235 Failed to load table metadata from JDBC data source
---
 .../org/apache/kylin/source/jdbc/JdbcExplorer.java  | 21 ++++++++++++---------
 .../org/apache/kylin/source/jdbc/JdbcSource.java    |  6 ++++--
 2 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
index d728dcf..96614dd 100644
--- a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
+++ b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
@@ -50,22 +50,25 @@ import org.slf4j.LoggerFactory;
 public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeployer {
     private static final Logger logger = LoggerFactory.getLogger(JdbcExplorer.class);
 
-    private final KylinConfig config;
     private final SourceDialect dialect;
     private final DBConnConf dbconf;
     private final IJdbcMetadata jdbcMetadataDialect;
 
+    @Deprecated
     public JdbcExplorer() {
-        config = KylinConfig.getInstanceFromEnv();
-        String connectionUrl = config.getJdbcSourceConnectionUrl();
-        String driverClass = config.getJdbcSourceDriver();
-        String jdbcUser = config.getJdbcSourceUser();
-        String jdbcPass = config.getJdbcSourcePass();
-        this.dbconf = new DBConnConf(driverClass, connectionUrl, jdbcUser, jdbcPass);
-        this.dialect = SourceDialect.getDialect(config.getJdbcSourceDialect());
-        this.jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(dialect, dbconf);
+        this(KylinConfig.getInstanceFromEnv());
     }
 
+    public JdbcExplorer(KylinConfig config) {
+         String connectionUrl = config.getJdbcSourceConnectionUrl();
+         String driverClass = config.getJdbcSourceDriver();
+         String jdbcUser = config.getJdbcSourceUser();
+         String jdbcPass = config.getJdbcSourcePass();
+         this.dbconf = new DBConnConf(driverClass, connectionUrl, jdbcUser, jdbcPass);
+         this.dialect = SourceDialect.getDialect(config.getJdbcSourceDialect());
+         this.jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(dialect, dbconf);
+     }
+
     @Override
     public List<String> listDatabases() throws SQLException {
         return jdbcMetadataDialect.listDatabases();
diff --git a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcSource.java b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcSource.java
index 1bda6c2..809a465 100644
--- a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcSource.java
+++ b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcSource.java
@@ -31,13 +31,15 @@ import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.apache.kylin.source.SourcePartition;
 
 public class JdbcSource implements ISource {
+    private final KylinConfig config;
     //used by reflection
     public JdbcSource(KylinConfig config) {
+        this.config = config;
     }
 
     @Override
     public ISourceMetadataExplorer getSourceMetadataExplorer() {
-        return new JdbcExplorer();
+        return new JdbcExplorer(this.config);
     }
 
     @SuppressWarnings("unchecked")
@@ -66,7 +68,7 @@ public class JdbcSource implements ISource {
 
     @Override
     public ISampleDataDeployer getSampleDataDeployer() {
-        return new JdbcExplorer();
+        return new JdbcExplorer(this.config);
     }
 
     @Override


[kylin] 06/44: KYLIN-4166 Case when return null when SQL no GROUP BY

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a233418d45bc459e865ed89f69177ac27e183cf9
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Mon Oct 21 11:36:09 2019 +0800

    KYLIN-4166 Case when return null when SQL no GROUP BY
---
 .../apache/kylin/query/relnode/OLAPProjectRel.java | 32 ++++++++++++----------
 1 file changed, 17 insertions(+), 15 deletions(-)

diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
index 59d2c38..8be7249 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
@@ -306,21 +306,23 @@ public class OLAPProjectRel extends Project implements OLAPRel {
         }
 
         // replace projects with dynamic fields
-        Map<TblColRef, RelDataType> dynFields = this.context.dynamicFields;
-        for (TblColRef dynFieldCol : dynFields.keySet()) {
-            String replaceFieldName = dynFieldCol.getName();
-            int rowIndex = this.columnRowType.getIndexByName(replaceFieldName);
-            if (rowIndex >= 0) {
-                int inputIndex = inputColumnRowType.getIndexByName(replaceFieldName);
-                if (inputIndex >= 0) {
-                    // field to be replaced
-                    RelDataType fieldType = dynFields.get(dynFieldCol);
-                    RelDataTypeField newField = new RelDataTypeFieldImpl(replaceFieldName, rowIndex, fieldType);
-                    // project to be replaced
-                    RelDataTypeField inputField = getInput().getRowType().getFieldList().get(inputIndex);
-                    RexInputRef newFieldRef = new RexInputRef(inputField.getIndex(), inputField.getType());
-
-                    replaceFieldMap.put(rowIndex, new Pair<RelDataTypeField, RexNode>(newField, newFieldRef));
+        if (this.context.afterAggregate) {
+            Map<TblColRef, RelDataType> dynFields = this.context.dynamicFields;
+            for (TblColRef dynFieldCol : dynFields.keySet()) {
+                String replaceFieldName = dynFieldCol.getName();
+                int rowIndex = this.columnRowType.getIndexByName(replaceFieldName);
+                if (rowIndex >= 0) {
+                    int inputIndex = inputColumnRowType.getIndexByName(replaceFieldName);
+                    if (inputIndex >= 0) {
+                        // field to be replaced
+                        RelDataType fieldType = dynFields.get(dynFieldCol);
+                        RelDataTypeField newField = new RelDataTypeFieldImpl(replaceFieldName, rowIndex, fieldType);
+                        // project to be replaced
+                        RelDataTypeField inputField = getInput().getRowType().getFieldList().get(inputIndex);
+                        RexInputRef newFieldRef = new RexInputRef(inputField.getIndex(), inputField.getType());
+
+                        replaceFieldMap.put(rowIndex, new Pair<RelDataTypeField, RexNode>(newField, newFieldRef));
+                    }
                 }
             }
         }


[kylin] 03/44: #4238 Fix kylin_streaming_model broken when changing kylin.source.hive.database-for-flattable to non-default value

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit f88837234b38db517429e516114b75d30348fa73
Author: mingxiang.zhao <mz...@gmail.com>
AuthorDate: Tue Nov 5 20:29:54 2019 +0800

    #4238 Fix kylin_streaming_model broken when changing kylin.source.hive.database-for-flattable to non-default value
---
 build/bin/sample.sh | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/build/bin/sample.sh b/build/bin/sample.sh
index 18823ae..09ca710 100755
--- a/build/bin/sample.sh
+++ b/build/bin/sample.sh
@@ -104,8 +104,17 @@ sed -i "s/DEFAULT./$sample_database./g" ${KYLIN_HOME}/sample_cube/metadata/model
 sed -i "s/DEFAULT./$sample_database./g" ${KYLIN_HOME}/sample_cube/metadata/model_desc/kylin_streaming_model.json
 sed -i "s/DEFAULT./$sample_database./g" ${KYLIN_HOME}/sample_cube/metadata/project/learn_kylin.json
 sed -i "s/DEFAULT/$sample_database/g" ${KYLIN_HOME}/sample_cube/metadata/table/*.json
-cd ${KYLIN_HOME}/sample_cube/metadata/table
-ls -1 DEFAULT.KYLIN_*.json|sed "s/\(DEFAULT\)\(.*\)\.json/mv & $sample_database\2.json/"|sh -v
+sed -i "s/DEFAULT/$sample_database/g" ${KYLIN_HOME}/sample_cube/metadata/kafka/*.json
+sed -i "s/DEFAULT/$sample_database/g" ${KYLIN_HOME}/sample_cube/metadata/streaming/*.json
+strings=(
+    table
+    streaming
+    kafka
+)
+for i in "${strings[@]}"; do
+    cd "${KYLIN_HOME}/sample_cube/metadata/${i}"
+    ls -1 DEFAULT.KYLIN_*.json|sed "s/\(DEFAULT\)\(.*\)\.json/mv & $sample_database\2.json/"|sh -v
+done
 
 cd ${KYLIN_HOME}
 ${dir}/kylin.sh org.apache.kylin.common.persistence.ResourceTool upload ${KYLIN_HOME}/sample_cube/metadata  || { exit 1; }


[kylin] 15/44: KYLIN-4260 When using server side PreparedStatement cache, the query result are not match on TopN scenario

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit e8e1a20c902d756c66a22097c63828b93fec6334
Author: mawu2 <ma...@cisco.com>
AuthorDate: Sat Nov 30 16:23:18 2019 +0800

    KYLIN-4260 When using server side PreparedStatement cache, the query result are not match on TopN scenario
---
 .../java/org/apache/kylin/measure/topn/TopNMeasureType.java   |  5 +++++
 .../java/org/apache/kylin/metadata/realization/SQLDigest.java |  4 +++-
 .../org/apache/kylin/storage/hybrid/HybridInstanceTest.java   |  1 +
 .../java/org/apache/kylin/storage/hbase/ITStorageTest.java    |  2 +-
 .../org/apache/kylin/query/enumerator/OLAPEnumerator.java     |  5 ++++-
 .../main/java/org/apache/kylin/query/relnode/OLAPContext.java |  3 ++-
 .../main/java/org/apache/kylin/rest/service/QueryService.java | 11 +++++++++++
 7 files changed, 27 insertions(+), 4 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 472de3c..0586370 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -395,6 +395,11 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
 
     @Override
     public void adjustSqlDigest(List<MeasureDesc> measureDescs, SQLDigest sqlDigest) {
+        // If sqlDiegest is already adjusted, then not to adjust it again.
+        if (sqlDigest.isBorrowedContext) {
+            return;
+        }
+
         if (sqlDigest.aggregations.size() > 1) {
             return;
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
index 78f0adc..875068b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
@@ -80,6 +80,7 @@ public class SQLDigest {
     public List<TblColRef> sortColumns;
     public List<OrderEnum> sortOrders;
     public boolean isRawQuery;
+    public boolean isBorrowedContext;
     public boolean limitPrecedesAggr;
     public boolean hasLimit;
 
@@ -92,7 +93,7 @@ public class SQLDigest {
             List<DynamicFunctionDesc> dynAggregations, //
             Set<TblColRef> rtDimensionColumns, Set<TblColRef> rtMetricColumns, // dynamic col related columns
             Set<TblColRef> filterColumns, TupleFilter filter, TupleFilter havingFilter, // filter
-            List<TblColRef> sortColumns, List<OrderEnum> sortOrders, boolean limitPrecedesAggr, boolean hasLimit, // sort & limit
+            List<TblColRef> sortColumns, List<OrderEnum> sortOrders, boolean limitPrecedesAggr, boolean hasLimit, boolean isBorrowedContext, // sort & limit
             Set<MeasureDesc> involvedMeasure
     ) {
         this.factTable = factTable;
@@ -121,6 +122,7 @@ public class SQLDigest {
         this.sortColumns = sortColumns;
         this.sortOrders = sortOrders;
         this.isRawQuery = isRawQuery();
+        this.isBorrowedContext = isBorrowedContext;
         this.limitPrecedesAggr = limitPrecedesAggr;
         this.hasLimit = hasLimit;
 
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/hybrid/HybridInstanceTest.java b/core-storage/src/test/java/org/apache/kylin/storage/hybrid/HybridInstanceTest.java
index 99aede4..04e938a 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/hybrid/HybridInstanceTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/hybrid/HybridInstanceTest.java
@@ -74,6 +74,7 @@ public class HybridInstanceTest {
                 new LinkedList<>(),
                 true,
                 true,
+                false,
                 new LinkedHashSet<>());
         CapabilityResult capabilityResult = hybridInstance.isCapable(sQLDigest);
 
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
index 9022016..3e174bb 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
@@ -148,7 +148,7 @@ public class ITStorageTest extends HBaseMetadataTestCase {
                     /*runtimeDimensionColumns*/ Collections.<TblColRef> emptySet(), //
                     /*runtimeMetricColumns*/ Collections.<TblColRef> emptySet(), //
                     /*filter col*/ Collections.<TblColRef> emptySet(), filter, null, //
-                    /*sortCol*/ new ArrayList<TblColRef>(), new ArrayList<SQLDigest.OrderEnum>(), false, false, new HashSet<MeasureDesc>());
+                    /*sortCol*/ new ArrayList<TblColRef>(), new ArrayList<SQLDigest.OrderEnum>(), false, false, false, new HashSet<MeasureDesc>());
             iterator = storageEngine.search(context, sqlDigest, mockup.newTupleInfo(groups, aggregations));
             while (iterator.hasNext()) {
                 ITuple tuple = iterator.next();
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
index 3f7beff..129be02 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
@@ -106,7 +106,10 @@ public class OLAPEnumerator implements Enumerator<Object[]> {
         // bind dynamic variables
         olapContext.bindVariable(optiqContext);
 
-        olapContext.resetSQLDigest();
+        // If olapContext is cached, then inherit it.
+        if (!olapContext.isBorrowedContext) {
+            olapContext.resetSQLDigest();
+        }
         SQLDigest sqlDigest = olapContext.getSQLDigest();
 
         // query storage engine
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
index 59f84df..39c3472 100755
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
@@ -157,6 +157,7 @@ public class OLAPContext {
     public TupleFilter havingFilter;
     public List<JoinDesc> joins = new LinkedList<>();
     public JoinsTree joinsTree;
+    public boolean isBorrowedContext = false; // Whether preparedContext is borrowed from cache
     List<TblColRef> sortColumns;
     List<SQLDigest.OrderEnum> sortOrders;
 
@@ -200,7 +201,7 @@ public class OLAPContext {
                     metricsColumns, aggregations, aggrSqlCalls, dynFuncs, // aggregation
                     rtDimColumns, rtMetricColumns, // runtime related columns
                     filterColumns, filter, havingFilter, // filter
-                    sortColumns, sortOrders, limitPrecedesAggr, hasLimit, // sort & limit
+                    sortColumns, sortOrders, limitPrecedesAggr, hasLimit, isBorrowedContext, // sort & limit
                     involvedMeasure);
         }
         return sqlDigest;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 293b421..0ce8379 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -696,6 +696,13 @@ public class QueryService extends BasicService {
             DBUtils.closeQuietly(conn);
             if (preparedContext != null) {
                 if (borrowPrepareContext) {
+                    // Set tag isBorrowedContext true, when return preparedContext back
+                    for (OLAPContext olapContext : preparedContext.olapContexts) {
+                        if (borrowPrepareContext) {
+                            olapContext.isBorrowedContext = true;
+                        }
+                    }
+
                     preparedContextPool.returnObject(preparedContextKey, preparedContext);
                 } else {
                     preparedContext.close();
@@ -1253,6 +1260,10 @@ public class QueryService extends BasicService {
         Connection conn = QueryConnection.getConnection(project);
         PreparedStatement preparedStatement = conn.prepareStatement(sql);
         Collection<OLAPContext> olapContexts = OLAPContext.getThreadLocalContexts();
+        // If the preparedContext is first initialized, then set the borrowed tag to false
+        for (OLAPContext olapContext : olapContexts) {
+            olapContext.isBorrowedContext = false;
+        }
         return new PreparedContext(conn, preparedStatement, olapContexts);
     }
 


[kylin] 10/44: KYLIN-4252 Fix the error "Cannot read property 'index' of null" in visualization page

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 14bad162d109cd1148bcccd064f8e0fc5cf24e39
Author: Kehua Wu <wu...@zte.com.cn>
AuthorDate: Wed Nov 13 17:58:45 2019 +0800

    KYLIN-4252 Fix the error "Cannot read property 'index' of null" in visualization page
---
 webapp/app/js/controllers/query.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/webapp/app/js/controllers/query.js b/webapp/app/js/controllers/query.js
index e0a92ae..18f1e4e 100644
--- a/webapp/app/js/controllers/query.js
+++ b/webapp/app/js/controllers/query.js
@@ -507,11 +507,11 @@ KylinApp
                 $scope.chart = undefined;
 
                 var selectedDimension = query.graph.state.dimensions;
-                if (selectedDimension && query.graph.type.dimension.types.indexOf(selectedDimension.type) > -1) {
+                var selectedMetric = query.graph.state.metrics;
+                if (selectedDimension && selectedMetric && query.graph.type.dimension.types.indexOf(selectedDimension.type) > -1) {
                     $scope.chart = {};
 
                     var chartType = query.graph.type.value;
-                    var selectedMetric = query.graph.state.metrics;
 
                     var dataValues = [];
                     angular.forEach(query.result.results, function(result, ind) {


[kylin] 22/44: KYLIN-3956 Segments of not only streaming cube but also batch cube need to show their status

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 393bc1f16e3f7558de0882e4a117c47c638c36e0
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Fri Dec 13 17:24:34 2019 +0800

    KYLIN-3956 Segments of not only streaming cube but also batch cube need to show their status
---
 webapp/app/partials/cubes/cube_detail.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/webapp/app/partials/cubes/cube_detail.html b/webapp/app/partials/cubes/cube_detail.html
index 17e89b4..b747e84 100755
--- a/webapp/app/partials/cubes/cube_detail.html
+++ b/webapp/app/partials/cubes/cube_detail.html
@@ -109,7 +109,7 @@
             <div ng-repeat="table in cube.hbase" class="cube-segment-list" ng-class="{'cube-broken-segment': table.regionCount === 0}">
                 <h5><b>Segment:</b> {{table.segmentName}}</h5>
                 <ul>
-                    <li ng-if="cube.streaming">Status: <span>{{table.segmentStatus}}</span></li>
+                    <li>Status: <span>{{table.segmentStatus}}</span></li>
                     <li ng-if="cube.model.partition_desc.partition_date_column">Start Time: <span>{{table.dateRangeStart | reverseToGMT0}}</span></li>
                     <li ng-if="cube.model.partition_desc.partition_date_column">End Time: <span>{{table.dateRangeEnd | reverseToGMT0}}</span></li>
                     <li>Source Count: <span>{{table.sourceCount}}</span></li>


[kylin] 34/44: Fix synchronization on boxed types or strings

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit bb03e2741276d4dd1181dadf40b205aefe2fc82c
Author: nichunen <ni...@apache.org>
AuthorDate: Wed Jan 15 14:51:34 2020 +0800

    Fix synchronization on boxed types or strings
---
 .../common/persistence/JDBCResourceStore.java      | 47 +++++++++++++---------
 .../apache/kylin/dict/lookup/SnapshotManager.java  | 44 ++++++++++++--------
 2 files changed, 54 insertions(+), 37 deletions(-)

diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceStore.java
index 61bbb98..fd98383 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceStore.java
@@ -32,6 +32,7 @@ import java.sql.SQLException;
 import java.sql.Types;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.kylin.common.KylinConfig;
@@ -44,28 +45,17 @@ import com.google.common.base.Preconditions;
 
 public class JDBCResourceStore extends PushdownResourceStore {
 
-    private static Logger logger = LoggerFactory.getLogger(JDBCResourceStore.class);
-
     public static final String JDBC_SCHEME = "jdbc";
-
+    private static final ConcurrentHashMap<String, Object> lockObjectMap = new ConcurrentHashMap<>();
     private static final String META_TABLE_KEY = "META_TABLE_KEY";
-
     private static final String META_TABLE_TS = "META_TABLE_TS";
-
     private static final String META_TABLE_CONTENT = "META_TABLE_CONTENT";
-
-    public static void checkScheme(StorageURL url) {
-        Preconditions.checkState(JDBC_SCHEME.equals(url.getScheme()));
-    }
-
-    // ============================================================================
-
+    private static Logger logger = LoggerFactory.getLogger(JDBCResourceStore.class);
     private JDBCConnectionManager connectionManager;
 
+    // ============================================================================
     private String[] tableNames = new String[2];
-
     private String metadataIdentifier = null;
-
     // For test
     private long queriedSqlNum = 0;
 
@@ -82,11 +72,21 @@ public class JDBCResourceStore extends PushdownResourceStore {
         }
     }
 
-    abstract static class SqlOperation {
-        PreparedStatement pstat = null;
-        ResultSet rs = null;
+    public static void checkScheme(StorageURL url) {
+        Preconditions.checkState(JDBC_SCHEME.equals(url.getScheme()));
+    }
 
-        abstract public void execute(final Connection connection) throws SQLException, IOException;
+    private Object getConcurrentObject(String resPath) {
+        if (!lockObjectMap.containsKey(resPath)) {
+            addObject(resPath);
+        }
+        return lockObjectMap.get(resPath);
+    }
+
+    private synchronized void addObject(String resPath) {
+        if (!lockObjectMap.containsKey(resPath)) {
+            lockObjectMap.put(resPath, new Object());
+        }
     }
 
     private void executeSql(SqlOperation operation) throws SQLException, IOException {
@@ -349,7 +349,7 @@ public class JDBCResourceStore extends PushdownResourceStore {
             @Override
             public void execute(Connection connection) throws SQLException, IOException {
                 byte[] bytes = content.extractAllBytes();
-                synchronized (resPath.intern()) {
+                synchronized (getConcurrentObject(resPath)) {
                     JDBCResourceSQL sqls = getJDBCResourceSQL(getMetaTableName(resPath));
                     boolean existing = existsImpl(resPath);
                     if (existing) {
@@ -439,7 +439,7 @@ public class JDBCResourceStore extends PushdownResourceStore {
         executeSql(new SqlOperation() {
             @Override
             public void execute(Connection connection) throws SQLException, IOException {
-                synchronized (resPath.intern()) {
+                synchronized (getConcurrentObject(resPath)) {
                     JDBCResourceSQL sqls = getJDBCResourceSQL(getMetaTableName(resPath));
                     if (!existsImpl(resPath)) {
                         if (oldTS != 0) {
@@ -640,4 +640,11 @@ public class JDBCResourceStore extends PushdownResourceStore {
         return "/".equals(path);
     }
 
+    abstract static class SqlOperation {
+        PreparedStatement pstat = null;
+        ResultSet rs = null;
+
+        abstract public void execute(final Connection connection) throws SQLException, IOException;
+    }
+
 }
\ No newline at end of file
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
index 8f68fb0..76a3df9 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
@@ -21,6 +21,7 @@ package org.apache.kylin.dict.lookup;
 import java.io.IOException;
 import java.util.List;
 import java.util.NavigableSet;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
@@ -38,8 +39,6 @@ import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
-import com.google.common.collect.Interner;
-import com.google.common.collect.Interners;
 import com.google.common.collect.Lists;
 
 /**
@@ -48,23 +47,13 @@ import com.google.common.collect.Lists;
 public class SnapshotManager {
 
     private static final Logger logger = LoggerFactory.getLogger(SnapshotManager.class);
-
-    public static SnapshotManager getInstance(KylinConfig config) {
-        return config.getManager(SnapshotManager.class);
-    }
-
-    // called by reflection
-    static SnapshotManager newInstance(KylinConfig config) throws IOException {
-        return new SnapshotManager(config);
-    }
-
-    // ============================================================================
-
+    private static final ConcurrentHashMap<String, Object> lockObjectMap = new ConcurrentHashMap<>();
     private KylinConfig config;
-
     // path ==> SnapshotTable
     private LoadingCache<String, SnapshotTable> snapshotCache; // resource
 
+    // ============================================================================
+
     private SnapshotManager(KylinConfig config) {
         this.config = config;
         this.snapshotCache = CacheBuilder.newBuilder().removalListener(new RemovalListener<String, SnapshotTable>() {
@@ -83,6 +72,28 @@ public class SnapshotManager {
                 });
     }
 
+    public static SnapshotManager getInstance(KylinConfig config) {
+        return config.getManager(SnapshotManager.class);
+    }
+
+    // called by reflection
+    static SnapshotManager newInstance(KylinConfig config) throws IOException {
+        return new SnapshotManager(config);
+    }
+
+    private Object getConcurrentObject(String resPath) {
+        if (!lockObjectMap.containsKey(resPath)) {
+            addObject(resPath);
+        }
+        return lockObjectMap.get(resPath);
+    }
+
+    private synchronized void addObject(String resPath) {
+        if (!lockObjectMap.containsKey(resPath)) {
+            lockObjectMap.put(resPath, new Object());
+        }
+    }
+
     public void wipeoutCache() {
         snapshotCache.invalidateAll();
     }
@@ -127,9 +138,8 @@ public class SnapshotManager {
             throws IOException {
         SnapshotTable snapshot = new SnapshotTable(table, tableDesc.getIdentity());
         snapshot.updateRandomUuid();
-        Interner<String> pool = Interners.newWeakInterner();
 
-        synchronized (pool.intern(tableDesc.getIdentity())) {
+        synchronized (getConcurrentObject(tableDesc.getIdentity())) {
             SnapshotTable reusableSnapshot = getReusableSnapShot(table, snapshot, tableDesc, cubeConfig);
             if (reusableSnapshot != null)
                 return updateDictLastModifiedTime(reusableSnapshot.getResourcePath());


[kylin] 09/44: KYLIN-4304 Project list cannot be correctly sorted by 'Create Time'

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a2e3bccec0cfe331e2bab36ba814b7d4756b9e3e
Author: xiacongling <xi...@xiaomi.com>
AuthorDate: Tue Dec 17 21:44:07 2019 +0800

    KYLIN-4304 Project list cannot be correctly sorted by 'Create Time'
---
 webapp/app/js/model/projectConfig.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/webapp/app/js/model/projectConfig.js b/webapp/app/js/model/projectConfig.js
index 270eb7a..6bd90a9 100644
--- a/webapp/app/js/model/projectConfig.js
+++ b/webapp/app/js/model/projectConfig.js
@@ -21,7 +21,7 @@ KylinApp.constant('projectConfig', {
     {attr: 'name', name: 'Name'},
     {attr: 'owner', name: 'Owner'},
     {attr: 'description', name: 'Description'},
-    {attr: 'create_time', name: 'Create Time'}
+    {attr: 'create_time_utc', name: 'Create Time'}
   ]
 
 });


[kylin] 26/44: Prevent uncontrolled data used in path expression

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 2ee8e246be1f53fa0334acf525f91b87e2ee8b6d
Author: nichunen <ni...@apache.org>
AuthorDate: Fri Jan 10 21:04:21 2020 +0800

    Prevent uncontrolled data used in path expression
---
 .../java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java  | 1 +
 1 file changed, 1 insertion(+)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
index 843e9e9..812d3c3 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
@@ -57,6 +57,7 @@ public class BadQueryHistoryManager {
     }
 
     public BadQueryHistory getBadQueriesForProject(String project) throws IOException {
+        project = project.replaceAll("[./]", "");
         BadQueryHistory badQueryHistory = getStore().getResource(getResourcePathForProject(project), BAD_QUERY_INSTANCE_SERIALIZER);
         if (badQueryHistory == null) {
             badQueryHistory = new BadQueryHistory(project);


[kylin] 25/44: Fix sql injection issue

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit e373c64c96a54a7abfe4bccb82e8feb60db04749
Author: nichunen <ni...@apache.org>
AuthorDate: Fri Feb 7 20:22:59 2020 +0800

    Fix sql injection issue
---
 .../org/apache/kylin/rest/service/CubeService.java | 81 ++++++++++++++--------
 1 file changed, 51 insertions(+), 30 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 4fb2580..11b06a1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -71,6 +71,7 @@ import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.project.RealizationEntry;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
+import org.apache.kylin.metrics.MetricsManager;
 import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.BadRequestException;
@@ -79,6 +80,7 @@ import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.msg.Message;
 import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.MetricsRequest;
+import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.response.CubeInstanceResponse;
 import org.apache.kylin.rest.response.CuboidTreeResponse;
 import org.apache.kylin.rest.response.CuboidTreeResponse.NodeInfo;
@@ -544,7 +546,8 @@ public class CubeService extends BasicService implements InitializingBean {
 
         hr = new HBaseResponse();
         CubeInstance cube = CubeManager.getInstance(getConfig()).getCube(cubeName);
-        if (cube.getStorageType() == IStorageAware.ID_HBASE || cube.getStorageType() == IStorageAware.ID_SHARDED_HBASE || cube.getStorageType() == IStorageAware.ID_REALTIME_AND_HBASE) {
+        if (cube.getStorageType() == IStorageAware.ID_HBASE || cube.getStorageType() == IStorageAware.ID_SHARDED_HBASE
+                || cube.getStorageType() == IStorageAware.ID_REALTIME_AND_HBASE) {
             try {
                 logger.debug("Loading HTable info " + cubeName + ", " + tableName);
 
@@ -633,7 +636,8 @@ public class CubeService extends BasicService implements InitializingBean {
             List<String> toDelHDFSPaths = Lists.newArrayListWithCapacity(toRemoveSegs.size());
             for (CubeSegment seg : toRemoveSegs) {
                 toDropHTables.add(seg.getStorageLocationIdentifier());
-                toDelHDFSPaths.add(JobBuilderSupport.getJobWorkingDir(seg.getConfig().getHdfsWorkingDirectory(), seg.getLastBuildJobID()));
+                toDelHDFSPaths.add(JobBuilderSupport.getJobWorkingDir(seg.getConfig().getHdfsWorkingDirectory(),
+                        seg.getLastBuildJobID()));
             }
 
             StorageCleanUtil.dropHTables(new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration()), toDropHTables);
@@ -763,10 +767,12 @@ public class CubeService extends BasicService implements InitializingBean {
     }
 
     //Don't merge the job that has been discarded manually before
-    private boolean isMergingJobBeenDiscarded(CubeInstance cubeInstance, String cubeName, String projectName, SegmentRange offsets) {
+    private boolean isMergingJobBeenDiscarded(CubeInstance cubeInstance, String cubeName, String projectName,
+            SegmentRange offsets) {
         SegmentRange.TSRange tsRange = new SegmentRange.TSRange((Long) offsets.start.v, (Long) offsets.end.v);
         String segmentName = CubeSegment.makeSegmentName(tsRange, null, cubeInstance.getModel());
-        final List<CubingJob> jobInstanceList = jobService.listJobsByRealizationName(cubeName, projectName, EnumSet.of(ExecutableState.DISCARDED));
+        final List<CubingJob> jobInstanceList = jobService.listJobsByRealizationName(cubeName, projectName,
+                EnumSet.of(ExecutableState.DISCARDED));
         for (CubingJob cubingJob : jobInstanceList) {
             if (cubingJob.getSegmentName().equals(segmentName)) {
                 logger.debug("Merge job {} has been discarded before, will not merge.", segmentName);
@@ -777,7 +783,6 @@ public class CubeService extends BasicService implements InitializingBean {
         return false;
     }
 
-
     public void validateCubeDesc(CubeDesc desc, boolean isDraft) {
         Message msg = MsgPicker.getMsg();
 
@@ -931,24 +936,6 @@ public class CubeService extends BasicService implements InitializingBean {
         Broadcaster.getInstance(getConfig()).registerStaticListener(new HTableInfoSyncListener(), "cube");
     }
 
-    private class HTableInfoSyncListener extends Broadcaster.Listener {
-        @Override
-        public void onClearAll(Broadcaster broadcaster) throws IOException {
-            htableInfoCache.invalidateAll();
-        }
-
-        @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Broadcaster.Event event, String cacheKey)
-                throws IOException {
-            String cubeName = cacheKey;
-            String keyPrefix = cubeName + "/";
-            for (String k : htableInfoCache.asMap().keySet()) {
-                if (k.startsWith(keyPrefix))
-                    htableInfoCache.invalidate(k);
-            }
-        }
-    }
-
     public CubeInstanceResponse createCubeInstanceResponse(CubeInstance cube) {
         return new CubeInstanceResponse(cube, projectService.getProjectOfCube(cube.getName()));
     }
@@ -995,7 +982,7 @@ public class CubeService extends BasicService implements InitializingBean {
         long queryExactlyMatchCount = queryMatchMap == null || queryMatchMap.get(cuboidId) == null ? 0L
                 : queryMatchMap.get(cuboidId);
         boolean ifExist = currentCuboidSet.contains(cuboidId);
-        long rowCount = rowCountMap == null ? 0L : rowCountMap.get(cuboidId);
+        long rowCount = (rowCountMap == null || rowCountMap.size() == 0) ? 0L : rowCountMap.get(cuboidId);
 
         NodeInfo node = new NodeInfo();
         node.setId(cuboidId);
@@ -1044,9 +1031,10 @@ public class CubeService extends BasicService implements InitializingBean {
         String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
         String sql = "select " + cuboidColumn + ", sum(" + hitMeasure + ")" //
                 + " from " + table//
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "'" //
+                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
                 + " group by " + cuboidColumn;
-        List<List<String>> orgHitFrequency = queryService.querySystemCube(sql).getResults();
+
+        List<List<String>> orgHitFrequency = getPrepareQueryResult(cubeName, sql);
         return formatQueryCount(orgHitFrequency);
     }
 
@@ -1058,9 +1046,10 @@ public class CubeService extends BasicService implements InitializingBean {
         String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
         String sql = "select " + cuboidSource + ", " + cuboidTgt + ", avg(" + aggCount + "), avg(" + returnCount + ")"//
                 + " from " + table //
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' " //
+                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
                 + " group by " + cuboidSource + ", " + cuboidTgt;
-        List<List<String>> orgRollingUpCount = queryService.querySystemCube(sql).getResults();
+
+        List<List<String>> orgRollingUpCount = getPrepareQueryResult(cubeName, sql);
         return formatRollingUpStats(orgRollingUpCount);
     }
 
@@ -1070,13 +1059,27 @@ public class CubeService extends BasicService implements InitializingBean {
         String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
         String sql = "select " + cuboidSource + ", sum(" + hitMeasure + ")" //
                 + " from " + table //
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "'" //
+                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
                 + " and " + QueryCubePropertyEnum.IF_MATCH.toString() + " = true" //
                 + " group by " + cuboidSource;
-        List<List<String>> orgMatchHitFrequency = queryService.querySystemCube(sql).getResults();
+
+        List<List<String>> orgMatchHitFrequency = getPrepareQueryResult(cubeName, sql);
         return formatQueryCount(orgMatchHitFrequency);
     }
 
+    private List<List<String>> getPrepareQueryResult(String cubeName, String sql) {
+        PrepareSqlRequest sqlRequest = new PrepareSqlRequest();
+        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
+        PrepareSqlRequest.StateParam[] params = new PrepareSqlRequest.StateParam[1];
+        params[0] = new PrepareSqlRequest.StateParam();
+        params[0].setClassName("java.lang.String");
+        params[0].setValue(cubeName);
+        sqlRequest.setParams(params);
+        sqlRequest.setSql(sql);
+
+        return queryService.doQueryWithCache(sqlRequest, false).getResults();
+    }
+
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
             + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public void migrateCube(CubeInstance cube, String projectName) {
@@ -1114,4 +1117,22 @@ public class CubeService extends BasicService implements InitializingBean {
             throw new InternalErrorException("Failed to perform one-click migrating", e);
         }
     }
+
+    private class HTableInfoSyncListener extends Broadcaster.Listener {
+        @Override
+        public void onClearAll(Broadcaster broadcaster) throws IOException {
+            htableInfoCache.invalidateAll();
+        }
+
+        @Override
+        public void onEntityChange(Broadcaster broadcaster, String entity, Broadcaster.Event event, String cacheKey)
+                throws IOException {
+            String cubeName = cacheKey;
+            String keyPrefix = cubeName + "/";
+            for (String k : htableInfoCache.asMap().keySet()) {
+                if (k.startsWith(keyPrefix))
+                    htableInfoCache.invalidate(k);
+            }
+        }
+    }
 }


[kylin] 30/44: Fix not thread-safe double-checked locking

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit e683fff43053e368148bc20ad628cf3ef205a600
Author: nichunen <ni...@apache.org>
AuthorDate: Sun Jan 12 19:30:51 2020 +0800

    Fix not thread-safe double-checked locking
---
 .../src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java    | 2 +-
 .../kylin/stream/coordinator/coordinate/StreamingCoordinator.java       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 0586370..518272e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -129,7 +129,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
             private List<TblColRef> literalCols = null;
             private int keyLength = 0;
 
-            private DimensionEncoding[] newDimensionEncodings = null;
+            private volatile DimensionEncoding[] newDimensionEncodings = null;
             private int newKeyLength = 0;
             private boolean needReEncode = true;
 
diff --git a/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/coordinate/StreamingCoordinator.java b/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/coordinate/StreamingCoordinator.java
index f234c2e..20e4947 100644
--- a/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/coordinate/StreamingCoordinator.java
+++ b/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/coordinate/StreamingCoordinator.java
@@ -96,7 +96,7 @@ import com.google.common.collect.Sets;
 public class StreamingCoordinator implements CoordinatorClient {
     private static final Logger logger = LoggerFactory.getLogger(StreamingCoordinator.class);
     private static final int DEFAULT_PORT = 7070;
-    private static StreamingCoordinator instance = null;
+    private static volatile StreamingCoordinator instance = null;
 
     private StreamMetadataStore streamMetadataStore;
     private Assigner assigner;


[kylin] 21/44: KYLIN-4306: Delete data model desc when encountering WriteConflictException

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 00559cdfaf4637dac0c02a26f77bb754244f2d26
Author: Zhou Kang <zh...@xiaomi.com>
AuthorDate: Wed Dec 18 21:50:02 2019 +0800

    KYLIN-4306: Delete data model desc when encountering WriteConflictException
---
 .../java/org/apache/kylin/metadata/model/DataModelManager.java   | 9 ++++++++-
 .../java/org/apache/kylin/storage/hbase/HBaseResourceStore.java  | 5 ++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
index dcbdf60..6ba1e5c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
@@ -26,6 +26,7 @@ import java.util.Map;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.Serializer;
+import org.apache.kylin.common.persistence.WriteConflictException;
 import org.apache.kylin.common.util.AutoReadWriteLock;
 import org.apache.kylin.common.util.AutoReadWriteLock.AutoLock;
 import org.apache.kylin.common.util.ClassUtil;
@@ -250,7 +251,13 @@ public class DataModelManager {
             desc = saveDataModelDesc(desc, projectName);
 
             // now that model is saved, update project formally
-            prjMgr.addModelToProject(name, projectName);
+            try {
+                prjMgr.addModelToProject(name, projectName);
+            } catch (WriteConflictException e) {
+                logger.warn("Add model: {} to project: {} failed for write conflicts, rollback", name, projectName, e);
+                crud.delete(desc);
+                throw e;
+            }
 
             return desc;
         }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index f1960a3..e5a2595 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -336,7 +336,10 @@ public class HBaseResourceStore extends PushdownResourceStore {
             if (!ok) {
                 long real = getResourceTimestampImpl(resPath);
                 throw new WriteConflictException(
-                        "Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
+                        "Overwriting conflict " + resPath +
+                                ", expect old TS " + oldTS +
+                                ", but it is " + real +
+                                ", the expected new TS: " + newTS);
             }
 
             return newTS;


[kylin] 38/44: Fix queries built from user-controlled sources

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 2e0d58ae8998c8138b855b6744236ff2276a34a3
Author: nichunen <ni...@apache.org>
AuthorDate: Thu Jan 16 22:25:39 2020 +0800

    Fix queries built from user-controlled sources
---
 .../kylin/rest/controller/DashboardController.java |  17 +-
 .../kylin/rest/service/DashboardService.java       | 250 +++++++++++----------
 2 files changed, 147 insertions(+), 120 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
index 846d6d3..8b669b3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
@@ -22,6 +22,7 @@ import java.util.List;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.response.MetricsResponse;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.service.CubeService;
@@ -67,8 +68,9 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "startTime") String startTime, @RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
         MetricsResponse queryMetrics = new MetricsResponse();
-        String sql = dashboardService.getQueryMetricsSQL(startTime, endTime, projectName, cubeName);
-        SQLResponse sqlResponse = queryService.querySystemCube(sql);
+        PrepareSqlRequest sqlRequest = dashboardService.getQueryMetricsSQLRequest(startTime, endTime, projectName,
+                cubeName);
+        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest);
         if (!sqlResponse.getIsException()) {
             queryMetrics.increase("queryCount",
                     dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(0)));
@@ -89,8 +91,9 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "startTime") String startTime, @RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
         MetricsResponse jobMetrics = new MetricsResponse();
-        String sql = dashboardService.getJobMetricsSQL(startTime, endTime, projectName, cubeName);
-        SQLResponse sqlResponse = queryService.querySystemCube(sql);
+        PrepareSqlRequest sqlRequest = dashboardService.getJobMetricsSQLRequest(startTime, endTime, projectName,
+                cubeName);
+        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest);
         if (!sqlResponse.getIsException()) {
             jobMetrics.increase("jobCount", dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(0)));
             jobMetrics.increase("avgJobBuildTime",
@@ -110,9 +113,9 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "cubeName", required = false) String cubeName,
             @RequestParam(value = "startTime") String startTime, @RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
-        String sql = dashboardService.getChartSQL(startTime, endTime, projectName, cubeName, dimension, metric,
-                category);
-        return dashboardService.transformChartData(queryService.querySystemCube(sql));
+        PrepareSqlRequest sqlRequest = dashboardService.getChartSQLRequest(startTime, endTime, projectName, cubeName,
+                dimension, metric, category);
+        return dashboardService.transformChartData(queryService.doQueryWithCache(sqlRequest));
     }
 
     private void checkAuthorization(String projectName) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
index 3910245..e547558 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
@@ -18,9 +18,11 @@
 
 package org.apache.kylin.rest.service;
 
-import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -32,6 +34,7 @@ import org.apache.kylin.metrics.property.JobPropertyEnum;
 import org.apache.kylin.metrics.property.QueryPropertyEnum;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.BadRequestException;
+import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.response.MetricsResponse;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.storage.hybrid.HybridInstance;
@@ -53,80 +56,6 @@ public class DashboardService extends BasicService {
     @Autowired
     private CubeService cubeService;
 
-    private enum CategoryEnum {
-        QUERY, JOB
-    }
-
-    private enum QueryDimensionEnum {
-        PROJECT(QueryPropertyEnum.PROJECT.toString()), //
-        CUBE(QueryPropertyEnum.REALIZATION.toString()), //
-        DAY(TimePropertyEnum.DAY_DATE.toString()), //
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), // 
-        MONTH(TimePropertyEnum.MONTH.toString());
-
-        private final String sql;
-
-        QueryDimensionEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    };
-
-    private enum JobDimensionEnum {
-        PROJECT(JobPropertyEnum.PROJECT.toString()), //
-        CUBE(JobPropertyEnum.CUBE.toString()), //
-        DAY(TimePropertyEnum.DAY_DATE.toString()), //
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
-        MONTH(TimePropertyEnum.MONTH.toString());
-
-        private final String sql;
-
-        JobDimensionEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    };
-
-    private enum QueryMetricEnum {
-        QUERY_COUNT("count(*)"), //
-        AVG_QUERY_LATENCY("avg(" + QueryPropertyEnum.TIME_COST.toString() + ")"), //
-        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + ")"), //
-        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
-
-        private final String sql;
-
-        QueryMetricEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
-
-    private enum JobMetricEnum {
-        JOB_COUNT("count(*)"), //
-        AVG_JOB_BUILD_TIME("avg(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
-        MAX_JOB_BUILD_TIME("max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
-        MIN_JOB_BUILD_TIME("min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
-
-        private final String sql;
-
-        JobMetricEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
-
     public MetricsResponse getCubeMetrics(String projectName, String cubeName) {
         MetricsResponse cubeMetrics = new MetricsResponse();
         Float totalCubeSize = 0f;
@@ -180,31 +109,31 @@ public class DashboardService extends BasicService {
             }
         }
         return cubeInstances;
-    }
+    };
 
-    public String getQueryMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
+    public PrepareSqlRequest getQueryMetricsSQLRequest(String startTime, String endTime, String projectName,
+            String cubeName) {
         String[] metrics = new String[] { QueryMetricEnum.QUERY_COUNT.toSQL(),
                 QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(),
                 QueryMetricEnum.MIN_QUERY_LATENCY.toSQL() };
-        List<String> filters = getBaseFilters(CategoryEnum.QUERY, projectName, startTime, endTime);
-        filters = addCubeFilter(filters, CategoryEnum.QUERY, cubeName);
-        return createSql(null, metrics,
-                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()),
-                filters.toArray(new String[filters.size()]));
-    }
+        Map<String, String> filterMap = getBaseFilterMap(CategoryEnum.QUERY, projectName, startTime, endTime);
+        filterMap.putAll(getCubeFilterMap(CategoryEnum.QUERY, cubeName));
+        return createPrepareSqlRequest(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()), filterMap);
+    };
 
-    public String getJobMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
+    public PrepareSqlRequest getJobMetricsSQLRequest(String startTime, String endTime, String projectName,
+            String cubeName) {
         String[] metrics = new String[] { JobMetricEnum.JOB_COUNT.toSQL(), JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(),
                 JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL() };
-        List<String> filters = getBaseFilters(CategoryEnum.JOB, projectName, startTime, endTime);
-        filters = addCubeFilter(filters, CategoryEnum.JOB, cubeName);
-        return createSql(null, metrics,
-                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()),
-                filters.toArray(new String[filters.size()]));
+        Map<String, String> filterMap = getBaseFilterMap(CategoryEnum.JOB, projectName, startTime, endTime);
+        filterMap.putAll(getCubeFilterMap(CategoryEnum.JOB, cubeName));
+        return createPrepareSqlRequest(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()), filterMap);
     }
 
-    public String getChartSQL(String startTime, String endTime, String projectName, String cubeName, String dimension,
-            String metric, String category) {
+    public PrepareSqlRequest getChartSQLRequest(String startTime, String endTime, String projectName, String cubeName,
+            String dimension, String metric, String category) {
         try {
             CategoryEnum categoryEnum = CategoryEnum.valueOf(category);
             String table = "";
@@ -221,10 +150,10 @@ public class DashboardService extends BasicService {
                 table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob());
             }
 
-            List<String> filters = getBaseFilters(categoryEnum, projectName, startTime, endTime);
-            filters = addCubeFilter(filters, categoryEnum, cubeName);
+            Map<String, String> filterMap = getBaseFilterMap(categoryEnum, projectName, startTime, endTime);
+            filterMap.putAll(getCubeFilterMap(categoryEnum, cubeName));
 
-            return createSql(dimensionSQL, metricSQL, table, filters.toArray(new String[filters.size()]));
+            return createPrepareSqlRequest(dimensionSQL, metricSQL, table, filterMap);
         } catch (IllegalArgumentException e) {
             String message = "Generate dashboard chart sql failed. Please double check the input parameter: dimension, metric or category.";
             logger.error(message, e);
@@ -264,29 +193,34 @@ public class DashboardService extends BasicService {
     public void checkAuthorization() throws AccessDeniedException {
     }
 
-    private List<String> getBaseFilters(CategoryEnum category, String projectName, String startTime, String endTime) {
-        List<String> filters = new ArrayList<String>();
+    private Map<String, String> getBaseFilterMap(CategoryEnum category, String projectName, String startTime,
+            String endTime) {
+        HashMap<String, String> filterMap = new HashMap<>();
         String project = "";
         if (category == CategoryEnum.QUERY) {
             project = QueryDimensionEnum.PROJECT.toSQL();
         } else {
             project = JobDimensionEnum.PROJECT.toSQL();
         }
-        filters.add(TimePropertyEnum.DAY_DATE.toString() + " >= '" + startTime + "'");
-        filters.add(TimePropertyEnum.DAY_DATE.toString() + " <= '" + endTime + "'");
+        filterMap.put(TimePropertyEnum.DAY_DATE.toString() + " >= ?", startTime);
+        filterMap.put(TimePropertyEnum.DAY_DATE.toString() + " <= ?", endTime);
+
         if (!Strings.isNullOrEmpty(projectName)) {
-            filters.add(project + " ='" + projectName.toUpperCase(Locale.ROOT) + "'");
+            filterMap.put(project + " = ?", projectName.toUpperCase(Locale.ROOT));
         } else {
-            filters.add(project + " <> '" + MetricsManager.SYSTEM_PROJECT + "'");
+            filterMap.put(project + " <> ?", MetricsManager.SYSTEM_PROJECT);
         }
-        return filters;
+        return filterMap;
     }
 
-    private List<String> addCubeFilter(List<String> baseFilter, CategoryEnum category, String cubeName) {
+    private Map<String, String> getCubeFilterMap(CategoryEnum category, String cubeName) {
+        HashMap<String, String> filterMap = new HashMap<>();
+
         if (category == CategoryEnum.QUERY) {
-            baseFilter.add(QueryPropertyEnum.EXCEPTION.toString() + " = 'NULL'");
+            filterMap.put(QueryPropertyEnum.EXCEPTION.toString() + " = ?", "NULL");
+
             if (!Strings.isNullOrEmpty(cubeName)) {
-                baseFilter.add(QueryPropertyEnum.REALIZATION + " = '" + cubeName + "'");
+                filterMap.put(QueryPropertyEnum.REALIZATION + " = ?", cubeName);
             }
         } else if (category == CategoryEnum.JOB && !Strings.isNullOrEmpty(cubeName)) {
             HybridInstance hybridInstance = getHybridManager().getHybridInstance(cubeName);
@@ -295,15 +229,18 @@ public class DashboardService extends BasicService {
                 for (CubeInstance cube : getCubeByHybrid(hybridInstance)) {
                     cubeNames.append(",'" + cube.getName() + "'");
                 }
-                baseFilter.add(JobPropertyEnum.CUBE.toString() + " IN (" + cubeNames.substring(1) + ")");
+                filterMap.put(JobPropertyEnum.CUBE.toString() + " IN (?)", cubeNames.substring(1));
             } else {
-                baseFilter.add(JobPropertyEnum.CUBE.toString() + " ='" + cubeName + "'");
+                filterMap.put(JobPropertyEnum.CUBE.toString() + " = ?", cubeName);
             }
         }
-        return baseFilter;
+        return filterMap;
     }
 
-    private String createSql(String[] dimensions, String[] metrics, String category, String[] filters) {
+    private PrepareSqlRequest createPrepareSqlRequest(String[] dimensions, String[] metrics, String category,
+            Map<String, String> filterMap) {
+        PrepareSqlRequest sqlRequest = new PrepareSqlRequest();
+        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
         StringBuffer baseSQL = new StringBuffer("select ");
         StringBuffer groupBy = new StringBuffer("");
         if (dimensions != null && dimensions.length > 0) {
@@ -330,17 +267,104 @@ public class DashboardService extends BasicService {
         }
         baseSQL.append(" from ");
         baseSQL.append(category);
-        if (filters != null && filters.length > 0) {
+        if (filterMap != null && filterMap.size() > 0) {
+            PrepareSqlRequest.StateParam[] params = new PrepareSqlRequest.StateParam[filterMap.size()];
+            int i = 0;
             StringBuffer filterSQL = new StringBuffer(" where ");
-            filterSQL.append(filters[0]);
-            for (int i = 1; i < filters.length; i++) {
+            Iterator<String> it = filterMap.keySet().iterator();
+            String filter = it.next();
+            filterSQL.append(filter);
+            params[i] = new PrepareSqlRequest.StateParam();
+            params[i].setClassName("java.lang.String");
+            params[i++].setValue(filterMap.get(filter));
+
+            while (it.hasNext()) {
+                filter = it.next();
                 filterSQL.append(" and ");
-                filterSQL.append(filters[i]);
+                filterSQL.append(filter);
+                params[i] = new PrepareSqlRequest.StateParam();
+                params[i].setClassName("java.lang.String");
+                params[i++].setValue(filterMap.get(filter));
             }
             baseSQL.append(filterSQL.toString());
+            sqlRequest.setParams(params);
         }
         baseSQL.append(groupBy);
+        sqlRequest.setSql(baseSQL.toString());
+        return sqlRequest;
+    }
+
+    private enum CategoryEnum {
+        QUERY, JOB
+    }
+
+    private enum QueryDimensionEnum {
+        PROJECT(QueryPropertyEnum.PROJECT.toString()), //
+        CUBE(QueryPropertyEnum.REALIZATION.toString()), //
+        DAY(TimePropertyEnum.DAY_DATE.toString()), //
+        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
+        MONTH(TimePropertyEnum.MONTH.toString());
+
+        private final String sql;
+
+        QueryDimensionEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum JobDimensionEnum {
+        PROJECT(JobPropertyEnum.PROJECT.toString()), //
+        CUBE(JobPropertyEnum.CUBE.toString()), //
+        DAY(TimePropertyEnum.DAY_DATE.toString()), //
+        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
+        MONTH(TimePropertyEnum.MONTH.toString());
+
+        private final String sql;
+
+        JobDimensionEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum QueryMetricEnum {
+        QUERY_COUNT("count(*)"), //
+        AVG_QUERY_LATENCY("avg(" + QueryPropertyEnum.TIME_COST.toString() + ")"), //
+        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + ")"), //
+        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
+
+        private final String sql;
+
+        QueryMetricEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum JobMetricEnum {
+        JOB_COUNT("count(*)"), //
+        AVG_JOB_BUILD_TIME("avg(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
+        MAX_JOB_BUILD_TIME("max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
+        MIN_JOB_BUILD_TIME("min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
 
-        return baseSQL.toString();
+        private final String sql;
+
+        JobMetricEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
     }
 }
\ No newline at end of file


[kylin] 19/44: KYLIN-4295 Instances displayed on Query Node are inconsistent with Job Node

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a877fee84a0b4a13afa28a2cb78faeea7e663510
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Mon Dec 23 15:01:39 2019 +0800

    KYLIN-4295 Instances displayed on Query Node are inconsistent with Job Node
---
 .../ServiceDiscoveryStateController.java           | 29 ++++++++++++++--------
 webapp/app/js/controllers/instances.js             |  2 +-
 2 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ServiceDiscoveryStateController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ServiceDiscoveryStateController.java
index a315694..42d6cc7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ServiceDiscoveryStateController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ServiceDiscoveryStateController.java
@@ -63,6 +63,8 @@ public class ServiceDiscoveryStateController extends BasicController {
         checkCuratorSchedulerEnabled();
         Set<String> allNodes = new HashSet<>();
         Set<String> queryNodes = new HashSet<>();
+        Set<String> jobNodes = new HashSet<>();
+        Set<String> leaders = new HashSet<>();
 
         // get all nodes and query nodes
         for (String serverWithMode : KylinConfig.getInstanceFromEnv().getRestServersWithMode()) {
@@ -75,19 +77,24 @@ public class ServiceDiscoveryStateController extends BasicController {
             if (mode.equals("query") || mode.equals("all")) {
                 queryNodes.add(server);
             }
+            if (mode.equals("job") || mode.equals("all")) {
+                jobNodes.add(server);
+            }
         }
 
         // Get all selection participants(only job nodes will participate in the election) and selected leaders
         Set<Participant> allParticipants = serviceDiscoveryStateService.getAllParticipants();
-        Set<String> jobNodes = allParticipants.stream() //
-                .map(Participant::getId) //
-                .collect(Collectors.toSet()); //
-
-        // There should only one leader, if there are more than one leader, means something wrong happened
-        Set<String> leaders = allParticipants.stream() //
-                .filter(Participant::isLeader) //
-                .map(Participant::getId) //
-                .collect(Collectors.toSet()); //
+        if (!allParticipants.isEmpty()) {
+            jobNodes = allParticipants.stream() //
+                    .map(Participant::getId) //
+                    .collect(Collectors.toSet()); //
+
+            // There should only one leader, if there are more than one leader, means something wrong happened
+            leaders = allParticipants.stream() //
+                    .filter(Participant::isLeader) //
+                    .map(Participant::getId) //
+                    .collect(Collectors.toSet()); //
+        }
 
         // Ask for other nodes for its job server state
         // current Kylin only has one active job node
@@ -103,8 +110,8 @@ public class ServiceDiscoveryStateController extends BasicController {
         // 100 means CuratorScheduler
         // This monitor only meaningful to CuratorScheduler
         if (KylinConfig.getInstanceFromEnv().getSchedulerType() != 100) {
-            throw new UnsupportedOperationException("Only meaningful when scheduler is CuratorScheduler, " +
-                    "try set kylin.job.scheduler.default to 100 to enable CuratorScheduler.");
+            throw new UnsupportedOperationException("Only meaningful when scheduler is CuratorScheduler, "
+                    + "try set kylin.job.scheduler.default to 100 to enable CuratorScheduler.");
         }
     }
 
diff --git a/webapp/app/js/controllers/instances.js b/webapp/app/js/controllers/instances.js
index 77b9245..1ff3e69 100644
--- a/webapp/app/js/controllers/instances.js
+++ b/webapp/app/js/controllers/instances.js
@@ -54,7 +54,7 @@ KylinApp
       if ($scope.selectedLeaders.indexOf(node) >= 0) {
         return "Job Node (Leader)";
       } else {
-        return "Job Node (Follower)";
+        return "Job Node";
       }
     }
 


[kylin] 39/44: Prevent uncontrolled data used in path expression

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 173d88b7015c9a60779cc04586896453ee5b69ed
Author: nichunen <ni...@apache.org>
AuthorDate: Sat Jan 18 22:45:04 2020 +0800

    Prevent uncontrolled data used in path expression
---
 .../src/main/java/org/apache/kylin/metadata/TableMetadataManager.java    | 1 +
 1 file changed, 1 insertion(+)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
index 457c046..ce3017f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
@@ -484,6 +484,7 @@ public class TableMetadataManager {
 
     public void removeExternalFilter(String name) throws IOException {
         try (AutoLock lock = extFilterMapLock.lockForWrite()) {
+            name = name.replaceAll("[./]", "");
             extFilterCrud.delete(name);
         }
     }


[kylin] 16/44: fix bug KYLIN-4300.

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 21241e847fb47d211dbc8b0dd9697961502a0101
Author: zengruios <57...@qq.com>
AuthorDate: Mon Dec 16 21:40:53 2019 +0800

    fix bug KYLIN-4300.
---
 .../cube/model/validation/rule/StreamingCubeRule.java   |  3 ++-
 .../org/apache/kylin/rest/service/ModelService.java     | 17 +++++++++++------
 2 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
index 647f4c1..dfc5624 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
@@ -44,7 +44,8 @@ public class StreamingCubeRule implements IValidatorRule<CubeDesc> {
     public void validate(CubeDesc cube, ValidateContext context) {
         DataModelDesc model = cube.getModel();
         
-        if (model.getRootFactTable().getTableDesc().getSourceType() != ISourceAware.ID_STREAMING) {
+        if (model.getRootFactTable().getTableDesc().getSourceType() != ISourceAware.ID_STREAMING
+                && !model.getRootFactTable().getTableDesc().isStreamingTable()) {
             return;
         }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
index a385fab..888db7c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -136,12 +136,7 @@ public class ModelService extends BasicService {
             throw new BadRequestException(String.format(Locale.ROOT, msg.getDUPLICATE_MODEL_NAME(), desc.getName()));
         }
 
-        String factTableName = desc.getRootFactTableName();
-        TableDesc tableDesc = getTableManager().getTableDesc(factTableName, projectName);
-        if (tableDesc.getSourceType() == ISourceAware.ID_STREAMING
-                && (desc.getPartitionDesc() == null || desc.getPartitionDesc().getPartitionDateColumn() == null)) {
-            throw new IllegalArgumentException("Must define a partition column.");
-        }
+        validateModel(projectName, desc);
 
         DataModelDesc createdDesc = null;
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -151,10 +146,20 @@ public class ModelService extends BasicService {
 
     public DataModelDesc updateModelAndDesc(String project, DataModelDesc desc) throws IOException {
         aclEvaluate.checkProjectWritePermission(project);
+        validateModel(project, desc);
         getDataModelManager().updateDataModelDesc(desc);
         return desc;
     }
 
+    public void validateModel(String project, DataModelDesc desc) throws IllegalArgumentException {
+        String factTableName = desc.getRootFactTableName();
+        TableDesc tableDesc = getTableManager().getTableDesc(factTableName, project);
+        if ((tableDesc.getSourceType() == ISourceAware.ID_STREAMING || tableDesc.isStreamingTable())
+                && (desc.getPartitionDesc() == null || desc.getPartitionDesc().getPartitionDateColumn() == null)) {
+            throw new IllegalArgumentException("Must define a partition column.");
+        }
+    }
+
     public void dropModel(DataModelDesc desc) throws IOException {
         aclEvaluate.checkProjectWritePermission(desc.getProjectInstance().getName());
         Message msg = MsgPicker.getMsg();


[kylin] 27/44: Encrypt response output for BroadcasterReceiveServlet

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a1bd10889f6b0286036e165d477020cc3fac1384
Author: nichunen <ni...@apache.org>
AuthorDate: Fri Jan 10 21:51:03 2020 +0800

    Encrypt response output for BroadcasterReceiveServlet
---
 .../broadcaster/BroadcasterReceiveServlet.java     | 24 ++++++++++++----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/broadcaster/BroadcasterReceiveServlet.java b/server-base/src/main/java/org/apache/kylin/rest/broadcaster/BroadcasterReceiveServlet.java
index a277cf3..8450f7a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/broadcaster/BroadcasterReceiveServlet.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/broadcaster/BroadcasterReceiveServlet.java
@@ -28,26 +28,21 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.kylin.common.util.EncryptUtil;
+
 /**
  */
 public class BroadcasterReceiveServlet extends HttpServlet {
 
     private static final long serialVersionUID = 1L;
-
-    public interface BroadcasterHandler {
-
-        void handle(String type, String name, String event);
-    }
-
+    private static final Pattern PATTERN = Pattern.compile("/(.+)/(.+)/(.+)");
+    private static final Pattern PATTERN2 = Pattern.compile("/(.+)/(.+)");
     private final BroadcasterHandler handler;
 
     public BroadcasterReceiveServlet(BroadcasterHandler handler) {
         this.handler = handler;
     }
 
-    private static final Pattern PATTERN = Pattern.compile("/(.+)/(.+)/(.+)");
-    private static final Pattern PATTERN2 = Pattern.compile("/(.+)/(.+)");
-
     @Override
     protected void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
         handle(req, resp);
@@ -72,7 +67,8 @@ public class BroadcasterReceiveServlet extends HttpServlet {
             if (handler != null) {
                 handler.handle(type, cacheKey, event);
             }
-            resp.getWriter().write("type:" + type + " name:" + cacheKey + " event:" + event);
+            resp.getWriter().write("Encrypted(type:" + EncryptUtil.encrypt(type) + " name:" + EncryptUtil.encrypt(cacheKey)
+                    + " event:" + EncryptUtil.encrypt(event) + ")");
         } else if (matcher2.matches()) {
             String type = matcher2.group(1);
             String event = matcher2.group(2);
@@ -82,10 +78,16 @@ public class BroadcasterReceiveServlet extends HttpServlet {
             if (handler != null) {
                 handler.handle(type, cacheKey, event);
             }
-            resp.getWriter().write("type:" + type + " name:" + cacheKey + " event:" + event);
+            resp.getWriter().write("Encrypted(type:" + EncryptUtil.encrypt(type) + " name:" + EncryptUtil.encrypt(cacheKey)
+                    + " event:" + EncryptUtil.encrypt(event) + ")");
         } else {
             resp.getWriter().write("not valid uri");
         }
         resp.getWriter().close();
     }
+
+    public interface BroadcasterHandler {
+
+        void handle(String type, String name, String event);
+    }
 }


[kylin] 05/44: KYLIN-4287 SegmentPruner satisfies "IN" filter bug

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit c5818262b1ae01fd5f7f044fb9bc2d4be7841b99
Author: zhangbushi5 <84...@qq.com>
AuthorDate: Tue Dec 10 17:15:54 2019 +0800

    KYLIN-4287 SegmentPruner satisfies "IN" filter bug
---
 .../apache/kylin/cube/common/SegmentPruner.java    |  8 ++++---
 .../kylin/cube/common/SegmentPrunerTest.java       | 25 ++++++++++++++++++++++
 2 files changed, 30 insertions(+), 3 deletions(-)

diff --git a/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java b/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
index ae21a4d..2de62de 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
@@ -137,9 +137,11 @@ public class SegmentPruner {
         switch (comp.getOperator()) {
         case EQ:
         case IN:
-            String filterMin = order.min((Set<String>) comp.getValues());
-            String filterMax = order.max((Set<String>) comp.getValues());
-            return order.compare(filterMin, maxVal) <= 0 && order.compare(minVal, filterMax) <= 0;
+            for (String filterValue : (Set<String>) comp.getValues()) {
+                if (order.compare(filterValue, maxVal) <= 0 && order.compare(minVal, filterValue) <= 0)
+                    return true;
+            }
+            return false;
         case LT:
             return order.compare(minVal, filterVal) < 0;
         case LTE:
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
index 69f9b10..5d98d06 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
@@ -343,4 +343,29 @@ public class SegmentPrunerTest extends LocalFileMetadataTestCase {
             }
         }
     }
+
+    @Test
+    public void testPruneSegWithFilterIN() {
+        // legacy cube segments does not have DimensionRangeInfo, but with TSRange can do some pruning
+        CubeInstance cube = CubeManager.getInstance(getTestConfig())
+                .getCube("test_kylin_cube_without_slr_left_join_ready_2_segments");
+        TblColRef col = cube.getModel().findColumn("TEST_KYLIN_FACT.CAL_DT");
+        CubeSegment seg = cube.getSegments(SegmentStatusEnum.READY).get(0);
+        TSRange tsRange = seg.getTSRange();
+        String start = DateFormat.formatToTimeStr(tsRange.start.v, "yyyy-MM-dd");
+        CubeSegment seg2 = cube.getSegments(SegmentStatusEnum.READY).get(1);
+        TSRange tsRange2 = seg2.getTSRange();
+        try (SetAndUnsetSystemProp sns = new SetAndUnsetSystemProp("kylin.query.skip-empty-segments", "false")) {
+
+            {
+                TupleFilter inFilter = new ConstantTupleFilter(Sets.newHashSet(start,
+                        DateFormat.formatToTimeStr(tsRange2.end.v + 1000 * 60 * 60 * 24L, "yyyy-MM-dd")));
+                TupleFilter filter = compare(col, FilterOperatorEnum.IN, inFilter);
+                SegmentPruner segmentPruner = new SegmentPruner(filter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertFalse(segmentPruner.check(seg2));
+
+            }
+        }
+    }
 }


[kylin] 42/44: Minor, add class TableDesc.TableProject to avoid the use of Pair

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 3b5584890a5f413a112292a5f2e940f8872f0efb
Author: nichunen <ni...@apache.org>
AuthorDate: Sat Feb 1 17:06:12 2020 +0800

    Minor, add class TableDesc.TableProject to avoid the use of Pair
---
 .../kylin/metadata/TableMetadataManager.java       |  14 +--
 .../org/apache/kylin/metadata/model/TableDesc.java |  31 ++++-
 .../apache/kylin/metadata/model/TableExtDesc.java  | 130 ++++++++++-----------
 .../org/apache/kylin/tool/CubeMigrationCLI.java    |   4 +-
 4 files changed, 96 insertions(+), 83 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
index ce3017f..688e377 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
@@ -35,7 +35,6 @@ import org.apache.kylin.common.persistence.Serializer;
 import org.apache.kylin.common.util.AutoReadWriteLock;
 import org.apache.kylin.common.util.AutoReadWriteLock.AutoLock;
 import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.cachesync.Broadcaster;
 import org.apache.kylin.metadata.cachesync.Broadcaster.Event;
@@ -43,6 +42,7 @@ import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
 import org.apache.kylin.metadata.cachesync.CaseInsensitiveStringCache;
 import org.apache.kylin.metadata.model.ExternalFilterDesc;
 import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TableDesc.TableProject;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.project.ProjectManager;
@@ -118,7 +118,7 @@ public class TableMetadataManager {
                 TableDesc.class, srcTableMap) {
             @Override
             protected TableDesc initEntityAfterReload(TableDesc t, String resourceName) {
-                String prj = TableDesc.parseResourcePath(resourceName).getSecond();
+                String prj = TableDesc.parseResourcePath(resourceName).getProject();
                 t.init(config, prj);
                 return t;
             }
@@ -138,9 +138,9 @@ public class TableMetadataManager {
                     srcTableCrud.reloadQuietly(cacheKey);
             }
 
-            Pair<String, String> pair = TableDesc.parseResourcePath(cacheKey);
-            String table = pair.getFirst();
-            String prj = pair.getSecond();
+            TableProject tableProject = TableDesc.parseResourcePath(cacheKey);
+            String table = tableProject.getTable();
+            String prj = tableProject.getProject();
 
             if (prj == null) {
                 for (ProjectInstance p : ProjectManager.getInstance(config).findProjectsByTable(table)) {
@@ -299,7 +299,7 @@ public class TableMetadataManager {
                     t = convertOldTableExtToNewer(resourceName);
                 }
 
-                String prj = TableDesc.parseResourcePath(resourceName).getSecond();
+                String prj = TableDesc.parseResourcePath(resourceName).getProject();
                 t.init(prj);
                 return t;
             }
@@ -424,7 +424,7 @@ public class TableMetadataManager {
         String cardinality = attrs.get(MetadataConstants.TABLE_EXD_CARDINALITY);
 
         // parse table identity from file name
-        String tableIdentity = TableDesc.parseResourcePath(resourceName).getFirst();
+        String tableIdentity = TableDesc.parseResourcePath(resourceName).getTable();
         TableExtDesc result = new TableExtDesc();
         result.setIdentity(tableIdentity);
         result.setUuid(RandomUtil.randomUUID().toString());
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
index c1f08a9..e505a9f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
@@ -50,6 +50,32 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
 
     private static final String TABLE_TYPE_VIRTUAL_VIEW = "VIRTUAL_VIEW";
 
+    public static class TableProject {
+       private String table;
+       private String project;
+
+        TableProject(String table, String project) {
+            this.table = table;
+            this.project = project;
+        }
+
+        public String getTable() {
+            return table;
+        }
+
+        public void setTable(String table) {
+            this.table = table;
+        }
+
+        public String getProject() {
+            return project;
+        }
+
+        public void setProject(String project) {
+            this.project = project;
+        }
+    }
+
     public static String concatRawResourcePath(String nameOnPath) {
         return ResourceStore.TABLE_RESOURCE_ROOT + "/" + nameOnPath + ".json";
     }
@@ -64,8 +90,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
         return concatRawResourcePath(makeResourceName(tableIdentity, prj));
     }
 
-    // returns <table, project>
-    public static Pair<String, String> parseResourcePath(String path) {
+    public static TableProject parseResourcePath(String path) {
         if (path.endsWith(".json"))
             path = path.substring(0, path.length() - ".json".length());
 
@@ -83,7 +108,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
             table = path;
             prj = null;
         }
-        return Pair.newPair(table, prj);
+        return new TableProject(table, prj);
     }
 
     // ============================================================================
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
index 7e9e8d0..8853908 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
@@ -28,7 +28,6 @@ import java.util.Map;
 
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
-import org.apache.kylin.common.util.Pair;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonBackReference;
@@ -39,34 +38,18 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 public class TableExtDesc extends RootPersistentEntity {
 
-    public static String concatRawResourcePath(String nameOnPath) {
-        return ResourceStore.TABLE_EXD_RESOURCE_ROOT + "/" + nameOnPath + ".json";
-    }
-
-    public static String concatResourcePath(String tableIdentity, String prj) {
-        return concatRawResourcePath(TableDesc.makeResourceName(tableIdentity, prj));
-    }
-
-    // returns <table, project>
-    public static Pair<String, String> parseResourcePath(String path) {
-        return TableDesc.parseResourcePath(path);
-    }
-    
-    // ============================================================================
-
     @JsonProperty("table_name")
     private String tableIdentity;
     @JsonProperty("last_build_job_id")
     private String jodID;
-
     @JsonProperty("frequency")
     private int frequency;
+
+    // ============================================================================
     @JsonProperty("columns_stats")
     private List<ColumnStats> columnStats = new ArrayList<>();
-
     @JsonProperty("sample_rows")
     private List<String[]> sampleRows = new ArrayList<>();
-
     @JsonProperty("last_modified_time")
     private long lastModifiedTime;
     @JsonProperty("total_rows")
@@ -75,17 +58,22 @@ public class TableExtDesc extends RootPersistentEntity {
     private List<Long> mapRecords = new ArrayList<>();
     @JsonProperty("data_source_properties")
     private Map<String, String> dataSourceProps = new HashMap<>();
-
     private String project;
-
     public TableExtDesc() {
     }
 
+    public static String concatRawResourcePath(String nameOnPath) {
+        return ResourceStore.TABLE_EXD_RESOURCE_ROOT + "/" + nameOnPath + ".json";
+    }
+
+    public static String concatResourcePath(String tableIdentity, String prj) {
+        return concatRawResourcePath(TableDesc.makeResourceName(tableIdentity, prj));
+    }
     @Override
     public String resourceName() {
         return TableDesc.makeResourceName(getIdentity(), getProject());
     }
-    
+
     public String getResourcePath() {
         return concatResourcePath(getIdentity(), getProject());
     }
@@ -106,10 +94,18 @@ public class TableExtDesc extends RootPersistentEntity {
         return this.tableIdentity;
     }
 
+    public void setIdentity(String name) {
+        this.tableIdentity = name;
+    }
+
     public String getJodID() {
         return this.jodID;
     }
 
+    public void setJodID(String jobID) {
+        this.jodID = jobID;
+    }
+
     public void addDataSourceProp(String key, String value) {
         this.dataSourceProps.put(key, value);
     }
@@ -118,22 +114,22 @@ public class TableExtDesc extends RootPersistentEntity {
         return this.dataSourceProps;
     }
 
-    public void setSampleRows(List<String[]> sampleRows) {
-        this.sampleRows = sampleRows;
-    }
-
     public List<String[]> getSampleRows() {
         return this.sampleRows;
     }
 
-    public void setMapRecords(List<Long> mapRecords) {
-        this.mapRecords = mapRecords;
+    public void setSampleRows(List<String[]> sampleRows) {
+        this.sampleRows = sampleRows;
     }
 
     public List<Long> getMapRecords() {
         return this.mapRecords;
     }
 
+    public void setMapRecords(List<Long> mapRecords) {
+        this.mapRecords = mapRecords;
+    }
+
     public String getCardinality() {
 
         StringBuffer cardinality = new StringBuffer();
@@ -144,14 +140,6 @@ public class TableExtDesc extends RootPersistentEntity {
         return cardinality.toString();
     }
 
-    public void resetCardinality() {
-        int columnSize = this.columnStats.size();
-        this.columnStats.clear();
-        for (int i = 0; i < columnSize; i++) {
-            this.columnStats.add(new ColumnStats());
-        }
-    }
-
     public void setCardinality(String cardinality) {
         if (null == cardinality)
             return;
@@ -174,6 +162,14 @@ public class TableExtDesc extends RootPersistentEntity {
         }
     }
 
+    public void resetCardinality() {
+        int columnSize = this.columnStats.size();
+        this.columnStats.clear();
+        for (int i = 0; i < columnSize; i++) {
+            this.columnStats.add(new ColumnStats());
+        }
+    }
+
     public List<ColumnStats> getColumnStats() {
         return this.columnStats;
     }
@@ -183,20 +179,12 @@ public class TableExtDesc extends RootPersistentEntity {
         this.columnStats = columnStats;
     }
 
-    public void setTotalRows(long totalRows) {
-        this.totalRows = totalRows;
-    }
-
     public long getTotalRows() {
         return this.totalRows;
     }
 
-    public void setIdentity(String name) {
-        this.tableIdentity = name;
-    }
-
-    public void setJodID(String jobID) {
-        this.jodID = jobID;
+    public void setTotalRows(long totalRows) {
+        this.totalRows = totalRows;
     }
 
     public void init(String project) {
@@ -206,14 +194,14 @@ public class TableExtDesc extends RootPersistentEntity {
             this.tableIdentity = this.tableIdentity.toUpperCase(Locale.ROOT);
     }
 
-    public void setLastModifiedTime(long lastModifiedTime) {
-        this.lastModifiedTime = lastModifiedTime;
-    }
-
     public long getLastModifiedTime() {
         return this.lastModifiedTime;
     }
 
+    public void setLastModifiedTime(long lastModifiedTime) {
+        this.lastModifiedTime = lastModifiedTime;
+    }
+
     public boolean isPartitioned() {
         return this.dataSourceProps.get("partition_column") == null ? false
                 : !this.dataSourceProps.get("partition_column").isEmpty();
@@ -278,86 +266,86 @@ public class TableExtDesc extends RootPersistentEntity {
         @JsonProperty("data_skew_samples")
         private Map<String, Long> dataSkewSamples = new HashMap<>();
 
+        public ColumnStats() {
+        }
+
         @Override
         public int compareTo(ColumnStats o) {
             return 0;
         }
 
-        public ColumnStats() {
+        public String getExceedPrecisionMaxLengthValue() {
+            return this.exceedPrecisionMaxLengthValue;
         }
 
         public void setExceedPrecisionMaxLengthValue(String value) {
             this.exceedPrecisionMaxLengthValue = value;
         }
 
-        public String getExceedPrecisionMaxLengthValue() {
-            return this.exceedPrecisionMaxLengthValue;
+        public long getExceedPrecisionCount() {
+            return this.exceedPrecisionCount;
         }
 
         public void setExceedPrecisionCount(long exceedPrecisionCount) {
             this.exceedPrecisionCount = exceedPrecisionCount;
         }
 
-        public long getExceedPrecisionCount() {
-            return this.exceedPrecisionCount;
+        public String getColumnName() {
+            return this.columnName;
         }
 
         public void setColumnName(String columnName) {
             this.columnName = columnName;
         }
 
-        public String getColumnName() {
-            return this.columnName;
+        public String getMaxValue() {
+            return this.maxValue;
         }
 
         public void setMaxValue(String maxValue) {
             this.maxValue = maxValue;
         }
 
-        public String getMaxValue() {
-            return this.maxValue;
+        public String getMinValue() {
+            return this.minValue;
         }
 
         public void setMinValue(String minValue) {
             this.minValue = minValue;
         }
 
-        public String getMinValue() {
-            return this.minValue;
+        public String getMaxLengthValue() {
+            return this.maxLengthValue;
         }
 
         public void setMaxLengthValue(String maxLengthValue) {
             this.maxLengthValue = maxLengthValue;
         }
 
-        public String getMaxLengthValue() {
-            return this.maxLengthValue;
+        public String getMinLengthValue() {
+            return this.minLengthValue;
         }
 
         public void setMinLengthValue(String minLengthValue) {
             this.minLengthValue = minLengthValue;
         }
 
-        public String getMinLengthValue() {
-            return this.minLengthValue;
+        public long getCardinality() {
+            return this.cardinality;
         }
 
         public void setCardinality(long cardinality) {
             this.cardinality = cardinality;
         }
 
-        public long getCardinality() {
-            return this.cardinality;
+        public Map<String, Long> getDataSkewSamples() {
+            return this.dataSkewSamples;
         }
 
         public void setDataSkewSamples(Map<String, Long> dataSkewSamples) {
             this.dataSkewSamples = dataSkewSamples;
         }
 
-        public Map<String, Long> getDataSkewSamples() {
-            return this.dataSkewSamples;
-        }
-
         public void setColumnSamples(String max, String min, String maxLenValue, String minLenValue) {
             this.maxValue = max;
             this.minValue = min;
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index 472be42..ce3b203 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -296,7 +296,7 @@ public class CubeMigrationCLI extends AbstractApplication {
             for (TableRef tableRef : tableRefs) {
                 String tableId = tableRef.getTableIdentity();
                 if (path.contains(tableId)) {
-                    String prj = TableDesc.parseResourcePath(path).getSecond();
+                    String prj = TableDesc.parseResourcePath(path).getProject();
                     if (prj == null && tableMap.get(tableId) == null)
                         tableMap.put(tableRef.getTableIdentity(), path);
 
@@ -635,7 +635,7 @@ public class CubeMigrationCLI extends AbstractApplication {
     
     private String renameTableWithinProject(String srcItem) {
         if (dstProject != null && srcItem.contains(ResourceStore.TABLE_RESOURCE_ROOT)) {
-            String tableIdentity = TableDesc.parseResourcePath(srcItem).getFirst();
+            String tableIdentity = TableDesc.parseResourcePath(srcItem).getTable();
             if (srcItem.contains(ResourceStore.TABLE_EXD_RESOURCE_ROOT))
                 return TableExtDesc.concatResourcePath(tableIdentity, dstProject);
             else


[kylin] 44/44: statement cache eviction invalidation based on time

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ef1e63c4cff83d4e83ddcc46a41b07bc998fc126
Author: mrzhao <76...@qq.com>
AuthorDate: Mon Jan 20 10:40:25 2020 +0800

    statement cache eviction invalidation based on time
---
 .../src/main/java/org/apache/kylin/rest/service/QueryService.java        | 1 +
 1 file changed, 1 insertion(+)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 0ce8379..652cbfc 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -180,6 +180,7 @@ public class QueryService extends BasicService {
         config.setMaxTotal(kylinConfig.getQueryMaxCacheStatementNum());
         config.setBlockWhenExhausted(false);
         config.setMinEvictableIdleTimeMillis(10 * 60 * 1000L); // cached statement will be evict if idle for 10 minutes
+        config.setTimeBetweenEvictionRunsMillis(60 * 1000L); 
         GenericKeyedObjectPool<PreparedContextKey, PreparedContext> pool = new GenericKeyedObjectPool<>(factory,
                 config);
         return pool;


[kylin] 35/44: KYLIN-4327 TOPN Comparator may violate its general contract

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7ea101db55d9524260ad8d9e63f2020f9ab32f71
Author: Yifei.Wu <va...@gmail.com>
AuthorDate: Mon Jan 6 11:28:47 2020 +0800

    KYLIN-4327 TOPN Comparator may violate its general contract
---
 .../org/apache/kylin/measure/topn/TopNCounter.java |  8 ++--
 .../kylin/measure/topn/TopNCounterBasicTest.java   | 47 ++++++++++++++++++++++
 2 files changed, 51 insertions(+), 4 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNCounter.java
index 8037eac..754fd55 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNCounter.java
@@ -238,18 +238,18 @@ public class TopNCounter<T> implements Iterable<Counter<T>>, java.io.Serializabl
         }
     }
 
-    private static final Comparator ASC_COMPARATOR = new Comparator<Counter>() {
+    static final Comparator ASC_COMPARATOR = new Comparator<Counter>() {
         @Override
         public int compare(Counter o1, Counter o2) {
-            return o1.getCount() > o2.getCount() ? 1 : o1.getCount() == o2.getCount() ? 0 : -1;
+            return Double.compare(o1.getCount(), o2.getCount());
         }
 
     };
 
-    private static final Comparator DESC_COMPARATOR = new Comparator<Counter>() {
+    static final Comparator DESC_COMPARATOR = new Comparator<Counter>() {
         @Override
         public int compare(Counter o1, Counter o2) {
-            return o1.getCount() > o2.getCount() ? -1 : o1.getCount() == o2.getCount() ? 0 : 1;
+            return Double.compare(o2.getCount(), o1.getCount());
         }
 
     };
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterBasicTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterBasicTest.java
index 9d034fe..506ecf3 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterBasicTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterBasicTest.java
@@ -24,8 +24,11 @@ import static org.junit.Assert.assertTrue;
 import java.util.Arrays;
 import java.util.List;
 
+import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 public class TopNCounterBasicTest {
 
     @Test
@@ -130,4 +133,48 @@ public class TopNCounterBasicTest {
         }
 
     }
+
+    @Test
+    public void testComparatorSymmetry() {
+        List<Counter> counters = Lists.newArrayList(new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 3d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 1d), new Counter<>("item", 1d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 1d),
+                new Counter<>("item", 0d), new Counter<>("item", 1d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 1d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 1d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 0d), new Counter<>("item", 2d), new Counter<>("item", 1d),
+                new Counter<>("item", 0d), new Counter<>("item", 0d), new Counter<>("item", 0d),
+                new Counter<>("item", 2d), new Counter<>("item", 4d), new Counter<>("item", 0d),
+                new Counter<>("item", 3d));
+        counters.sort(TopNCounter.ASC_COMPARATOR);
+        List<Double> expectedCounts = Lists.newArrayList(0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d,
+                0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d,
+                0d, 0d, 0d, 0d, 0d, 0d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 2d, 2d, 3d, 3d, 4d);
+        List<Double> originCounts = Lists.newArrayList();
+        counters.stream().forEach(counter -> {
+            originCounts.add(counter.getCount());
+        });
+        Assert.assertArrayEquals(expectedCounts.toArray(), originCounts.toArray());
+
+        counters.sort(TopNCounter.DESC_COMPARATOR);
+        List<Double> expectedDescCounts = Lists.newArrayList(4d, 3d, 3d, 2d, 2d, 1d, 1d, 1d, 1d, 1d, 1d, 1d, 0d, 0d, 0d,
+                0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d,
+                0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d, 0d);
+        List<Double> originDescCounts = Lists.newArrayList();
+        counters.stream().forEach(counter -> {
+            originDescCounts.add(counter.getCount());
+        });
+        Assert.assertArrayEquals(expectedDescCounts.toArray(), originDescCounts.toArray());
+    }
 }


[kylin] 20/44: KYLIN-4080 Project schema update event causes error reloading NEW DataModelDesc

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 466a7a3a8343e807177553d6b35acd6297635276
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Wed Dec 4 19:50:38 2019 +0800

    KYLIN-4080 Project schema update event causes error reloading NEW DataModelDesc
---
 .../apache/kylin/metadata/model/DataModelDesc.java | 12 +++++++++
 .../kylin/metadata/model/DataModelManager.java     | 29 ++++++++--------------
 .../kylin/rest/controller/ModelController.java     |  1 +
 3 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index 5795d78..818afdf 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -104,6 +104,9 @@ public class DataModelDesc extends RootPersistentEntity {
     @JsonProperty("capacity")
     private RealizationCapacity capacity = RealizationCapacity.MEDIUM;
 
+    @JsonProperty("projectName")
+    private String projectName; //for KYLIN-4080
+
     // computed attributes
     private TableRef rootFactTableRef;
     private Set<TableRef> factTableRefs = Sets.newLinkedHashSet();
@@ -765,6 +768,14 @@ public class DataModelDesc extends RootPersistentEntity {
         return ProjectManager.getInstance(getConfig()).getProjectOfModel(this.getName());
     }
 
+    public String getProjectName() {
+        return projectName;
+    }
+
+    public void setProjectName(String projectName) {
+        this.projectName = projectName;
+    }
+
     public static DataModelDesc getCopyOf(DataModelDesc orig) {
         return copy(orig, new DataModelDesc());
     }
@@ -781,6 +792,7 @@ public class DataModelDesc extends RootPersistentEntity {
         copy.metrics = orig.metrics;
         copy.filterCondition = orig.filterCondition;
         copy.capacity = orig.capacity;
+        copy.projectName = orig.projectName;
         if (orig.getPartitionDesc() != null) {
             copy.partitionDesc = PartitionDesc.getCopyOf(orig.getPartitionDesc());
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
index 9d5bf6a..dcbdf60 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelManager.java
@@ -86,7 +86,9 @@ public class DataModelManager {
                 getDataModelImplClass(), dataModelDescMap) {
             @Override
             protected DataModelDesc initEntityAfterReload(DataModelDesc model, String resourceName) {
-                String prj = ProjectManager.getInstance(config).getProjectOfModel(model.getName()).getName();
+                String prj = (null == model.getProjectName()
+                        ? ProjectManager.getInstance(config).getProjectOfModel(model.getName()).getName()
+                        : model.getProjectName());
                 if (!model.isDraft()) {
                     model.init(config, getAllTablesMap(prj));
                 }
@@ -240,21 +242,12 @@ public class DataModelManager {
 
             ProjectManager prjMgr = ProjectManager.getInstance(config);
             ProjectInstance prj = prjMgr.getProject(projectName);
-            if (prj.containsModel(name))
+            if (prj.containsModel(name)) {
                 throw new IllegalStateException("project " + projectName + " already contains model " + name);
-
-            try {
-                // Temporarily register model under project, because we want to 
-                // update project formally after model is saved.
-                prj.getModels().add(name);
-
-                desc.setOwner(owner);
-                logger.info("Saving Model {} to Project {} with {} as owner", desc.getName(), projectName, owner);
-                desc = saveDataModelDesc(desc);
-
-            } finally {
-                prj.getModels().remove(name);
             }
+            desc.setOwner(owner);
+            logger.info("Saving Model {} to Project {} with {} as owner", desc.getName(), projectName, owner);
+            desc = saveDataModelDesc(desc, projectName);
 
             // now that model is saved, update project formally
             prjMgr.addModelToProject(name, projectName);
@@ -270,16 +263,14 @@ public class DataModelManager {
                 throw new IllegalArgumentException("DataModelDesc '" + name + "' does not exist.");
             }
 
-            return saveDataModelDesc(desc);
+            return saveDataModelDesc(desc, ProjectManager.getInstance(config).getProjectOfModel(desc.getName()).getName());
         }
     }
 
-    private DataModelDesc saveDataModelDesc(DataModelDesc dataModelDesc) throws IOException {
-
-        String prj = ProjectManager.getInstance(config).getProjectOfModel(dataModelDesc.getName()).getName();
+    private DataModelDesc saveDataModelDesc(DataModelDesc dataModelDesc, String projectName) throws IOException {
 
         if (!dataModelDesc.isDraft())
-            dataModelDesc.init(config, this.getAllTablesMap(prj));
+            dataModelDesc.init(config, this.getAllTablesMap(projectName));
 
         crud.save(dataModelDesc);
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
index 820d422..5ffb51c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
@@ -228,6 +228,7 @@ public class ModelController extends BasicController {
         try {
             logger.debug("Saving MODEL " + modelRequest.getModelDescData());
             desc = JsonUtil.readValue(modelRequest.getModelDescData(), DataModelDesc.class);
+            desc.setProjectName(modelRequest.getProject());
         } catch (JsonParseException e) {
             logger.error("The data model definition is not valid.", e);
             updateRequest(modelRequest, false, e.getMessage());


[kylin] 43/44: Fix: SQL starting with "with" cannot be queried on web

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit cff45938f56e9788e74cca79d0eb4d4123ed9d46
Author: nichunen <ni...@apache.org>
AuthorDate: Fri Feb 7 22:24:37 2020 +0800

    Fix: SQL starting with "with" cannot be queried on web
---
 .../org/apache/kylin/query/util/QueryUtil.java     | 41 +++++++++++-----------
 1 file changed, 20 insertions(+), 21 deletions(-)

diff --git a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
index a17c578..ad2fef4 100644
--- a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
@@ -6,23 +6,20 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
-*/
+ */
 
 package org.apache.kylin.query.util;
 
-import java.util.List;
-import java.util.Locale;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -30,33 +27,31 @@ import org.apache.kylin.metadata.project.ProjectManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import java.util.List;
+import java.util.Locale;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 /**
+ *
  */
 public class QueryUtil {
 
     protected static final Logger logger = LoggerFactory.getLogger(QueryUtil.class);
-
-    private QueryUtil() {
-        throw new IllegalStateException("Class QueryUtil is an utility class !");
-    }
-
-    private static List<IQueryTransformer> queryTransformers;
-
-    public interface IQueryTransformer {
-        String transform(String sql, String project, String defaultSchema);
-    }
-
     static final String KEYWORD_SELECT = "select";
     static final String KEYWORD_WITH = "with";
     static final String KEYWORD_EXPLAIN = "explain";
+    private static List<IQueryTransformer> queryTransformers;
+    private QueryUtil() {
+        throw new IllegalStateException("Class QueryUtil is an utility class !");
+    }
 
     public static String appendLimitOffsetToSql(String sql, int limit, int offset) {
         String retSql = sql;
         String prefixSql = "select * from (";
         String suffixSql = ")";
-        if (sql.startsWith(KEYWORD_EXPLAIN)) {
+        if (StringUtils.startsWithIgnoreCase(sql, KEYWORD_EXPLAIN)
+                || StringUtils.startsWithIgnoreCase(sql, KEYWORD_WITH)) {
             prefixSql = "";
             suffixSql = "";
         }
@@ -224,4 +219,8 @@ public class QueryUtil {
 
         return sql1;
     }
+
+    public interface IQueryTransformer {
+        String transform(String sql, String project, String defaultSchema);
+    }
 }


[kylin] 32/44: Remove container never accessed

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit abd29e4310dec1d2a812358f475d530b5a4a185a
Author: nichunen <ni...@apache.org>
AuthorDate: Sun Jan 12 19:46:53 2020 +0800

    Remove container never accessed
---
 .../apache/kylin/common/metrics/metrics2/CodahaleMetrics.java  | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
index 7487930..1217558 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
@@ -27,7 +27,6 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
@@ -85,7 +84,6 @@ public class CodahaleMetrics implements Metrics {
     private LoadingCache<String, Counter> counters;
     private LoadingCache<String, Meter> meters;
     private LoadingCache<String, Histogram> histograms;
-    private ConcurrentHashMap<String, Gauge> gauges;
     private KylinConfig conf;
 
     public CodahaleMetrics() {
@@ -123,13 +121,6 @@ public class CodahaleMetrics implements Metrics {
                 return histogram;
             }
         });
-        gauges = new ConcurrentHashMap<String, Gauge>();
-        //register JVM metrics
-        //        registerAll("gc", new GarbageCollectorMetricSet());
-        //        registerAll("buffers", new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer()));
-        //        registerAll("memory", new MemoryUsageGaugeSet());
-        //        registerAll("threads", new ThreadStatesGaugeSet());
-        //        registerAll("classLoadingz", new ClassLoadingGaugeSet());
 
         //initialize reporters
         initReporting();
@@ -262,7 +253,6 @@ public class CodahaleMetrics implements Metrics {
     private void addGaugeInternal(String name, Gauge gauge) {
         try {
             gaugesLock.lock();
-            gauges.put(name, gauge);
             // Metrics throws an Exception if we don't do this when the key already exists
             if (metricRegistry.getGauges().containsKey(name)) {
                 LOGGER.warn("A Gauge with name [" + name + "] already exists. "


[kylin] 14/44: KYLIN-3609 KYLIN-3409 KYLIN-4161 CubeSegmentStatisticsMap exist only on OLAPQuery.EnumeratorTypeEnum.OLAP query type. (#942)

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8e2bc1f7ec0e85bad0f62e07c1320ad3b536e06d
Author: Alexander Sidorchuk <39...@users.noreply.github.com>
AuthorDate: Mon Dec 23 16:17:06 2019 +0300

    KYLIN-3609 KYLIN-3409 KYLIN-4161 CubeSegmentStatisticsMap exist only on OLAPQuery.EnumeratorTypeEnum.OLAP query type. (#942)
---
 .../kylin/rest/metrics/QueryMetricsFacade.java     | 47 +++++++++++++---------
 1 file changed, 27 insertions(+), 20 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
index 497f477..226166d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
@@ -36,6 +36,7 @@ import org.apache.kylin.metrics.lib.impl.TimedRecordEvent;
 import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
 import org.apache.kylin.metrics.property.QueryPropertyEnum;
 import org.apache.kylin.metrics.property.QueryRPCPropertyEnum;
+import org.apache.kylin.query.enumerator.OLAPQuery;
 import org.apache.kylin.rest.request.SQLRequest;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.slf4j.Logger;
@@ -121,26 +122,32 @@ public class QueryMetricsFacade {
                     sqlResponse.getThrowable());
 
             long totalStorageReturnCount = 0L;
-            for (Map<String, QueryContext.CubeSegmentStatistics> cubeEntry : contextEntry.getCubeSegmentStatisticsMap()
-                    .values()) {
-                for (QueryContext.CubeSegmentStatistics segmentEntry : cubeEntry.values()) {
-                    RecordEvent cubeSegmentMetricsEvent = new TimedRecordEvent(
-                            KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
-
-                    setCubeWrapper(cubeSegmentMetricsEvent, //
-                            norm(sqlRequest.getProject()), segmentEntry.getCubeName(), segmentEntry.getSegmentName(),
-                            segmentEntry.getSourceCuboidId(), segmentEntry.getTargetCuboidId(),
-                            segmentEntry.getFilterMask());
-
-                    setCubeStats(cubeSegmentMetricsEvent, //
-                            segmentEntry.getCallCount(), segmentEntry.getCallTimeSum(), segmentEntry.getCallTimeMax(),
-                            segmentEntry.getStorageSkippedRows(), segmentEntry.getStorageScannedRows(),
-                            segmentEntry.getStorageReturnedRows(), segmentEntry.getStorageAggregatedRows(),
-                            segmentEntry.isIfSuccess(), 1.0 / cubeEntry.size());
-
-                    totalStorageReturnCount += segmentEntry.getStorageReturnedRows();
-                    //For update cube segment level related query metrics
-                    MetricsManager.getInstance().update(cubeSegmentMetricsEvent);
+            if (contextEntry.getQueryType().equalsIgnoreCase(OLAPQuery.EnumeratorTypeEnum.OLAP.name())) {
+                for (Map<String, QueryContext.CubeSegmentStatistics> cubeEntry : contextEntry.getCubeSegmentStatisticsMap()
+                        .values()) {
+                    for (QueryContext.CubeSegmentStatistics segmentEntry : cubeEntry.values()) {
+                        RecordEvent cubeSegmentMetricsEvent = new TimedRecordEvent(
+                                KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
+
+                        setCubeWrapper(cubeSegmentMetricsEvent, //
+                                norm(sqlRequest.getProject()), segmentEntry.getCubeName(), segmentEntry.getSegmentName(),
+                                segmentEntry.getSourceCuboidId(), segmentEntry.getTargetCuboidId(),
+                                segmentEntry.getFilterMask());
+
+                        setCubeStats(cubeSegmentMetricsEvent, //
+                                segmentEntry.getCallCount(), segmentEntry.getCallTimeSum(), segmentEntry.getCallTimeMax(),
+                                segmentEntry.getStorageSkippedRows(), segmentEntry.getStorageScannedRows(),
+                                segmentEntry.getStorageReturnedRows(), segmentEntry.getStorageAggregatedRows(),
+                                segmentEntry.isIfSuccess(), 1.0 / cubeEntry.size());
+
+                        totalStorageReturnCount += segmentEntry.getStorageReturnedRows();
+                        //For update cube segment level related query metrics
+                        MetricsManager.getInstance().update(cubeSegmentMetricsEvent);
+                    }
+                }
+            } else {
+                if (!sqlResponse.getIsException()) {
+                    totalStorageReturnCount = sqlResponse.getResults().size();
                 }
             }
             setQueryStats(queryMetricsEvent, //


[kylin] 28/44: Fix "equals" on incomparable types

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 17777e929df99eb595d32eb696ad1db71035438c
Author: nichunen <ni...@apache.org>
AuthorDate: Sun Jan 12 18:42:25 2020 +0800

    Fix "equals" on incomparable types
---
 .../java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
index 1f75660..12c30ea 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
@@ -23,6 +23,7 @@ import java.net.URLDecoder;
 import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -286,7 +287,7 @@ public class HFileOutputFormat3 extends FileOutputFormat<ImmutableBytesWritable,
         TreeSet<ImmutableBytesWritable> sorted = new TreeSet<ImmutableBytesWritable>(startKeys);
 
         ImmutableBytesWritable first = sorted.first();
-        if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {
+        if (!Arrays.equals(first.get(), HConstants.EMPTY_BYTE_ARRAY)) {
             throw new IllegalArgumentException("First region of table should have empty start key. Instead has: "
                     + Bytes.toStringBinary(first.get()));
         }


[kylin] 36/44: Prevent uncontrolled data used in path expression

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ac6906189ea20cafdc41be0ed54611d52284b6bb
Author: nichunen <ni...@apache.org>
AuthorDate: Thu Jan 16 17:27:20 2020 +0800

    Prevent uncontrolled data used in path expression
---
 .../src/main/java/org/apache/kylin/job/execution/ExecutableManager.java  | 1 +
 1 file changed, 1 insertion(+)

diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index ce9cc35..6f8d789 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -149,6 +149,7 @@ public class ExecutableManager {
 
     public AbstractExecutable getJob(String uuid) {
         try {
+            uuid = uuid.replaceAll("[./]", "");
             return parseTo(executableDao.getJob(uuid));
         } catch (PersistentException e) {
             logger.error("fail to get job:" + uuid, e);


[kylin] 29/44: Fix type mismatch on container access

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 0d83874099cce6d3cb4057d381467516815a69e3
Author: nichunen <ni...@apache.org>
AuthorDate: Sun Jan 12 18:47:31 2020 +0800

    Fix type mismatch on container access
---
 .../stream/coordinator/assign/CubePartitionRoundRobinAssigner.java    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/assign/CubePartitionRoundRobinAssigner.java b/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/assign/CubePartitionRoundRobinAssigner.java
index 8232177..c03340d 100644
--- a/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/assign/CubePartitionRoundRobinAssigner.java
+++ b/stream-coordinator/src/main/java/org/apache/kylin/stream/coordinator/assign/CubePartitionRoundRobinAssigner.java
@@ -146,9 +146,9 @@ public class CubePartitionRoundRobinAssigner implements Assigner {
         Collections.sort(replicaSets, new Comparator<ReplicaSet>() {
             @Override
             public int compare(ReplicaSet o1, ReplicaSet o2) {
-                Integer partitionNum1Obj = replicaSetPartitionNumMap.get(o1);
+                Integer partitionNum1Obj = replicaSetPartitionNumMap.get(o1.getReplicaSetID());
                 int partitionNum1 = partitionNum1Obj == null ? 0 : partitionNum1Obj;
-                Integer partitionNum2Obj = replicaSetPartitionNumMap.get(o2);
+                Integer partitionNum2Obj = replicaSetPartitionNumMap.get(o2.getReplicaSetID());
                 int partitionNum2 = partitionNum2Obj == null ? 0 : partitionNum2Obj;
                 return partitionNum1 - partitionNum2;
             }


[kylin] 12/44: KYLIN-4237 Return error when execute "explain plan for SQL" to get the execution plan of SQL

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a9945ca8d11ab0960c30ba9757425f006567db0a
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Mon Nov 4 19:50:20 2019 +0800

    KYLIN-4237 Return error when execute "explain plan for SQL" to get the execution plan of SQL
---
 .../org/apache/kylin/query/util/QueryUtil.java     | 16 ++++++----
 .../org/apache/kylin/query/util/QueryUtilTest.java | 35 ++++++++++++++++++++++
 2 files changed, 46 insertions(+), 5 deletions(-)

diff --git a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
index 30db7e1..a17c578 100644
--- a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
@@ -52,15 +52,21 @@ public class QueryUtil {
     static final String KEYWORD_WITH = "with";
     static final String KEYWORD_EXPLAIN = "explain";
 
-    private static String appendLimitOffsetToSql(String sql, int limit, int offset) {
+    public static String appendLimitOffsetToSql(String sql, int limit, int offset) {
         String retSql = sql;
+        String prefixSql = "select * from (";
+        String suffixSql = ")";
+        if (sql.startsWith(KEYWORD_EXPLAIN)) {
+            prefixSql = "";
+            suffixSql = "";
+        }
         if (0 != limit && 0 != offset) {
-            retSql = "select * from (" + sql + ") limit " + String.valueOf(limit) +
-                    " offset " + String.valueOf(offset);
+            retSql = prefixSql + sql + suffixSql + " limit " + String.valueOf(limit) + " offset "
+                    + String.valueOf(offset);
         } else if (0 == limit && 0 != offset) {
-            retSql = "select * from (" + sql + ") offset " + String.valueOf(offset);
+            retSql = prefixSql + sql + suffixSql + " offset " + String.valueOf(offset);
         } else if (0 != limit && 0 == offset) {
-            retSql = "select * from (" + sql + ") limit " + String.valueOf(limit);
+            retSql = prefixSql + sql + suffixSql + " limit " + String.valueOf(limit);
         } else {
             // do nothing
         }
diff --git a/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java b/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
index ffd169d..9b769b8 100644
--- a/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
+++ b/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
@@ -40,6 +40,41 @@ public class QueryUtilTest extends LocalFileMetadataTestCase {
     }
 
     @Test
+    public void testappendLimitOffsetToSql() {
+        {
+            String sql = "select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact";
+            String newsql = QueryUtil.appendLimitOffsetToSql(sql, 100, 100);
+            Assert.assertEquals(
+                    "select * from (select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact) limit 100 offset 100",
+                    newsql);
+        }
+
+        {
+            String sql = "select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact";
+            String newsql = QueryUtil.appendLimitOffsetToSql(sql, 0, 0);
+            Assert.assertEquals(
+                    "select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact",
+                    newsql);
+        }
+
+        {
+            String sql = "explain plan for select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact";
+            String newsql = QueryUtil.appendLimitOffsetToSql(sql, 100, 100);
+            Assert.assertEquals(
+                    "explain plan for select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact limit 100 offset 100",
+                    newsql);
+        }
+
+        {
+            String sql = "explain plan for select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact";
+            String newsql = QueryUtil.appendLimitOffsetToSql(sql, 0, 0);
+            Assert.assertEquals(
+                    "explain plan for select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact",
+                    newsql);
+        }
+    }
+
+    @Test
     public void testMassageSql() {
         {
             String sql = "select ( date '2001-09-28' + interval floor(1.2) day) from test_kylin_fact";


[kylin] 07/44: KYLIN-4225 close hive session manually

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 6635c0464a9ef9882ae20f69d911af19e823afe1
Author: Zhou Kang <zh...@xiaomi.com>
AuthorDate: Wed Oct 30 10:14:57 2019 +0800

    KYLIN-4225 close hive session manually
---
 .../org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
index 8b67e8a..5082b6a 100644
--- a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
+++ b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metrics.lib.ActiveReservoirReporter;
@@ -174,8 +175,15 @@ public class HiveProducer {
             hql.append(")");
             logger.debug("create partition by {}.", hql);
             Driver driver = new Driver(hiveConf);
-            SessionState.start(new CliSessionState(hiveConf));
-            driver.run(hql.toString());
+            CliSessionState session = new CliSessionState(hiveConf);
+            SessionState.start(session);
+            CommandProcessorResponse res = driver.run(hql.toString());
+            if (res.getResponseCode() != 0) {
+                logger.warn("Fail to add partition. HQL: {}; Cause by: {}",
+                        hql.toString(),
+                        res.toString());
+            }
+            session.close();
             driver.close();
         }
 


[kylin] 33/44: Ensure the validity of http header from concatenated string

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7395b468c22cc41e44d818d4cab5d614ac5c64f0
Author: nichunen <ni...@apache.org>
AuthorDate: Mon Jan 13 13:17:15 2020 +0800

    Ensure the validity of http header from concatenated string
---
 .../java/org/apache/kylin/rest/controller/CubeController.java  |  3 ++-
 .../java/org/apache/kylin/rest/controller/QueryController.java | 10 ++++++----
 .../src/main/java/org/apache/kylin/rest/util/ValidateUtil.java |  8 ++++++--
 3 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 8e44ca1..85cad98 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -887,7 +887,8 @@ public class CubeController extends BasicController {
         }
 
         response.setContentType("text/json;charset=utf-8");
-        response.setHeader("Content-Disposition", "attachment; filename=\"" + cubeName + ".json\"");
+        response.setHeader("Content-Disposition",
+                "attachment; filename=\"" + ValidateUtil.convertStringToBeAlphanumericUnderscore(cubeName) + ".json\"");
         try (PrintWriter writer = response.getWriter()) {
             writer.write(JsonUtil.writeValueAsString(dimensionSetList));
         } catch (IOException e) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
index 6b56e91..da0a1e5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
@@ -49,6 +49,7 @@ import org.apache.kylin.rest.request.SQLRequest;
 import org.apache.kylin.rest.request.SaveSqlRequest;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.service.QueryService;
+import org.apache.kylin.rest.util.ValidateUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -77,14 +78,13 @@ public class QueryController extends BasicController {
 
     @SuppressWarnings("unused")
     private static final Logger logger = LoggerFactory.getLogger(QueryController.class);
-
+    private static String BOM_CHARACTER;
     @Autowired
     @Qualifier("queryService")
     private QueryService queryService;
 
-    private static String BOM_CHARACTER;
     {
-        BOM_CHARACTER = new String(new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF}, StandardCharsets.UTF_8);
+        BOM_CHARACTER = new String(new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF }, StandardCharsets.UTF_8);
     }
 
     @RequestMapping(value = "/query", method = RequestMethod.POST, produces = { "application/json" })
@@ -148,7 +148,9 @@ public class QueryController extends BasicController {
         SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS", Locale.ROOT);
         Date now = new Date();
         String nowStr = sdf.format(now);
-        response.setHeader("Content-Disposition", "attachment; filename=\"" + nowStr + ".result." + format + "\"");
+        response.setHeader("Content-Disposition",
+                "attachment; filename=\"" + ValidateUtil.convertStringToBeAlphanumericUnderscore(nowStr) + ".result."
+                        + ValidateUtil.convertStringToBeAlphanumericUnderscore(format) + "\"");
         ICsvListWriter csvWriter = null;
 
         try {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/ValidateUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/ValidateUtil.java
index fe957fc..6273e14 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/ValidateUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/ValidateUtil.java
@@ -50,7 +50,7 @@ import com.google.common.base.Preconditions;
 
 @Component("validateUtil")
 public class ValidateUtil {
-    private final static Pattern alphaNumUnderscorePattren = Pattern.compile("[a-zA-Z0-9_]+");
+    private final static Pattern alphaNumUnderscorePattern = Pattern.compile("[a-zA-Z0-9_]+");
 
     @Autowired
     @Qualifier("tableService")
@@ -73,7 +73,11 @@ public class ValidateUtil {
     private IUserGroupService userGroupService;
 
     public static boolean isAlphanumericUnderscore(String toCheck) {
-        return toCheck == null ? false : alphaNumUnderscorePattren.matcher(toCheck).matches();
+        return toCheck != null && alphaNumUnderscorePattern.matcher(toCheck).matches();
+    }
+
+    public static String convertStringToBeAlphanumericUnderscore(String toBeConverted) {
+        return toBeConverted.replaceAll("[^a-zA-Z0-9_]", "");
     }
 
     public void checkIdentifiersExists(String name, boolean isPrincipal) throws IOException {


[kylin] 41/44: Minor, throw exception in case of illegal project name

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 5016842d059dd77dc47585595729c53a37ca34c6
Author: nichunen <ni...@apache.org>
AuthorDate: Thu Jan 30 23:54:00 2020 +0800

    Minor, throw exception in case of illegal project name
---
 .../kylin/rest/controller/ExternalFilterController.java      | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
index c6a5500..6c5ea08 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
@@ -21,13 +21,16 @@ package org.apache.kylin.rest.controller;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.model.ExternalFilterDesc;
+import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.request.ExternalFilterRequest;
 import org.apache.kylin.rest.service.ExtFilterService;
+import org.apache.kylin.rest.util.ValidateUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -80,7 +83,8 @@ public class ExternalFilterController extends BasicController {
 
     @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = { "application/json" })
     @ResponseBody
-    public Map<String, String> removeFilter(@PathVariable String filter, @PathVariable String project) throws IOException {
+    public Map<String, String> removeFilter(@PathVariable String filter, @PathVariable String project)
+            throws IOException {
         Map<String, String> result = new HashMap<String, String>();
         extFilterService.removeExtFilterFromProject(filter, project);
         extFilterService.removeExternalFilter(filter);
@@ -90,7 +94,11 @@ public class ExternalFilterController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<ExternalFilterDesc> getExternalFilters(@RequestParam(value = "project", required = true) String project) throws IOException {
+    public List<ExternalFilterDesc> getExternalFilters(@RequestParam(value = "project", required = true) String project)
+            throws IOException {
+        if (!ValidateUtil.isAlphanumericUnderscore(project)) {
+            throw new BadRequestException(String.format(Locale.ROOT, "Invalid Project name %s.", project));
+        }
         List<ExternalFilterDesc> filterDescs = Lists.newArrayList();
         filterDescs.addAll(extFilterService.getProjectManager().listExternalFilterDescs(project).values());
         return filterDescs;


[kylin] 18/44: fix bug in KYLIN-4243

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b8c5252cb15e9f1aa685a6387b7ba15378b1bd5a
Author: zengruios <57...@qq.com>
AuthorDate: Mon Dec 16 22:19:33 2019 +0800

    fix bug in KYLIN-4243
---
 .../kylin/stream/core/storage/columnar/ColumnarStoreDimDesc.java    | 5 +++--
 .../stream/core/storage/columnar/ColumnarStoreMetricsDesc.java      | 6 ++++--
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreDimDesc.java b/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreDimDesc.java
index 0406ecb..971d137 100644
--- a/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreDimDesc.java
+++ b/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreDimDesc.java
@@ -78,7 +78,7 @@ public class ColumnarStoreDimDesc {
         } else if (compression == Compression.RUN_LENGTH) {
             return new RunLengthCompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength, rowCount);
         }
-        return new NoCompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength, rowCount);
+        return new NoCompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength / rowCount, rowCount);
     }
 
     public ColumnDataReader getDimReaderFromFSInput(FSDataInputStream inputStream, int columnDataStartOffset,
@@ -88,6 +88,7 @@ public class ColumnarStoreDimDesc {
         } else if (compression == Compression.RUN_LENGTH) {
             return new FSInputRLECompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength, rowCount);
         }
-        return new FSInputNoCompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength, rowCount);
+        return new FSInputNoCompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength / rowCount,
+                rowCount);
     }
 }
diff --git a/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreMetricsDesc.java b/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreMetricsDesc.java
index 098134d..eca5ae7 100644
--- a/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreMetricsDesc.java
+++ b/stream-core/src/main/java/org/apache/kylin/stream/core/storage/columnar/ColumnarStoreMetricsDesc.java
@@ -68,7 +68,8 @@ public class ColumnarStoreMetricsDesc {
             return new LZ4CompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength, rowCount);
         }
         if (fixLen != -1) {
-            return new NoCompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength, rowCount);
+            return new NoCompressedColumnReader(dataBuffer, columnDataStartOffset, columnDataLength / rowCount,
+                    rowCount);
         }
         return new GeneralColumnDataReader(dataBuffer, columnDataStartOffset, columnDataLength);
     }
@@ -79,7 +80,8 @@ public class ColumnarStoreMetricsDesc {
             return new FSInputLZ4CompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength, rowCount);
         }
         if (fixLen != -1) {
-            return new FSInputNoCompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength, rowCount);
+            return new FSInputNoCompressedColumnReader(inputStream, columnDataStartOffset, columnDataLength / rowCount,
+                    rowCount);
         }
         return new FSInputGeneralColumnDataReader(inputStream, columnDataStartOffset, columnDataLength);
     }


[kylin] 04/44: KYLIN-4280 SegmentPruner support 'OR' and 'NOT'

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 4300fa51a048a4033c54fa775e4e7dff58413c07
Author: zhangbushi5 <84...@qq.com>
AuthorDate: Fri Dec 6 17:04:22 2019 +0800

    KYLIN-4280 SegmentPruner support 'OR' and 'NOT'
---
 .../apache/kylin/cube/common/SegmentPruner.java    |  48 ++-----
 .../apache/kylin/cube/common/TupleFilterNode.java  |  76 +++++++++++
 .../kylin/cube/common/SegmentPrunerTest.java       | 151 +++++++++++++++++++++
 3 files changed, 241 insertions(+), 34 deletions(-)

diff --git a/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java b/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
index f3f2052..ae21a4d 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/common/SegmentPruner.java
@@ -19,9 +19,7 @@
 package org.apache.kylin.cube.common;
 
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
@@ -45,11 +43,11 @@ import org.slf4j.LoggerFactory;
 public class SegmentPruner {
     private static final Logger logger = LoggerFactory.getLogger(SegmentPruner.class);
 
-    final private Set<CompareTupleFilter> mustTrueCompares;
+    private TupleFilterNode node;
 
     public SegmentPruner(TupleFilter filter) {
-        this.mustTrueCompares = filter == null ? Collections.<CompareTupleFilter> emptySet()
-                : filter.findMustTrueCompareFilters();
+        this.node = new TupleFilterNode(filter);
+
     }
 
     public List<CubeSegment> listSegmentsForQuery(CubeInstance cube) {
@@ -62,7 +60,7 @@ public class SegmentPruner {
     }
     
     public boolean check(CubeSegment seg) {
-        
+
         if (seg.getInputRecords() == 0) {
             if (seg.getConfig().isSkippingEmptySegments()) {
                 logger.debug("Prune segment {} due to 0 input record", seg);
@@ -72,53 +70,35 @@ public class SegmentPruner {
             }
         }
 
-        Map<String, DimensionRangeInfo> segDimRangInfoMap = seg.getDimensionRangeInfoMap();
-        for (CompareTupleFilter comp : mustTrueCompares) {
-            TblColRef col = comp.getColumn();
-
-            if (!col.getType().needCompare()) {
-                continue;
-            }
-            
-            DimensionRangeInfo dimRangeInfo = segDimRangInfoMap.get(col.getIdentity());
-            if (dimRangeInfo == null)
-                dimRangeInfo = tryDeduceRangeFromPartitionCol(seg, col);
-            if (dimRangeInfo == null)
-                continue;
-            
-            String minVal = dimRangeInfo.getMin();
-            String maxVal = dimRangeInfo.getMax();
-            
-            if (!satisfy(comp, minVal, maxVal)) {
-                logger.debug("Prune segment {} due to given filter", seg);
-                return false;
-            }
+        if (!node.checkSeg(seg)) {
+            logger.debug("Prune segment {} due to given filter", seg);
+            return false;
         }
 
         logger.debug("Pruner passed on segment {}", seg);
         return true;
     }
 
-    private DimensionRangeInfo tryDeduceRangeFromPartitionCol(CubeSegment seg, TblColRef col) {
+    public static DimensionRangeInfo tryDeduceRangeFromPartitionCol(CubeSegment seg, TblColRef col) {
         DataModelDesc model = seg.getModel();
         PartitionDesc part = model.getPartitionDesc();
-        
+
         if (!part.isPartitioned())
             return null;
         if (!col.equals(part.getPartitionDateColumnRef()))
             return null;
-        
+
         // deduce the dim range from TSRange
         TSRange tsRange = seg.getTSRange();
         if (tsRange.start.isMin || tsRange.end.isMax)
             return null; // DimensionRangeInfo cannot express infinite
-        
+
         String min = tsRangeToStr(tsRange.start.v, part);
         String max = tsRangeToStr(tsRange.end.v - 1, part); // note the -1, end side is exclusive
         return new DimensionRangeInfo(min, max);
     }
 
-    private String tsRangeToStr(long ts, PartitionDesc part) {
+    private static String tsRangeToStr(long ts, PartitionDesc part) {
         String value;
         DataType partitionColType = part.getPartitionDateColumnRef().getType();
         if (partitionColType.isDate()) {
@@ -138,7 +118,7 @@ public class SegmentPruner {
         return value;
     }
 
-    private boolean satisfy(CompareTupleFilter comp, String minVal, String maxVal) {
+    public static boolean satisfy(CompareTupleFilter comp, String minVal, String maxVal) {
 
         // When both min and max are null, it means all cells of the column are null.
         // In such case, return true to let query engine scan the segment, since the
@@ -177,7 +157,7 @@ public class SegmentPruner {
         }
     }
 
-    private String toString(Object v) {
+    private static String toString(Object v) {
         return v == null ? null : v.toString();
     }
 }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/common/TupleFilterNode.java b/core-cube/src/main/java/org/apache/kylin/cube/common/TupleFilterNode.java
new file mode 100644
index 0000000..8a84f67
--- /dev/null
+++ b/core-cube/src/main/java/org/apache/kylin/cube/common/TupleFilterNode.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.cube.common;
+
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.DimensionRangeInfo;
+import org.apache.kylin.metadata.filter.CompareTupleFilter;
+import org.apache.kylin.metadata.filter.LogicalTupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.model.TblColRef;
+
+public class TupleFilterNode {
+    private TupleFilter filter;
+
+    public TupleFilterNode(TupleFilter filter) {
+        this.filter = filter;
+    }
+
+    public boolean checkSeg(CubeSegment seg) {
+        if (filter == null)
+            return true;
+        if (filter instanceof CompareTupleFilter) {
+            CompareTupleFilter compareTupleFilter = (CompareTupleFilter) filter;
+            TblColRef col = compareTupleFilter.getColumn();
+            if (col == null) {
+                return true;
+            }
+            DimensionRangeInfo dimRangeInfo = seg.getDimensionRangeInfoMap().get(col.getIdentity());
+            if (dimRangeInfo == null)
+                dimRangeInfo = SegmentPruner.tryDeduceRangeFromPartitionCol(seg, col);
+            if (dimRangeInfo == null)
+                return true;
+            String minVal = dimRangeInfo.getMin();
+            String maxVal = dimRangeInfo.getMax();
+            return SegmentPruner.satisfy(compareTupleFilter, minVal, maxVal);
+        }
+
+        if (filter instanceof LogicalTupleFilter) {
+            if (filter.getOperator() == TupleFilter.FilterOperatorEnum.AND) {
+                for (TupleFilter filter : filter.getChildren()) {
+                    if (!new TupleFilterNode(filter).checkSeg(seg))
+                        return false;
+                }
+                return true;
+
+            } else if (filter.getOperator() == TupleFilter.FilterOperatorEnum.OR) {
+                for (TupleFilter filter : filter.getChildren()) {
+                    if (new TupleFilterNode(filter).checkSeg(seg))
+                        return true;
+                }
+                return false;
+            } else if (filter.getOperator() == TupleFilter.FilterOperatorEnum.NOT) {
+                for (TupleFilter filter : filter.getChildren()) {
+                    return !(new TupleFilterNode(filter).checkSeg(seg));
+                }
+            }
+        }
+        return true;
+    }
+}
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
index 603db97..69f9b10 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/SegmentPrunerTest.java
@@ -18,15 +18,21 @@
 
 package org.apache.kylin.cube.common;
 
+import static org.apache.kylin.metadata.filter.TupleFilter.and;
 import static org.apache.kylin.metadata.filter.TupleFilter.compare;
+import static org.apache.kylin.metadata.filter.TupleFilter.or;
 
+import com.google.common.collect.Maps;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.common.util.SetAndUnsetSystemProp;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.DimensionRangeInfo;
 import org.apache.kylin.metadata.filter.ConstantTupleFilter;
+import org.apache.kylin.metadata.filter.LogicalTupleFilter;
 import org.apache.kylin.metadata.filter.TupleFilter;
 import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
 import org.apache.kylin.metadata.model.SegmentRange.TSRange;
@@ -39,6 +45,8 @@ import org.junit.Test;
 
 import com.google.common.collect.Sets;
 
+import java.util.Map;
+
 public class SegmentPrunerTest extends LocalFileMetadataTestCase {
     private CubeInstance cube;
 
@@ -192,4 +200,147 @@ public class SegmentPrunerTest extends LocalFileMetadataTestCase {
             }
         }
     }
+
+    @Test
+    public void testLegacyCubeSegWithOrFilter() {
+        // legacy cube segments does not have DimensionRangeInfo, but with TSRange can do some pruning
+        CubeInstance cube = CubeManager.getInstance(getTestConfig())
+                .getCube("test_kylin_cube_without_slr_left_join_ready_2_segments");
+
+        TblColRef col = cube.getModel().findColumn("TEST_KYLIN_FACT.CAL_DT");
+        TblColRef col2 = cube.getModel().findColumn("TEST_KYLIN_FACT.LSTG_SITE_ID");
+        CubeSegment seg = cube.getSegments(SegmentStatusEnum.READY).get(0);
+        Map<String, DimensionRangeInfo> dimensionRangeInfoMap = Maps.newHashMap();
+        dimensionRangeInfoMap.put("TEST_KYLIN_FACT.LSTG_SITE_ID", new DimensionRangeInfo("10", "20"));
+        seg.setDimensionRangeInfoMap(dimensionRangeInfoMap);
+        TSRange tsRange = seg.getTSRange();
+        String start = DateFormat.formatToTimeStr(tsRange.start.v, "yyyy-MM-dd");
+
+        CubeSegment seg2 = cube.getSegments(SegmentStatusEnum.READY).get(1);
+        Map<String, DimensionRangeInfo> dimensionRangeInfoMap2 = Maps.newHashMap();
+        dimensionRangeInfoMap2.put("TEST_KYLIN_FACT.LSTG_SITE_ID", new DimensionRangeInfo("20", "30"));
+        seg2.setDimensionRangeInfoMap(dimensionRangeInfoMap2);
+        TSRange tsRange2 = seg2.getTSRange();
+        String start2 = DateFormat.formatToTimeStr(tsRange2.start.v, "yyyy-MM-dd");
+
+        try (SetAndUnsetSystemProp sns = new SetAndUnsetSystemProp("kylin.query.skip-empty-segments", "false")) {
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "15");
+                LogicalTupleFilter logicalAndFilter = and(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.EQ, "25");
+                LogicalTupleFilter logicalAndFilter2 = and(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = or(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertTrue(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "15");
+                LogicalTupleFilter logicalAndFilter = and(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.EQ, "35");
+                LogicalTupleFilter logicalAndFilter2 = and(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = or(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertFalse(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "15");
+                LogicalTupleFilter logicalAndFilter = and(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.EQ, "35");
+                LogicalTupleFilter logicalAndFilter2 = and(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = and(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertFalse(segmentPruner.check(seg));
+                Assert.assertFalse(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "15");
+                LogicalTupleFilter logicalAndFilter = and(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.LT, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.LT, "35");
+                LogicalTupleFilter logicalAndFilter2 = and(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = and(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertFalse(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "35");
+                LogicalTupleFilter logicalAndFilter = or(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.LT, "35");
+                LogicalTupleFilter logicalAndFilter2 = or(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = or(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertTrue(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.IN, "15");
+                LogicalTupleFilter logicalAndFilter = and(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.LT, "35");
+                LogicalTupleFilter logicalAndFilter2 = or(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter = or(logicalAndFilter, logicalAndFilter2);
+                SegmentPruner segmentPruner = new SegmentPruner(finalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertTrue(segmentPruner.check(seg2));
+            }
+
+            {
+                TupleFilter andFilter1 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter2 = compare(col2, FilterOperatorEnum.EQ, "35");
+                LogicalTupleFilter logicalAndFilter = or(andFilter1, andFilter2);
+
+                TupleFilter andFilter3 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter4 = compare(col2, FilterOperatorEnum.LT, "35");
+                LogicalTupleFilter logicalAndFilter2 = or(andFilter3, andFilter4);
+
+                LogicalTupleFilter finalFilter1 = or(logicalAndFilter, logicalAndFilter2);
+
+                TupleFilter andFilter5 = compare(col, FilterOperatorEnum.EQ, start);
+                TupleFilter andFilter6 = compare(col2, FilterOperatorEnum.IN, "15");
+                LogicalTupleFilter logicalAndFilter3 = and(andFilter5, andFilter6);
+
+                TupleFilter andFilter7 = compare(col, FilterOperatorEnum.EQ, start2);
+                TupleFilter andFilter8 = compare(col2, FilterOperatorEnum.LT, "35");
+                LogicalTupleFilter logicalAndFilter4 = or(andFilter7, andFilter8);
+
+                LogicalTupleFilter finalFilter2 = or(logicalAndFilter3, logicalAndFilter4);
+
+                LogicalTupleFilter finalFinalFilter = or(finalFilter1, finalFilter2);
+
+                SegmentPruner segmentPruner = new SegmentPruner(finalFinalFilter);
+                Assert.assertTrue(segmentPruner.check(seg));
+                Assert.assertTrue(segmentPruner.check(seg2));
+            }
+        }
+    }
 }


[kylin] 13/44: KYLIN-4250 FetcherRunner should skip the job to process other jobs instead of throwing exception when the job section metadata is not found

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b764400e3bd76cd6c4ca8f2345e369783a754284
Author: bigxiaochu <cr...@163.com>
AuthorDate: Tue Nov 12 17:57:36 2019 +0800

    KYLIN-4250 FetcherRunner should skip the job to process other jobs instead of throwing exception when the job section metadata is not found
---
 .../kylin/job/impl/threadpool/DefaultFetcherRunner.java    | 14 +++++++++++++-
 .../kylin/job/impl/threadpool/PriorityFetcherRunner.java   | 14 +++++++++++++-
 2 files changed, 26 insertions(+), 2 deletions(-)

diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultFetcherRunner.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultFetcherRunner.java
index 3f51018..1c21902 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultFetcherRunner.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultFetcherRunner.java
@@ -66,7 +66,14 @@ public class DefaultFetcherRunner extends FetcherRunner {
                     continue;
                 }
 
-                final Output outputDigest = getExecutableManager().getOutputDigest(id);
+                final Output outputDigest;
+                try {
+                    outputDigest = getExecutableManager().getOutputDigest(id);
+                } catch (IllegalArgumentException e) {
+                    logger.warn("job " + id + " output digest is null, skip.", e);
+                    nOthers++;
+                    continue;
+                }
                 if ((outputDigest.getState() != ExecutableState.READY)) {
                     // logger.debug("Job id:" + id + " not runnable");
                     jobStateCount(id);
@@ -74,6 +81,11 @@ public class DefaultFetcherRunner extends FetcherRunner {
                 }
 
                 final AbstractExecutable executable = getExecutableManager().getJob(id);
+                if (executable == null) {
+                    logger.info("job " + id + " get job is null, skip.");
+                    nOthers++;
+                    continue;
+                }
                 if (!executable.isReady()) {
                     nOthers++;
                     continue;
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/PriorityFetcherRunner.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/PriorityFetcherRunner.java
index 60bf77b..cb01475 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/PriorityFetcherRunner.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/PriorityFetcherRunner.java
@@ -99,13 +99,25 @@ public class PriorityFetcherRunner extends FetcherRunner {
                     continue;
                 }
 
-                final Output outputDigest = getExecutableManager().getOutputDigest(id);
+                final Output outputDigest;
+                try {
+                    outputDigest = getExecutableManager().getOutputDigest(id);
+                } catch (IllegalArgumentException e) {
+                    logger.warn("job " + id + " output digest is null, skip.", e);
+                    nOthers++;
+                    continue;
+                }
                 if ((outputDigest.getState() != ExecutableState.READY)) {
                     jobStateCount(id);
                     continue;
                 }
 
                 AbstractExecutable executable = getExecutableManager().getJob(id);
+                if (executable == null) {
+                    logger.info("job " + id + " get job is null, skip.");
+                    nOthers++;
+                    continue;
+                }
                 if (!executable.isReady()) {
                     nOthers++;
                     continue;


[kylin] 24/44: KYLIN-4333 Build Server OOM

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ebfc745dd681d7e0c129ded50bd50ff509d2a393
Author: wangxiaojing <wa...@didichuxing.com>
AuthorDate: Sat Jan 11 13:29:51 2020 +0800

    KYLIN-4333 Build Server OOM
---
 .../org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java   | 3 +--
 .../java/org/apache/kylin/common/persistence/ResourceStore.java     | 3 +--
 .../org/apache/kylin/measure/percentile/PercentileSerializer.java   | 5 ++---
 .../java/org/apache/kylin/measure/topn/DoubleDeltaSerializer.java   | 6 ++----
 .../java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java | 5 ++---
 5 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
index 53aaf8d..7487930 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
@@ -39,7 +39,6 @@ import org.apache.kylin.common.metrics.common.Metrics;
 import org.apache.kylin.common.metrics.common.MetricsConstant;
 import org.apache.kylin.common.metrics.common.MetricsScope;
 import org.apache.kylin.common.metrics.common.MetricsVariable;
-import org.apache.kylin.common.threadlocal.InternalThreadLocal;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -76,7 +75,7 @@ public class CodahaleMetrics implements Metrics {
     private final Lock metersLock = new ReentrantLock();
     private final Lock histogramLock = new ReentrantLock();
     private final Set<Closeable> reporters = new HashSet<Closeable>();
-    private final InternalThreadLocal<HashMap<String, CodahaleMetricsScope>> threadLocalScopes = new InternalThreadLocal<HashMap<String, CodahaleMetricsScope>>() {
+    private final ThreadLocal<HashMap<String, CodahaleMetricsScope>> threadLocalScopes = new ThreadLocal<HashMap<String, CodahaleMetricsScope>>() {
         @Override
         protected HashMap<String, CodahaleMetricsScope> initialValue() {
             return new HashMap<String, CodahaleMetricsScope>();
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index 2ab7045..b21c423 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -40,7 +40,6 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.StorageURL;
-import org.apache.kylin.common.threadlocal.InternalThreadLocal;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.slf4j.Logger;
@@ -596,7 +595,7 @@ abstract public class ResourceStore {
 
     // ============================================================================
 
-    InternalThreadLocal<Checkpoint> checkpointing = new InternalThreadLocal<>();
+    ThreadLocal<Checkpoint> checkpointing = new ThreadLocal<>();
 
     public Checkpoint checkpoint() {
         Checkpoint cp = checkpointing.get();
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
index 2f75e16..d0ecba7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
@@ -20,13 +20,12 @@ package org.apache.kylin.measure.percentile;
 
 import java.nio.ByteBuffer;
 
-import org.apache.kylin.common.threadlocal.InternalThreadLocal;
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 
 public class PercentileSerializer extends DataTypeSerializer<PercentileCounter> {
     // be thread-safe and avoid repeated obj creation
-    private transient InternalThreadLocal<PercentileCounter> current = null;
+    private transient ThreadLocal<PercentileCounter> current = null;
 
     private double compression;
 
@@ -56,7 +55,7 @@ public class PercentileSerializer extends DataTypeSerializer<PercentileCounter>
 
     private PercentileCounter current() {
         if (current == null) {
-            current = new InternalThreadLocal<>();
+            current = new ThreadLocal<>();
         }
 
         PercentileCounter counter = current.get();
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/DoubleDeltaSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/DoubleDeltaSerializer.java
index 564d930..1cb63e4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/DoubleDeltaSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/DoubleDeltaSerializer.java
@@ -17,8 +17,6 @@
  */
 package org.apache.kylin.measure.topn;
 
-import org.apache.kylin.common.threadlocal.InternalThreadLocal;
-
 import java.nio.ByteBuffer;
 
 /**
@@ -43,7 +41,7 @@ public class DoubleDeltaSerializer implements java.io.Serializable {
     final private int precision;
     final private int multiplier;
 
-    transient InternalThreadLocal<long[]> deltasThreadLocal;
+    transient ThreadLocal<long[]> deltasThreadLocal;
 
     public DoubleDeltaSerializer() {
         this(2);
@@ -114,7 +112,7 @@ public class DoubleDeltaSerializer implements java.io.Serializable {
         len = Math.max(0, len);
 
         if (deltasThreadLocal == null) {
-            deltasThreadLocal = new InternalThreadLocal<>();
+            deltasThreadLocal = new ThreadLocal<>();
         }
 
         long[] deltas = deltasThreadLocal.get();
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
index 96b2d88..7c1435d 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
@@ -23,7 +23,6 @@ import java.io.ObjectInputStream;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
-import org.apache.kylin.common.threadlocal.InternalThreadLocal;
 import org.apache.kylin.common.util.BytesSerializer;
 
 import com.google.common.collect.Maps;
@@ -35,7 +34,7 @@ import com.google.common.collect.Maps;
 abstract public class DataTypeSerializer<T> implements BytesSerializer<T>, java.io.Serializable {
 
     final static Map<String, Class<?>> implementations = Maps.newHashMap();
-    protected transient InternalThreadLocal current = new InternalThreadLocal();
+    protected transient ThreadLocal current = new ThreadLocal();
     static {
         implementations.put("char", StringSerializer.class);
         implementations.put("varchar", StringSerializer.class);
@@ -119,6 +118,6 @@ abstract public class DataTypeSerializer<T> implements BytesSerializer<T>, java.
 
     private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
         in.defaultReadObject();
-        current = new InternalThreadLocal();
+        current = new ThreadLocal();
     }
 }


[kylin] 17/44: KYLIN-4297 Build cube throw NPE when partition column is not set in JDBC Data Source

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 892d68b4156aaf5acf4d05e11b61e7c58f1eded3
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Mon Dec 16 10:14:39 2019 +0800

    KYLIN-4297 Build cube throw NPE when partition column is not set in JDBC Data Source
---
 .../main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
index 560aa19..f2264e0 100644
--- a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
+++ b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
@@ -223,13 +223,11 @@ public class JdbcHiveInputBase extends HiveInputBase {
 
             String splitTableAlias;
             String splitColumn;
-            String quoteFullNamedColumn; // `table.column`
             String splitDatabase;
             TblColRef splitColRef = determineSplitColumn();
             splitTableAlias = splitColRef.getTableAlias();
 
             splitColumn = getColumnIdentityQuoted(splitColRef, jdbcMetadataDialect, metaMap, true);
-            quoteFullNamedColumn = quoteIdentifier(partCol, jdbcMetadataDialect.getDialect());
             splitDatabase = splitColRef.getColumnDesc().getTable().getDatabase();
 
             String selectSql = generateSelectDataStatementRDBMS(flatDesc, true, new String[] { partCol },
@@ -247,8 +245,8 @@ public class JdbcHiveInputBase extends HiveInputBase {
             String filedDelimiter = config.getJdbcSourceFieldDelimiter();
             int mapperNum = config.getSqoopMapperNum();
 
-            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", quoteFullNamedColumn,
-                    quoteFullNamedColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
+            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", splitColumn,
+                    splitColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
                     getTableIdentityQuoted(splitColRef.getTableRef(), metaMap, jdbcMetadataDialect, true));
             if (partitionDesc.isPartitioned()) {
                 SegmentRange segRange = flatDesc.getSegRange();
@@ -273,7 +271,7 @@ public class JdbcHiveInputBase extends HiveInputBase {
                     + "--connect \"%s\" --driver %s --username %s --password \"%s\" --query \"%s AND \\$CONDITIONS\" "
                     + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '%s' "
                     + "--null-non-string '%s' --fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl,
-                    driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, partCol, bquery,
+                    driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery,
                     sqoopNullString, sqoopNullNonString, filedDelimiter, mapperNum);
             logger.debug("sqoop cmd : {}", cmd);
             CmdStep step = new CmdStep();


[kylin] 37/44: Fix equals method does not inspect argument type

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ceefc82a01a0051792cf00d53364f92f6a25eab6
Author: nichunen <ni...@apache.org>
AuthorDate: Thu Jan 16 18:09:29 2020 +0800

    Fix equals method does not inspect argument type
---
 .../main/java/org/apache/kylin/metadata/model/TableExtDesc.java    | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
index 0890e42..7e9e8d0 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
@@ -228,7 +228,12 @@ public class TableExtDesc extends RootPersistentEntity {
     public boolean equals(Object o) {
         if (this == o)
             return true;
-        return false;
+        if (o == null || getClass() != o.getClass())
+            return false;
+
+        TableExtDesc tableExtDesc = (TableExtDesc) o;
+
+        return getResourcePath().equals(tableExtDesc.getResourcePath());
     }
 
     @Override


[kylin] 40/44: Prevent uncontrolled data used in path expression

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ec7558e87245bacbc09b8472c858c2a85d5ded41
Author: nichunen <ni...@apache.org>
AuthorDate: Mon Jan 20 17:38:39 2020 +0800

    Prevent uncontrolled data used in path expression
---
 .../main/java/org/apache/kylin/job/execution/ExecutableManager.java    | 3 +++
 .../org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java     | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index 6f8d789..3e116aa 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -122,6 +122,7 @@ public class ExecutableManager {
 
     public void updateCheckpointJob(String jobId, List<AbstractExecutable> subTasksForCheck) {
         try {
+            jobId = jobId.replaceAll("[./]", "");
             final ExecutablePO job = executableDao.getJob(jobId);
             Preconditions.checkArgument(job != null, "there is no related job for job id:" + jobId);
 
@@ -140,6 +141,7 @@ public class ExecutableManager {
     //for ut
     public void deleteJob(String jobId) {
         try {
+            jobId = jobId.replaceAll("[./]", "");
             executableDao.deleteJob(jobId);
         } catch (PersistentException e) {
             logger.error("fail to delete job:" + jobId, e);
@@ -167,6 +169,7 @@ public class ExecutableManager {
 
     public Output getOutput(String uuid) {
         try {
+            uuid = uuid.replaceAll("[./]", "");
             final ExecutableOutputPO jobOutput = executableDao.getJobOutput(uuid);
             Preconditions.checkArgument(jobOutput != null, "there is no related output for job id:" + uuid);
             return parseOutput(jobOutput);
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
index 812d3c3..11c4d01 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
@@ -57,7 +57,6 @@ public class BadQueryHistoryManager {
     }
 
     public BadQueryHistory getBadQueriesForProject(String project) throws IOException {
-        project = project.replaceAll("[./]", "");
         BadQueryHistory badQueryHistory = getStore().getResource(getResourcePathForProject(project), BAD_QUERY_INSTANCE_SERIALIZER);
         if (badQueryHistory == null) {
             badQueryHistory = new BadQueryHistory(project);
@@ -88,10 +87,12 @@ public class BadQueryHistoryManager {
     }
 
     public void removeBadQueryHistory(String project) throws IOException {
+        project = project.replaceAll("[./]", "");
         getStore().deleteResource(getResourcePathForProject(project));
     }
 
     public String getResourcePathForProject(String project) {
+        project = project.replaceAll("[./]", "");
         return ResourceStore.BAD_QUERY_RESOURCE_ROOT + "/" + project + MetadataConstants.FILE_SURFIX;
     }
 }
\ No newline at end of file


[kylin] 08/44: KYLIN-4197 Set deadline for ClientEnvExtractor (#994)

Posted by ni...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 753e1a85cb7d5cde660e66ae11449bca0b786883
Author: Xiaoxiang Yu <hi...@126.com>
AuthorDate: Tue Dec 17 18:15:19 2019 +0800

    KYLIN-4197 Set deadline for ClientEnvExtractor (#994)
    
    * KYLIN-4197 Set deadline for ClientEnvExtractor
    
    * Add configuration for lambda mode.
---
 build/bin/kylin-port-replace-util.sh               |  2 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |  4 ++
 .../rest/controller/StreamingV2Controller.java     | 11 +++-
 .../org/apache/kylin/tool/ClientEnvExtractor.java  | 52 ++++++++++++-----
 .../apache/kylin/tool/ClientEnvExtractorTest.java  | 68 ++++++++++++++++++++++
 5 files changed, 119 insertions(+), 18 deletions(-)

diff --git a/build/bin/kylin-port-replace-util.sh b/build/bin/kylin-port-replace-util.sh
index 0d6f005..28a53db 100755
--- a/build/bin/kylin-port-replace-util.sh
+++ b/build/bin/kylin-port-replace-util.sh
@@ -89,7 +89,7 @@ then
     #replace kylin.stream.node for Streaming Coordinator
     stream_node="kylin.stream.node=`hostname -f`:$new_kylin_port"
     echo "Using new kylin.stream.node: $stream_node"
-    line_count=$(awk '$0 ~ /kylin.stream.node/ {print $0}' ${KYLIN_CONFIG_FILE} | wc -l)
+    line_count=$(awk '$0 ~ /^kylin.stream.node/ {print $0}' ${KYLIN_CONFIG_FILE} | wc -l)
     if [[ $line_count -eq 0 ]]; then
         echo "kylin.stream.node=`hostname -f`:7070" >> ${KYLIN_CONFIG_FILE}
     fi
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index d44d944..4c8d437 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -2289,6 +2289,10 @@ public abstract class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("kylin.stream.auto-resubmit-after-discard-enabled", "true"));
     }
 
+    public String getHiveDatabaseLambdaCube() {
+        return this.getOptional("kylin.stream.hive.database-for-lambda-cube", DEFAULT);
+    }
+
     // ============================================================================
     // Health Check CLI
     // ============================================================================
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingV2Controller.java b/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingV2Controller.java
index cfd7086..846616e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingV2Controller.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingV2Controller.java
@@ -188,9 +188,11 @@ public class StreamingV2Controller extends BasicController {
         // validate the compatibility for input table schema and the underline hive table schema
         if (tableDesc.getSourceType() == ISourceAware.ID_KAFKA_HIVE) {
             List<FieldSchema> fields;
+            String db = tableDesc.getDatabase();
             try {
                 HiveMetaStoreClient metaStoreClient = new HiveMetaStoreClient(new HiveConf());
-                fields = metaStoreClient.getFields(KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable(), tableDesc.getName());
+                fields = metaStoreClient.getFields(db, tableDesc.getName());
+                logger.info("Checking the {} in {}", tableDesc.getName(), db);
             } catch (NoSuchObjectException noObjectException) {
                 logger.info("table not exist in hive meta store for table:" + tableDesc.getIdentity(),
                         noObjectException);
@@ -209,10 +211,12 @@ public class StreamingV2Controller extends BasicController {
             for (ColumnDesc columnDesc : tableDesc.getColumns()) {
                 FieldSchema fieldSchema = fieldSchemaMap.get(columnDesc.getName().toUpperCase(Locale.ROOT));
                 if (fieldSchema == null) {
+                    // Partition column cannot be fetched via Hive Metadata API.
                     if (!TimeDerivedColumnType.isTimeDerivedColumn(columnDesc.getName())) {
-                        incompatibleMsgs.add("column not exist in hive table:" + columnDesc.getName());
+                        incompatibleMsgs.add("Column not exist in hive table:" + columnDesc.getName());
                         continue;
                     } else {
+                        logger.info("Column not exist in hive table: {}.", columnDesc.getName());
                         continue;
                     }
                 }
@@ -486,6 +490,7 @@ public class StreamingV2Controller extends BasicController {
 
     private TableDesc deserializeTableDesc(StreamingRequestV2 streamingRequest) {
         TableDesc desc = null;
+        String db = KylinConfig.getInstanceFromEnv().getHiveDatabaseLambdaCube();
         try {
             logger.debug("Saving TableDesc " + streamingRequest.getTableData());
             desc = JsonUtil.readValue(streamingRequest.getTableData(), TableDesc.class);
@@ -502,7 +507,7 @@ public class StreamingV2Controller extends BasicController {
 
         String[] dbTable = HadoopUtil.parseHiveTableName(desc.getName());
         desc.setName(dbTable[1]);
-        desc.setDatabase(dbTable[0]);
+        desc.setDatabase(db);
         desc.getIdentity();
         return desc;
     }
diff --git a/tool/src/main/java/org/apache/kylin/tool/ClientEnvExtractor.java b/tool/src/main/java/org/apache/kylin/tool/ClientEnvExtractor.java
index 78d02c5..80741ea 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ClientEnvExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ClientEnvExtractor.java
@@ -22,6 +22,12 @@ package org.apache.kylin.tool;
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.Charset;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -38,10 +44,12 @@ public class ClientEnvExtractor extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(ClientEnvExtractor.class);
     private KylinConfig kylinConfig;
     private CliCommandExecutor cmdExecutor;
+    private ExecutorService executorService;
+    int maxWaitSeconds = 120;
 
     public ClientEnvExtractor() throws IOException {
         super();
-
+        executorService = Executors.newFixedThreadPool(1);
         packageType = "client";
         kylinConfig = KylinConfig.getInstanceFromEnv();
         cmdExecutor = kylinConfig.getCliCommandExecutor();
@@ -64,6 +72,7 @@ public class ClientEnvExtractor extends AbstractInfoExtractor {
         addShellOutput("hbase version", "hbase", "version");
         addShellOutput("hive --version", "hive", "version");
         addShellOutput("beeline --version", "hive", "beeline_version");
+        executorService.shutdownNow();
     }
 
     private void addLocalFile(String src, String destDir) {
@@ -83,20 +92,35 @@ public class ClientEnvExtractor extends AbstractInfoExtractor {
         }
     }
 
-    private void addShellOutput(String cmd, String destDir, String filename) {
-        try {
-            File destDirFile = null;
-            if (!StringUtils.isEmpty(destDir)) {
-                destDirFile = new File(exportDir, destDir);
-                FileUtils.forceMkdir(destDirFile);
-            } else {
-                destDirFile = exportDir;
+    void addShellOutput(String cmd, String destDir, String filename) {
+        Future f = executorService.submit(() -> {
+            try {
+                File destDirFile = null;
+                if (!StringUtils.isEmpty(destDir)) {
+                    destDirFile = new File(exportDir, destDir);
+                    FileUtils.forceMkdir(destDirFile);
+                } else {
+                    destDirFile = exportDir;
+                }
+                Pair<Integer, String> result = cmdExecutor.execute(cmd);
+                String output = result.getSecond();
+                FileUtils.writeStringToFile(new File(destDirFile, filename), output, Charset.defaultCharset());
+            } catch (IOException e) {
+                logger.warn("Failed to run command: " + cmd + ".", e);
             }
-            Pair<Integer, String> result = cmdExecutor.execute(cmd);
-            String output = result.getSecond();
-            FileUtils.writeStringToFile(new File(destDirFile, filename), output, Charset.defaultCharset());
-        } catch (Exception e) {
-            logger.warn("Failed to run command: " + cmd + ".", e);
+        });
+
+        try {
+            // assume most shell should return in two minutes
+            f.get(maxWaitSeconds, TimeUnit.SECONDS);
+        } catch (TimeoutException timeoutException) {
+            logger.error("Timeout for \"{}\" in {} seconds.", cmd, maxWaitSeconds);
+            executorService.shutdownNow();
+            executorService = Executors.newFixedThreadPool(1);
+        } catch (ExecutionException runtimeException) {
+            logger.error("Runtime error: {}", runtimeException.getLocalizedMessage());
+        } catch (InterruptedException otherException) {
+            // Ignore
         }
     }
 }
diff --git a/tool/src/test/java/org/apache/kylin/tool/ClientEnvExtractorTest.java b/tool/src/test/java/org/apache/kylin/tool/ClientEnvExtractorTest.java
new file mode 100644
index 0000000..d2b31b0
--- /dev/null
+++ b/tool/src/test/java/org/apache/kylin/tool/ClientEnvExtractorTest.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.kylin.tool;
+
+import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+import java.io.IOException;
+
+public class ClientEnvExtractorTest extends HBaseMetadataTestCase {
+
+    @Rule
+    public TemporaryFolder folder = new TemporaryFolder();
+
+    @Before
+    public void setup() throws Exception {
+        super.createTestMetadata();
+    }
+
+    @After
+    public void after() throws Exception {
+        super.cleanupTestMetadata();
+    }
+
+    @Test
+    public void testNormal() throws IOException {
+        File f = folder.newFolder("ClientEnvExtractorTest_testNormal");
+        ClientEnvExtractor executor = new ClientEnvExtractor();
+        executor.addShellOutput("pwd", f.getAbsolutePath(), "testNormal");
+    }
+
+    @Test(timeout = 5000)
+    public void testTimeout() throws IOException {
+        File f = folder.newFolder("ClientEnvExtractorTest_testTimeout");
+        ClientEnvExtractor executor = new ClientEnvExtractor();
+        executor.maxWaitSeconds = 2;
+        executor.addShellOutput("sleep 1000", f.getAbsolutePath(), "testTimeout");
+        executor.addShellOutput("pwd", f.getAbsolutePath(), "pwd");
+    }
+
+    @Test
+    public void testError() throws IOException {
+        File f = folder.newFolder("ClientEnvExtractorTest_testError");
+        ClientEnvExtractor executor = new ClientEnvExtractor();
+        executor.addShellOutput("CMD_NEVER_EXISTS", f.getAbsolutePath(), "testError");
+    }
+}
\ No newline at end of file