You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by li...@apache.org on 2017/02/09 02:31:14 UTC
[01/39] kylin git commit: KYLIN-2374 code review [Forced Update!]
Repository: kylin
Updated Branches:
refs/heads/master-hbase0.98 9323c7c2b -> 4e41c3637 (forced update)
KYLIN-2374 code review
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5eae37ef
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5eae37ef
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5eae37ef
Branch: refs/heads/master-hbase0.98
Commit: 5eae37ef18ca51027c6bb2cfd3410fefc7982f2a
Parents: a2a59c4
Author: shaofengshi <sh...@apache.org>
Authored: Thu Jan 26 09:55:48 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Thu Jan 26 09:55:48 2017 +0800
----------------------------------------------------------------------
build/conf/kylin.properties | 3 +-
build/deploy/spark-defaults.conf | 1 -
.../apache/kylin/common/KylinConfigBase.java | 8 --
.../kylin/common/persistence/ResourceStore.java | 3 +
.../org/apache/kylin/cube/model/CubeDesc.java | 2 +-
.../ExtendedColumnMeasureType.java | 8 +-
.../storage/hdfs/ITHDFSResourceStoreTest.java | 36 +++++++-
.../kylin/storage/hbase/HBaseResourceStore.java | 3 +-
.../kylin/storage/hdfs/HDFSResourceStore.java | 90 +++++++++++---------
9 files changed, 97 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index eceb886..43ea17d 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -211,8 +211,9 @@ kylin.engine.spark-conf.spark.executor.memory=4G
kylin.engine.spark-conf.spark.executor.cores=4
kylin.engine.spark-conf.spark.executor.instances=8
kylin.engine.spark-conf.spark.storage.memoryFraction=0.3
-kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///kylin/spark-history
+kylin.engine.spark-conf.spark.eventLog.enabled=true
kylin.engine.spark-conf.spark.eventLog.dir=hdfs\:///kylin/spark-history
+kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///kylin/spark-history
## manually upload spark-assembly jar to HDFS and then set this property will avoid repeatedly uploading jar at runtime
#kylin.engine.spark-conf.spark.yarn.jar=hdfs://namenode:8020/kylin/spark/spark-assembly-1.6.3-hadoop2.6.0.jar
#kylin.engine.spark-conf.spark.io.compression.codec=org.apache.spark.io.SnappyCompressionCodec
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/build/deploy/spark-defaults.conf
----------------------------------------------------------------------
diff --git a/build/deploy/spark-defaults.conf b/build/deploy/spark-defaults.conf
index 36c0ab3..78a4bc9 100644
--- a/build/deploy/spark-defaults.conf
+++ b/build/deploy/spark-defaults.conf
@@ -1,5 +1,4 @@
spark.yarn.submit.file.replication=1
-spark.eventLog.enabled=true
spark.yarn.max.executor.failures=3
spark.driver.extraJavaOptions=-Dhdp.version=current
spark.yarn.am.extraJavaOptions=-Dhdp.version=current
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 5932197..b1acbbf 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -193,14 +193,6 @@ abstract public class KylinConfigBase implements Serializable {
return new StringBuffer(root).append(StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).append("/").toString();
}
- public String getRawHdfsWorkingDirectory() {
- String root = getRequired("kylin.env.hdfs-working-dir");
- if (!root.endsWith("/")) {
- root += "/";
- }
- return root;
- }
-
// ============================================================================
// METADATA
// ============================================================================
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index 25a0801..c441618 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -63,6 +63,9 @@ abstract public class ResourceStore {
public static final String CUBE_STATISTICS_ROOT = "/cube_statistics";
public static final String BAD_QUERY_RESOURCE_ROOT = "/bad_query";
+
+ protected static final String DEFAULT_STORE_NAME = "kylin_metadata";
+
private static final ConcurrentHashMap<KylinConfig, ResourceStore> CACHE = new ConcurrentHashMap<KylinConfig, ResourceStore>();
private static final ArrayList<Class<? extends ResourceStore>> knownImpl = new ArrayList<Class<? extends ResourceStore>>();
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index 7e599da..5e970bf 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -891,7 +891,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
func.init(model);
allColumns.addAll(func.getParameter().getColRefs());
- if (ExtendedColumnMeasureType.FUNC_RAW.equalsIgnoreCase(m.getFunction().getExpression())) {
+ if (ExtendedColumnMeasureType.FUNC_EXTENDED_COLUMN.equalsIgnoreCase(m.getFunction().getExpression())) {
FunctionDesc functionDesc = m.getFunction();
List<TblColRef> hosts = ExtendedColumnMeasureType.getExtendedColumnHosts(functionDesc);
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
index 1b2cda3..de5ee25 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
@@ -47,8 +47,8 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
private static final Logger logger = LoggerFactory.getLogger(ExtendedColumnMeasureType.class);
- public static final String FUNC_RAW = "EXTENDED_COLUMN";
- public static final String DATATYPE_RAW = "extendedcolumn";
+ public static final String FUNC_EXTENDED_COLUMN = "EXTENDED_COLUMN";
+ public static final String DATATYPE_EXTENDED_COLUMN = "extendedcolumn";
private final DataType dataType;
public static class Factory extends MeasureTypeFactory<ByteArray> {
@@ -60,12 +60,12 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
@Override
public String getAggrFunctionName() {
- return FUNC_RAW;
+ return FUNC_EXTENDED_COLUMN;
}
@Override
public String getAggrDataTypeName() {
- return DATATYPE_RAW;
+ return DATATYPE_EXTENDED_COLUMN;
}
@Override
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
index ff66048..ec12722 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
@@ -18,21 +18,28 @@
package org.apache.kylin.storage.hdfs;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ResourceStoreTest;
import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.common.util.HadoopUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import static junit.framework.TestCase.assertTrue;
+
public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
KylinConfig kylinConfig;
+ FileSystem fs;
@Before
public void setup() throws Exception {
this.createTestMetadata();
kylinConfig = KylinConfig.getInstanceFromEnv();
+ fs = HadoopUtil.getWorkingFileSystem();
}
@After
@@ -41,12 +48,37 @@ public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
}
@Test
- public void testResourceStoreBasic() throws Exception {
+ public void testBasic() throws Exception {
+ String oldUrl = kylinConfig.getMetadataUrl();
+ String path = "/kylin/kylin_metadata/metadata";
+ kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
+ HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
+ ResourceStoreTest.testAStore(store);
+ kylinConfig.setProperty("kylin.metadata.url", oldUrl);
+ assertTrue(fs.exists(new Path(path)));
+ }
+
+ @Test
+ public void testQualifiedName() throws Exception {
String oldUrl = kylinConfig.getMetadataUrl();
- kylinConfig.setProperty("kylin.metadata.url", "kylin_metadata@hdfs");
+ String path = "hdfs:///kylin/kylin_metadata/metadata_test1";
+ kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
ResourceStoreTest.testAStore(store);
kylinConfig.setProperty("kylin.metadata.url", oldUrl);
+ assertTrue(fs.exists(new Path(path)));
}
+ @Test
+ public void testFullQualifiedName() throws Exception {
+ String oldUrl = kylinConfig.getMetadataUrl();
+ String path = "hdfs://sandbox.hortonworks.com:8020/kylin/kylin_metadata/metadata_test2";
+ kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
+ HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
+ ResourceStoreTest.testAStore(store);
+ kylinConfig.setProperty("kylin.metadata.url", oldUrl);
+ assertTrue(fs.exists(new Path(path)));
+ }
+
+
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 0901b54..501f1e4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -59,7 +59,6 @@ public class HBaseResourceStore extends ResourceStore {
private static final Logger logger = LoggerFactory.getLogger(HBaseResourceStore.class);
- private static final String DEFAULT_TABLE_NAME = "kylin_metadata";
private static final String FAMILY = "f";
private static final byte[] B_FAMILY = Bytes.toBytes(FAMILY);
private static final String COLUMN = "c";
@@ -80,7 +79,7 @@ public class HBaseResourceStore extends ResourceStore {
String metadataUrl = kylinConfig.getMetadataUrl();
// split TABLE@HBASE_URL
int cut = metadataUrl.indexOf('@');
- tableNameBase = cut < 0 ? DEFAULT_TABLE_NAME : metadataUrl.substring(0, cut);
+ tableNameBase = cut < 0 ? DEFAULT_STORE_NAME : metadataUrl.substring(0, cut);
hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
if (!hbaseUrl.equals("hbase"))
throw new IOException("Can not create HBaseResourceStore. Url not match. Url:" + hbaseUrl);
http://git-wip-us.apache.org/repos/asf/kylin/blob/5eae37ef/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
index 38acfb0..d24d3b4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
@@ -46,11 +46,7 @@ public class HDFSResourceStore extends ResourceStore {
private static final Logger logger = LoggerFactory.getLogger(HDFSResourceStore.class);
- private static final long DEFAULT_ACQUIRE_LOCK_TIMEOUT = 10;
-
- private static final String DEFAULT_FOLDER_NAME = "kylin_metadata";
-
- private static final String DEFAULT_METADATA_FOLDER_NAME = "hdfs_metadata";
+ private static final long DEFAULT_ACQUIRE_LOCK_TIMEOUT = 2;
private Path hdfsMetaPath;
@@ -62,42 +58,43 @@ public class HDFSResourceStore extends ResourceStore {
super(kylinConfig);
String metadataUrl = kylinConfig.getMetadataUrl();
int cut = metadataUrl.indexOf('@');
- String metaDirName = cut < 0 ? DEFAULT_FOLDER_NAME : metadataUrl.substring(0, cut);
- String hdfsUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
- if (!hdfsUrl.equals("hdfs"))
- throw new IOException("Can not create HDFSResourceStore. Url not match. Url:" + hdfsUrl);
- metaDirName += "/" + DEFAULT_METADATA_FOLDER_NAME;
- logger.info("meta dir name :" + metaDirName);
- createMetaFolder(metaDirName, kylinConfig);
- }
-
- private void createMetaFolder(String metaDirName, KylinConfig kylinConfig) throws Exception {
- String hdfsWorkingDir = kylinConfig.getHdfsWorkingDirectory();
- fs = HadoopUtil.getFileSystem(hdfsWorkingDir);
- logger.info("hdfs working dir : " + hdfsWorkingDir);
- Path hdfsWorkingPath = new Path(hdfsWorkingDir);
- if (!fs.exists(hdfsWorkingPath)) {
- throw new IOException("HDFS working dir not exist");
+ if (cut < 0) {
+ throw new IOException("kylin.metadata.url not recognized for HDFSResourceStore: " + metadataUrl);
}
+ String suffix = metadataUrl.substring(cut + 1);
+ if (!suffix.equals("hdfs"))
+ throw new IOException("kylin.metadata.url not recognized for HDFSResourceStore:" + suffix);
+
+ String path = metadataUrl.substring(0, cut);
+ fs = HadoopUtil.getFileSystem(path);
+ Path metadataPath = new Path(path);
//create lock manager
- this.lockManager = new LockManager(kylinConfig, kylinConfig.getRawHdfsWorkingDirectory() + metaDirName);
+ this.lockManager = new LockManager(kylinConfig, getRelativePath(metadataPath));
+ if (fs.exists(metadataPath) == false) {
+ logger.warn("Path not exist in HDFS, create it: " + path);
+ createMetaFolder(metadataPath, kylinConfig);
+ }
+
+ hdfsMetaPath = metadataPath;
+ logger.info("hdfs meta path : " + hdfsMetaPath.toString());
+
+ }
+
+
+
+ private void createMetaFolder(Path metaDirName, KylinConfig kylinConfig) throws Exception {
//create hdfs meta path
- hdfsMetaPath = new Path(hdfsWorkingPath, metaDirName);
- if (!fs.exists(hdfsMetaPath)) {
- ResourceLock lock = lockManager.getLock(lockManager.getLockPath("/"));
- try {
- if (lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.MINUTES)) {
- logger.info("get root lock successfully");
- if (!fs.exists(hdfsMetaPath)) {
- fs.mkdirs(hdfsMetaPath);
- logger.info("create hdfs meta path");
- }
+ ResourceLock lock = lockManager.getLock(getRelativePath(metaDirName));
+ try {
+ if (lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.SECONDS)) {
+ if (!fs.exists(metaDirName)) {
+ fs.mkdirs(metaDirName);
}
- } finally {
- lockManager.releaseLock(lock);
}
+ } finally {
+ lockManager.releaseLock(lock);
}
- logger.info("hdfs meta path : " + hdfsMetaPath.toString());
+ logger.info("hdfs meta path created: " + metaDirName.toString());
}
@Override
@@ -170,7 +167,7 @@ public class HDFSResourceStore extends ResourceStore {
ResourceLock lock = null;
try {
lock = lockManager.getLock(resPath);
- lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.MINUTES);
+ lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.SECONDS);
in = fs.open(p);
long t = in.readLong();
return t;
@@ -192,7 +189,7 @@ public class HDFSResourceStore extends ResourceStore {
ResourceLock lock = null;
try {
lock = lockManager.getLock(resPath);
- lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.MINUTES);
+ lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.SECONDS);
out = fs.create(p, true);
out.writeLong(ts);
IOUtils.copy(content, out);
@@ -228,7 +225,7 @@ public class HDFSResourceStore extends ResourceStore {
ResourceLock lock = null;
try {
lock = lockManager.getLock(resPath);
- lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.MINUTES);
+ lock.acquire(DEFAULT_ACQUIRE_LOCK_TIMEOUT, TimeUnit.SECONDS);
Path p = getRealHDFSPath(resPath);
if (fs.exists(p)) {
fs.delete(p, true);
@@ -253,4 +250,21 @@ public class HDFSResourceStore extends ResourceStore {
return new Path(this.hdfsMetaPath, resourcePath);
}
+ private static String getRelativePath(Path hdfsPath) {
+ String path = hdfsPath.toString();
+ int index = path.indexOf("://");
+ if (index > 0) {
+ path = path.substring(index + 3);
+ }
+
+ if (path.startsWith("/") == false) {
+ if (path.indexOf("/") > 0) {
+ path = path.substring(path.indexOf("/"));
+ } else {
+ path = "/" + path;
+ }
+ }
+ return path;
+ }
+
}
[20/39] kylin git commit: Merge branch 'KYLIN-2361'
Posted by li...@apache.org.
Merge branch 'KYLIN-2361'
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4047e8dc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4047e8dc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4047e8dc
Branch: refs/heads/master-hbase0.98
Commit: 4047e8dc5bf8aad7c8db79abb5ef2c3be15cd622
Parents: 1925284 9a3bd71
Author: Billy Liu <bi...@apache.org>
Authored: Mon Feb 6 14:37:46 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Mon Feb 6 14:37:46 2017 +0800
----------------------------------------------------------------------
build/script/download-tomcat.sh | 8 +-
pom.xml | 2 +-
.../java/org/apache/kylin/rest/DebugTomcat.java | 16 +-
.../kylin/ext/CustomizedWebappClassloader.java | 9 +-
.../kylin/ext/OrderedWebResourceRoot.java | 286 +++++++++++++++++++
.../kylin/ext/WebappOrderedClassLoader.java | 66 +++++
6 files changed, 369 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
[15/39] kylin git commit: KYLIN-2421 Add spark engine to Integration
Test
Posted by li...@apache.org.
KYLIN-2421 Add spark engine to Integration Test
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5da53936
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5da53936
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5da53936
Branch: refs/heads/master-hbase0.98
Commit: 5da53936502136c0d56236e148da2751aa1462c9
Parents: 855301d
Author: shaofengshi <sh...@apache.org>
Authored: Fri Jan 20 11:28:57 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 4 19:37:59 2017 +0800
----------------------------------------------------------------------
.../apache/kylin/common/KylinConfigBase.java | 10 ++
.../measure/bitmap/RoaringBitmapCounter.java | 3 +-
.../bitmap/RoaringBitmapCounterFactory.java | 3 +-
.../measure/percentile/PercentileCounter.java | 22 ++-
.../percentile/PercentileSerializer.java | 6 +-
.../kylin/measure/topn/TopNAggregator.java | 5 +-
.../percentile/PercentileCounterTest.java | 47 ++++++
.../kylin/engine/mr/BatchCubingJobBuilder2.java | 8 +-
.../engine/spark/KylinKryoRegistrator.java | 161 +++++++++++++++++++
.../spark/SparkBatchCubingJobBuilder2.java | 12 +-
.../apache/kylin/engine/spark/SparkCubing.java | 123 +-------------
.../kylin/engine/spark/SparkCubingByLayer.java | 65 ++++----
.../localmeta/cube_desc/ci_inner_join_cube.json | 14 +-
examples/test_case_data/sandbox/core-site.xml | 2 +
.../test_case_data/sandbox/kylin.properties | 29 ++--
kylin-it/pom.xml | 21 +++
.../kylin/provision/BuildCubeWithEngine.java | 25 +++
17 files changed, 355 insertions(+), 201 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 6a88fc4..fe15b1e 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -68,6 +68,12 @@ abstract public class KylinConfigBase implements Serializable {
return sparkHome;
}
+ sparkHome = System.getProperty("SPARK_HOME");
+ if (StringUtils.isNotEmpty(sparkHome)) {
+ logger.info("SPARK_HOME was set to " + sparkHome);
+ return sparkHome;
+ }
+
return getKylinHome() + File.separator + "spark";
}
@@ -760,6 +766,10 @@ abstract public class KylinConfigBase implements Serializable {
return getOptional("kylin.engine.spark.env.hadoop-conf-dir", "");
}
+ public void setHadoopConfDir(String hadoopConfDir) {
+ setProperty("kylin.engine.spark.env.hadoop-conf-dir", hadoopConfDir);
+ }
+
public String getSparkAdditionalJars() {
return getOptional("kylin.engine.spark.additional-jars", "");
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
index fb9dcfc..eec45f2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
@@ -24,6 +24,7 @@ import org.roaringbitmap.buffer.MutableRoaringBitmap;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.Serializable;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.Iterator;
@@ -31,7 +32,7 @@ import java.util.Iterator;
/**
* A {@link BitmapCounter} based on roaring bitmap.
*/
-public class RoaringBitmapCounter implements BitmapCounter {
+public class RoaringBitmapCounter implements BitmapCounter, Serializable {
private ImmutableRoaringBitmap bitmap;
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
index a71df95..822afa2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
@@ -21,9 +21,10 @@ package org.apache.kylin.measure.bitmap;
import org.roaringbitmap.buffer.MutableRoaringBitmap;
import java.io.IOException;
+import java.io.Serializable;
import java.nio.ByteBuffer;
-public class RoaringBitmapCounterFactory implements BitmapCounterFactory {
+public class RoaringBitmapCounterFactory implements BitmapCounterFactory, Serializable {
public static final BitmapCounterFactory INSTANCE = new RoaringBitmapCounterFactory();
private RoaringBitmapCounterFactory() {}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
index bf505cf..f86a796 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
@@ -18,6 +18,9 @@
package org.apache.kylin.measure.percentile;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.nio.ByteBuffer;
@@ -30,7 +33,7 @@ public class PercentileCounter implements Serializable {
double compression;
double quantileRatio;
- TDigest registers;
+ transient TDigest registers;
public PercentileCounter(double compression) {
this(compression, INVALID_QUANTILE_RATIO);
@@ -94,4 +97,21 @@ public class PercentileCounter implements Serializable {
public void clear() {
reInitRegisters();
}
+
+ private void writeObject(ObjectOutputStream out) throws IOException {
+ registers.compress();
+ int bound = registers.byteSize();
+ ByteBuffer buf = ByteBuffer.allocate(bound);
+ registers.asSmallBytes(buf);
+ out.defaultWriteObject();
+ out.writeInt(bound);
+ out.write(buf.array(), 0, bound);
+ }
+ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
+ in.defaultReadObject();
+ int bound = in.readInt();
+ ByteBuffer buf = ByteBuffer.allocate(bound);
+ in.read(buf.array(), 0, bound);
+ registers = AVLTreeDigest.fromBytes(buf);
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
index a0a2a77..d7e4204 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileSerializer.java
@@ -25,7 +25,7 @@ import org.apache.kylin.metadata.datatype.DataTypeSerializer;
public class PercentileSerializer extends DataTypeSerializer<PercentileCounter> {
// be thread-safe and avoid repeated obj creation
- private ThreadLocal<PercentileCounter> current = new ThreadLocal<>();
+ private transient ThreadLocal<PercentileCounter> current = null;
private double compression;
@@ -49,6 +49,10 @@ public class PercentileSerializer extends DataTypeSerializer<PercentileCounter>
}
private PercentileCounter current() {
+ if (current == null) {
+ current = new ThreadLocal<>();
+ }
+
PercentileCounter counter = current.get();
if (counter == null) {
counter = new PercentileCounter(compression);
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNAggregator.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNAggregator.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNAggregator.java
index b5e316f..bc2bc36 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNAggregator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNAggregator.java
@@ -46,10 +46,11 @@ public class TopNAggregator extends MeasureAggregator<TopNCounter<ByteArray>> {
@Override
public TopNCounter<ByteArray> aggregate(TopNCounter<ByteArray> value1, TopNCounter<ByteArray> value2) {
- TopNCounter<ByteArray> aggregated = new TopNCounter<>(capacity * 2);
+ int thisCapacity = value1.getCapacity();
+ TopNCounter<ByteArray> aggregated = new TopNCounter<>(thisCapacity * 2);
aggregated.merge(value1);
aggregated.merge(value2);
- aggregated.retain(capacity);
+ aggregated.retain(thisCapacity);
return aggregated;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
index abaa409..94a1233 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
@@ -20,11 +20,19 @@ package org.apache.kylin.measure.percentile;
import static org.junit.Assert.assertEquals;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
import java.util.Collections;
import java.util.List;
import java.util.Random;
+import org.apache.commons.io.IOUtils;
import org.apache.kylin.common.util.MathUtil;
+import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Lists;
@@ -76,4 +84,43 @@ public class PercentileCounterTest {
assertEquals(expectedResult, actualResult, 0);
}
+
+ @Test
+ public void testSerialization() {
+ double compression = 100;
+ double quantile = 0.5;
+ ByteArrayOutputStream os = new ByteArrayOutputStream(1024);
+ ObjectOutputStream out = null;
+ PercentileCounter origin_counter = null;
+ try {
+ out = new ObjectOutputStream(os);
+
+ origin_counter = new PercentileCounter(compression, quantile);
+ out.writeObject(origin_counter);
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ IOUtils.closeQuietly(out);
+ }
+
+ InputStream is = new ByteArrayInputStream(os.toByteArray());
+ PercentileCounter serialized_counter = null;
+ ObjectInputStream in = null;
+ try {
+ in = new ObjectInputStream(is);
+ serialized_counter = (PercentileCounter)in.readObject();
+
+ Assert.assertNotNull(serialized_counter);
+ Assert.assertNotNull(serialized_counter.registers);
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (ClassNotFoundException e) {
+ e.printStackTrace();
+ } finally {
+ IOUtils.closeQuietly(os);
+ IOUtils.closeQuietly(is);
+ }
+
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
index 0f604e2..106077c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
@@ -31,7 +31,6 @@ import org.apache.kylin.engine.mr.steps.NDCuboidJob;
import org.apache.kylin.engine.mr.steps.SaveStatisticsStep;
import org.apache.kylin.job.constant.ExecutableConstants;
import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.execution.AbstractExecutable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,7 +64,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
// Phase 3: Build Cube
addLayerCubingSteps(result, jobId, cuboidRootPath); // layer cubing, only selected algorithm will execute
- result.addTask(createInMemCubingStep(jobId, cuboidRootPath)); // inmem cubing, only selected algorithm will execute
+ addInMemCubingSteps(result, jobId, cuboidRootPath); // inmem cubing, only selected algorithm will execute
outputSide.addStepPhase3_BuildCube(result);
// Phase 4: Update Metadata & Cleanup
@@ -96,7 +95,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
return result;
}
- protected AbstractExecutable createInMemCubingStep(String jobId, String cuboidRootPath) {
+ protected void addInMemCubingSteps(final CubingJob result, String jobId, String cuboidRootPath) {
// base cuboid job
MapReduceExecutable cubeStep = new MapReduceExecutable();
@@ -113,8 +112,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
cubeStep.setMapReduceParams(cmd.toString());
cubeStep.setMapReduceJobClass(getInMemCuboidJob());
-// cubeStep.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES + "," + CubingJob.CUBE_SIZE_BYTES);
- return cubeStep;
+ result.addTask(cubeStep);
}
protected Class<? extends AbstractHadoopJob> getInMemCuboidJob() {
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
new file mode 100644
index 0000000..3d33aa8
--- /dev/null
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.engine.spark;
+
+import com.esotericsoftware.kryo.Kryo;
+import com.google.common.collect.Sets;
+import com.google.common.hash.Hashing;
+import org.apache.kylin.measure.MeasureIngester;
+import org.apache.spark.serializer.KryoRegistrator;
+import org.reflections.Reflections;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Serializable;
+import java.util.Set;
+
+/**
+ * Registrar that registers classes and serializers with Kryo
+ */
+ public class KylinKryoRegistrator implements KryoRegistrator {
+ protected static final Logger logger = LoggerFactory.getLogger(KylinKryoRegistrator.class);
+
+ /**
+ * Registers with Kryo every class that may travel through Spark's shuffle
+ * while building a cube. Spark is configured with
+ * spark.kryo.registrationRequired=true, so any class missing from this
+ * list causes a runtime failure rather than a silent fallback.
+ *
+ * NOTE(review): the identifiers consistently misspell "kryo" as "kyro"
+ * and "Quietly" as "Quitely"; renaming is cosmetic but would improve
+ * readability in a follow-up.
+ *
+ * @param kryo the Kryo instance provided by Spark's serializer
+ */
+ @Override
+ public void registerClasses(Kryo kryo) {
+
+ // LinkedHashSet keeps registration order deterministic across JVMs.
+ Set<Class> kyroClasses = Sets.newLinkedHashSet();
+ // Common JDK arrays and collections seen in shuffled records.
+ kyroClasses.add(byte[].class);
+ kyroClasses.add(int[].class);
+ kyroClasses.add(byte[][].class);
+ kyroClasses.add(String[].class);
+ kyroClasses.add(String[][].class);
+ kyroClasses.add(Object[].class);
+ kyroClasses.add(java.math.BigDecimal.class);
+ kyroClasses.add(java.util.ArrayList.class);
+ kyroClasses.add(java.util.LinkedList.class);
+ kyroClasses.add(java.util.HashSet.class);
+ kyroClasses.add(java.util.LinkedHashSet.class);
+ kyroClasses.add(java.util.LinkedHashMap.class);
+ kyroClasses.add(java.util.HashMap.class);
+ kyroClasses.add(java.util.TreeMap.class);
+ kyroClasses.add(java.util.Properties.class);
+ // Bulk-register Kylin classes discovered via classpath scanning, so new
+ // Serializable types in these packages do not need manual additions.
+ kyroClasses.addAll(new Reflections("org.apache.kylin").getSubTypesOf(Serializable.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.dimension").getSubTypesOf(Serializable.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.cube").getSubTypesOf(Serializable.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.cube.model").getSubTypesOf(Object.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.metadata").getSubTypesOf(Object.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.model").getSubTypesOf(Object.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.measure").getSubTypesOf(Object.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.datatype").getSubTypesOf(org.apache.kylin.common.util.BytesSerializer.class));
+ kyroClasses.addAll(new Reflections("org.apache.kylin.measure").getSubTypesOf(MeasureIngester.class));
+
+ // Spark SQL / Scala / JDK types that appear in DataFrame rows and caches.
+ kyroClasses.add(org.apache.spark.sql.Row[].class);
+ kyroClasses.add(org.apache.spark.sql.Row.class);
+ kyroClasses.add(org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema.class);
+ kyroClasses.add(org.apache.spark.sql.types.StructType.class);
+ kyroClasses.add(org.apache.spark.sql.types.StructField[].class);
+ kyroClasses.add(org.apache.spark.sql.types.StructField.class);
+ kyroClasses.add(org.apache.spark.sql.types.DateType$.class);
+ kyroClasses.add(org.apache.spark.sql.types.Metadata.class);
+ kyroClasses.add(org.apache.spark.sql.types.StringType$.class);
+ kyroClasses.add(Hashing.murmur3_128().getClass());
+ kyroClasses.add(org.apache.spark.sql.execution.columnar.CachedBatch.class);
+ kyroClasses.add(org.apache.spark.sql.types.Decimal.class);
+ kyroClasses.add(scala.math.BigDecimal.class);
+ kyroClasses.add(java.math.MathContext.class);
+ kyroClasses.add(java.math.RoundingMode.class);
+ kyroClasses.add(java.util.concurrent.ConcurrentHashMap.class);
+ kyroClasses.add(java.util.Random.class);
+ kyroClasses.add(java.util.concurrent.atomic.AtomicLong.class);
+
+ // Kylin model/metadata/measure classes not reached by the scans above
+ // (array types, nested enums, and classes from other modules).
+ kyroClasses.add(org.apache.kylin.metadata.model.ColumnDesc[].class);
+ kyroClasses.add(org.apache.kylin.metadata.model.JoinTableDesc[].class);
+ kyroClasses.add(org.apache.kylin.metadata.model.TblColRef[].class);
+ kyroClasses.add(org.apache.kylin.metadata.model.DataModelDesc.RealizationCapacity.class);
+ kyroClasses.add(org.apache.kylin.metadata.model.DataModelDesc.TableKind.class);
+ kyroClasses.add(org.apache.kylin.metadata.model.PartitionDesc.DefaultPartitionConditionBuilder.class);
+ kyroClasses.add(org.apache.kylin.metadata.model.PartitionDesc.PartitionType.class);
+ kyroClasses.add(org.apache.kylin.cube.model.CubeDesc.DeriveInfo.class);
+ kyroClasses.add(org.apache.kylin.cube.model.CubeDesc.DeriveType.class);
+ kyroClasses.add(org.apache.kylin.cube.model.HBaseColumnFamilyDesc[].class);
+ kyroClasses.add(org.apache.kylin.cube.model.HBaseColumnDesc[].class);
+ kyroClasses.add(org.apache.kylin.metadata.model.MeasureDesc[].class);
+ kyroClasses.add(org.apache.kylin.cube.model.RowKeyColDesc[].class);
+ kyroClasses.add(org.apache.kylin.common.util.Array.class);
+ kyroClasses.add(org.apache.kylin.metadata.model.Segments.class);
+ kyroClasses.add(org.apache.kylin.metadata.realization.RealizationStatusEnum.class);
+ kyroClasses.add(org.apache.kylin.metadata.model.SegmentStatusEnum.class);
+ kyroClasses.add(org.apache.kylin.measure.BufferedMeasureCodec.class);
+ kyroClasses.add(org.apache.kylin.cube.kv.RowKeyColumnIO.class);
+ kyroClasses.add(org.apache.kylin.measure.MeasureCodec.class);
+ kyroClasses.add(org.apache.kylin.measure.MeasureAggregator[].class);
+ kyroClasses.add(org.apache.kylin.metadata.datatype.DataTypeSerializer[].class);
+ kyroClasses.add(org.apache.kylin.cube.kv.CubeDimEncMap.class);
+ kyroClasses.add(org.apache.kylin.measure.basic.BasicMeasureType.class);
+ kyroClasses.add(org.apache.kylin.common.util.SplittedBytes[].class);
+ kyroClasses.add(org.apache.kylin.common.util.SplittedBytes.class);
+ kyroClasses.add(org.apache.kylin.cube.kv.RowKeyEncoderProvider.class);
+ kyroClasses.add(org.apache.kylin.cube.kv.RowKeyEncoder.class);
+ kyroClasses.add(org.apache.kylin.measure.basic.BigDecimalIngester.class);
+ kyroClasses.add(org.apache.kylin.dimension.DictionaryDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.IntDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.BooleanDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.DateDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.FixedLenDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.FixedLenHexDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.IntegerDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.OneMoreByteVLongDimEnc.class);
+ kyroClasses.add(org.apache.kylin.dimension.TimeDimEnc.class);
+ kyroClasses.add(org.apache.kylin.cube.model.AggregationGroup.HierarchyMask.class);
+ kyroClasses.add(org.apache.kylin.measure.topn.DoubleDeltaSerializer.class);
+ kyroClasses.add(org.apache.kylin.measure.bitmap.RoaringBitmapCounter.class);
+ kyroClasses.add(org.roaringbitmap.buffer.MutableRoaringArray.class);
+ kyroClasses.add(org.roaringbitmap.buffer.MappeableContainer[].class);
+ kyroClasses.add(org.roaringbitmap.buffer.MutableRoaringBitmap.class);
+ kyroClasses.add(org.roaringbitmap.buffer.MappeableArrayContainer.class);
+ kyroClasses.add(org.apache.kylin.measure.bitmap.RoaringBitmapCounterFactory.class);
+ kyroClasses.add(org.apache.kylin.measure.topn.Counter.class);
+ kyroClasses.add(org.apache.kylin.measure.topn.TopNCounter.class);
+ kyroClasses.add(org.apache.kylin.measure.percentile.PercentileSerializer.class);
+ kyroClasses.add(com.tdunning.math.stats.AVLTreeDigest.class);
+ kyroClasses.add(com.tdunning.math.stats.Centroid.class);
+
+ // Classes that are package-private or may be absent from the classpath;
+ // looked up by name and logged (not fatal) when not found.
+ addClassQuitely(kyroClasses, "com.google.common.collect.EmptyImmutableList");
+ addClassQuitely(kyroClasses, "java.nio.HeapShortBuffer");
+ addClassQuitely(kyroClasses, "scala.collection.immutable.Map$EmptyMap$");
+ addClassQuitely(kyroClasses, "org.apache.spark.sql.catalyst.expressions.GenericInternalRow");
+ addClassQuitely(kyroClasses, "org.apache.spark.unsafe.types.UTF8String");
+ addClassQuitely(kyroClasses, "com.tdunning.math.stats.AVLGroupTree");
+
+ for (Class kyroClass : kyroClasses) {
+ kryo.register(kyroClass);
+ }
+
+ // TODO: should use JavaSerializer for PercentileCounter once the Kryo bug is fixed: https://github.com/EsotericSoftware/kryo/issues/489
+ // kryo.register(PercentileCounter.class, new JavaSerializer());
+ }
+
+ /**
+ * Adds the class named by {@code className} to the registration set,
+ * logging (instead of propagating) a ClassNotFoundException so an
+ * optional/unavailable class does not abort registration.
+ */
+ private static void addClassQuitely(Set<Class> kyroClasses, String className) {
+ try {
+ kyroClasses.add(Class.forName(className));
+ } catch (ClassNotFoundException e) {
+ logger.error("failed to load class", e);
+ }
+ }
+ }
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
index 208a0c9..76b73b6 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
@@ -26,7 +26,6 @@ import org.apache.kylin.engine.EngineFactory;
import org.apache.kylin.engine.mr.BatchCubingJobBuilder2;
import org.apache.kylin.engine.mr.CubingJob;
import org.apache.kylin.job.constant.ExecutableConstants;
-import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -43,11 +42,6 @@ public class SparkBatchCubingJobBuilder2 extends BatchCubingJobBuilder2 {
@Override
protected void addLayerCubingSteps(final CubingJob result, final String jobId, final String cuboidRootPath) {
-
- }
-
- @Override
- protected AbstractExecutable createInMemCubingStep(String jobId, String cuboidRootPath) {
IJoinedFlatTableDesc flatTableDesc = EngineFactory.getJoinedFlatTableDesc(seg);
final SparkExecutable sparkExecutable = new SparkExecutable();
sparkExecutable.setClassName(SparkCubingByLayer.class.getName());
@@ -71,7 +65,11 @@ public class SparkBatchCubingJobBuilder2 extends BatchCubingJobBuilder2 {
sparkExecutable.setJars(jars.toString());
sparkExecutable.setName(ExecutableConstants.STEP_NAME_BUILD_SPARK_CUBE);
- return sparkExecutable;
+ result.addTask(sparkExecutable);
+ }
+
+ @Override
+ protected void addInMemCubingSteps(final CubingJob result, String jobId, String cuboidRootPath) {
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
index 0437a80..2a0981a 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
@@ -20,10 +20,8 @@ package org.apache.kylin.engine.spark;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
-import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -31,17 +29,13 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
-import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
-import javax.annotation.Nullable;
-
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
@@ -84,7 +78,6 @@ import org.apache.kylin.engine.spark.util.IteratorUtils;
import org.apache.kylin.measure.BufferedMeasureCodec;
import org.apache.kylin.measure.MeasureAggregators;
import org.apache.kylin.measure.hllc.HLLCounter;
-import org.apache.kylin.measure.MeasureIngester;
import org.apache.kylin.metadata.model.FunctionDesc;
import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
import org.apache.kylin.metadata.model.MeasureDesc;
@@ -108,16 +101,12 @@ import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
-import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
@@ -545,109 +534,6 @@ public class SparkCubing extends AbstractApplication {
}
}
- public static Collection<String> getKyroClasses() {
- Set<Class> kyroClasses = Sets.newHashSet();
- kyroClasses.addAll(new Reflections("org.apache.kylin").getSubTypesOf(Serializable.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.dimension").getSubTypesOf(Serializable.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.cube").getSubTypesOf(Serializable.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.cube.model").getSubTypesOf(Object.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.metadata").getSubTypesOf(Object.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.model").getSubTypesOf(Object.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.measure").getSubTypesOf(Object.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.metadata.datatype").getSubTypesOf(org.apache.kylin.common.util.BytesSerializer.class));
- kyroClasses.addAll(new Reflections("org.apache.kylin.measure").getSubTypesOf(MeasureIngester.class));
-
- kyroClasses.add(HashMap.class);
- kyroClasses.add(org.apache.spark.sql.Row[].class);
- kyroClasses.add(org.apache.spark.sql.Row.class);
- kyroClasses.add(org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema.class);
- kyroClasses.add(org.apache.spark.sql.types.StructType.class);
- kyroClasses.add(org.apache.spark.sql.types.StructField[].class);
- kyroClasses.add(org.apache.spark.sql.types.StructField.class);
- kyroClasses.add(org.apache.spark.sql.types.DateType$.class);
- kyroClasses.add(org.apache.spark.sql.types.Metadata.class);
- kyroClasses.add(org.apache.spark.sql.types.StringType$.class);
- kyroClasses.add(Hashing.murmur3_128().getClass());
- kyroClasses.add(org.apache.spark.sql.execution.columnar.CachedBatch.class);
- kyroClasses.add(Object[].class);
- kyroClasses.add(int[].class);
- kyroClasses.add(byte[].class);
- kyroClasses.add(byte[][].class);
- kyroClasses.add(String[].class);
- kyroClasses.add(String[][].class);
- kyroClasses.add(org.apache.spark.sql.types.Decimal.class);
- kyroClasses.add(scala.math.BigDecimal.class);
- kyroClasses.add(java.math.BigDecimal.class);
- kyroClasses.add(java.math.MathContext.class);
- kyroClasses.add(java.math.RoundingMode.class);
- kyroClasses.add(java.util.ArrayList.class);
- kyroClasses.add(java.util.LinkedList.class);
- kyroClasses.add(java.util.HashSet.class);
- kyroClasses.add(java.util.LinkedHashSet.class);
- kyroClasses.add(java.util.LinkedHashMap.class);
- kyroClasses.add(java.util.TreeMap.class);
- kyroClasses.add(java.util.concurrent.ConcurrentHashMap.class);
-
- kyroClasses.add(java.util.HashMap.class);
- kyroClasses.add(java.util.Properties.class);
- kyroClasses.add(org.apache.kylin.metadata.model.ColumnDesc[].class);
- kyroClasses.add(org.apache.kylin.metadata.model.JoinTableDesc[].class);
- kyroClasses.add(org.apache.kylin.metadata.model.TblColRef[].class);
- kyroClasses.add(org.apache.kylin.metadata.model.DataModelDesc.RealizationCapacity.class);
- kyroClasses.add(org.apache.kylin.metadata.model.DataModelDesc.TableKind.class);
- kyroClasses.add(org.apache.kylin.metadata.model.PartitionDesc.DefaultPartitionConditionBuilder.class);
- kyroClasses.add(org.apache.kylin.metadata.model.PartitionDesc.PartitionType.class);
- kyroClasses.add(org.apache.kylin.cube.model.CubeDesc.DeriveInfo.class);
- kyroClasses.add(org.apache.kylin.cube.model.CubeDesc.DeriveType.class);
- kyroClasses.add(org.apache.kylin.cube.model.HBaseColumnFamilyDesc[].class);
- kyroClasses.add(org.apache.kylin.cube.model.HBaseColumnDesc[].class);
- kyroClasses.add(org.apache.kylin.metadata.model.MeasureDesc[].class);
- kyroClasses.add(org.apache.kylin.cube.model.RowKeyColDesc[].class);
- kyroClasses.add(org.apache.kylin.common.util.Array.class);
- kyroClasses.add(org.apache.kylin.metadata.model.Segments.class);
- kyroClasses.add(org.apache.kylin.metadata.realization.RealizationStatusEnum.class);
- kyroClasses.add(org.apache.kylin.metadata.model.SegmentStatusEnum.class);
- kyroClasses.add(org.apache.kylin.measure.BufferedMeasureCodec.class);
- kyroClasses.add(org.apache.kylin.cube.kv.RowKeyColumnIO.class);
- kyroClasses.add(org.apache.kylin.measure.MeasureCodec.class);
- kyroClasses.add(org.apache.kylin.measure.MeasureAggregator[].class);
- kyroClasses.add(org.apache.kylin.metadata.datatype.DataTypeSerializer[].class);
- kyroClasses.add(org.apache.kylin.cube.kv.CubeDimEncMap.class);
- kyroClasses.add(org.apache.kylin.measure.basic.BasicMeasureType.class);
- kyroClasses.add(org.apache.kylin.common.util.SplittedBytes[].class);
- kyroClasses.add(org.apache.kylin.common.util.SplittedBytes.class);
- kyroClasses.add(org.apache.kylin.cube.kv.RowKeyEncoderProvider.class);
- kyroClasses.add(org.apache.kylin.cube.kv.RowKeyEncoder.class);
- kyroClasses.add(org.apache.kylin.measure.basic.BigDecimalIngester.class);
- kyroClasses.add(org.apache.kylin.dimension.DictionaryDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.IntDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.BooleanDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.DateDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.FixedLenDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.FixedLenHexDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.IntegerDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.OneMoreByteVLongDimEnc.class);
- kyroClasses.add(org.apache.kylin.dimension.TimeDimEnc.class);
- kyroClasses.add(org.apache.kylin.cube.model.AggregationGroup.HierarchyMask.class);
- kyroClasses.add(org.apache.kylin.measure.topn.DoubleDeltaSerializer.class);
- kyroClasses.add(org.apache.kylin.measure.topn.Counter.class);
-
- try {
- kyroClasses.add(Class.forName("com.google.common.collect.EmptyImmutableList"));
- } catch (ClassNotFoundException e) {
- logger.error("failed to load class", e);
- }
-
- ArrayList<String> result = Lists.newArrayList();
- for (Class kyroClass : kyroClasses) {
- result.add(kyroClass.getName());
- }
- result.add("scala.collection.immutable.Map$EmptyMap$");
- result.add("org.apache.spark.sql.catalyst.expressions.GenericInternalRow");
- result.add("org.apache.spark.unsafe.types.UTF8String");
- return result;
- }
-
@Override
protected void execute(OptionsHelper optionsHelper) throws Exception {
final String hiveTable = optionsHelper.getOptionValue(OPTION_INPUT_PATH);
@@ -658,15 +544,8 @@ public class SparkCubing extends AbstractApplication {
//serialization conf
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+ conf.set("spark.kryo.registrator", "org.apache.kylin.engine.spark.KylinKryoRegistrator");
conf.set("spark.kryo.registrationRequired", "true");
- final Iterable<String> allClasses = Iterables.filter(Iterables.concat(Lists.newArrayList(conf.get("spark.kryo.classesToRegister", "").split(",")), getKyroClasses()), new Predicate<String>() {
- @Override
- public boolean apply(@Nullable String input) {
- return input != null && input.trim().length() > 0;
- }
- });
- System.out.println("kyro classes:" + allClasses.toString());
- conf.set("spark.kryo.classesToRegister", StringUtils.join(allClasses, ","));
JavaSparkContext sc = new JavaSparkContext(conf);
HiveContext sqlContext = new HiveContext(sc.sc());
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index d6790aa..8892a73 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -17,13 +17,10 @@
*/
package org.apache.kylin.engine.spark;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -71,7 +68,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
-import javax.annotation.Nullable;
import java.io.File;
import java.io.FileFilter;
import java.io.Serializable;
@@ -79,7 +75,6 @@ import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
-import static org.apache.kylin.engine.spark.SparkCubing.getKyroClasses;
/**
*/
@@ -129,11 +124,12 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
}
private static final void prepare() {
- final File file = new File(SparkFiles.get("kylin.properties"));
- final String confPath = file.getParentFile().getAbsolutePath();
+ File file = new File(SparkFiles.get("kylin.properties"));
+ String confPath = file.getParentFile().getAbsolutePath();
logger.info("conf directory:" + confPath);
System.setProperty(KylinConfig.KYLIN_CONF, confPath);
ClassUtil.addClasspath(confPath);
+
}
@Override
@@ -144,17 +140,11 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
final String confPath = optionsHelper.getOptionValue(OPTION_CONF_PATH);
final String outputPath = optionsHelper.getOptionValue(OPTION_OUTPUT_PATH);
- SparkConf conf = new SparkConf().setAppName("Cubing for:" + cubeName + ", segment " + segmentId);
+ SparkConf conf = new SparkConf().setAppName("Cubing for:" + cubeName + " segment " + segmentId);
//serialization conf
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+ conf.set("spark.kryo.registrator", "org.apache.kylin.engine.spark.KylinKryoRegistrator");
conf.set("spark.kryo.registrationRequired", "true");
- final Iterable<String> allClasses = Iterables.filter(Iterables.concat(Lists.newArrayList(conf.get("spark.kryo.classesToRegister", "").split(",")), getKyroClasses()), new Predicate<String>() {
- @Override
- public boolean apply(@Nullable String input) {
- return input != null && input.trim().length() > 0;
- }
- });
- conf.set("spark.kryo.classesToRegister", StringUtils.join(allClasses, ","));
JavaSparkContext sc = new JavaSparkContext(conf);
setupClasspath(sc, confPath);
@@ -176,11 +166,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
final NDCuboidBuilder ndCuboidBuilder = new NDCuboidBuilder(vCubeSegment.getValue(), new RowKeyEncoderProvider(vCubeSegment.getValue()));
final Broadcast<CuboidScheduler> vCuboidScheduler = sc.broadcast(new CuboidScheduler(vCubeDesc.getValue()));
-
- final long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
- final Cuboid baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);
final int measureNum = cubeDesc.getMeasures().size();
- final BaseCuboidBuilder baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, vCubeDesc.getValue(), vCubeSegment.getValue(), intermediateTableDesc, AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid), MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
int countMeasureIndex = 0;
for (MeasureDesc measureDesc : cubeDesc.getMeasures()) {
@@ -204,12 +190,20 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
// encode with dimension encoding, transform to <ByteArray, Object[]> RDD
final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = intermediateTable.javaRDD().mapToPair(new PairFunction<Row, ByteArray, Object[]>() {
transient boolean initialized = false;
+ BaseCuboidBuilder baseCuboidBuilder = null;
@Override
public Tuple2<ByteArray, Object[]> call(Row row) throws Exception {
if (initialized == false) {
- prepare();
- initialized = true;
+ synchronized (SparkCubingByLayer.class) {
+ if (initialized == false) {
+ prepare();
+ long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
+ Cuboid baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);
+ baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, cubeDesc, cubeSegment, intermediateTableDesc, AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid), MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
+ initialized = true;
+ }
+ }
}
String[] rowArray = rowToArray(row);
@@ -235,7 +229,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
});
logger.info("encodedBaseRDD partition number: " + encodedBaseRDD.getNumPartitions());
- Long totalCount = 0L;
+ Long totalCount = 0L;
if (kylinConfig.isSparkSanityCheckEnabled()) {
totalCount = encodedBaseRDD.count();
logger.info("encodedBaseRDD row count: " + encodedBaseRDD.count());
@@ -267,8 +261,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
partition = estimateRDDPartitionNum(level, cubeStatsReader, kylinConfig);
logger.info("Level " + level + " partition number: " + partition);
allRDDs[level] = allRDDs[level - 1].flatMapToPair(flatMapFunction).reduceByKey(reducerFunction2, partition).persist(storageLevel);
- if (kylinConfig.isSparkSanityCheckEnabled() == true) {
- sanityCheck(allRDDs[level], totalCount, level, cubeStatsReader, countMeasureIndex);
+ if (kylinConfig.isSparkSanityCheckEnabled() == true) {
+ sanityCheck(allRDDs[level], totalCount, level, cubeStatsReader, countMeasureIndex);
}
saveToHDFS(allRDDs[level], vCubeDesc.getValue(), outputPath, level, confOverwrite);
allRDDs[level - 1].unpersist();
@@ -288,17 +282,18 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
}
private static void saveToHDFS(final JavaPairRDD<ByteArray, Object[]> rdd, final CubeDesc cubeDesc, final String hdfsBaseLocation, int level, Configuration conf) {
- final String cuboidOutputPath = BatchCubingJobBuilder2.getCuboidOutputPathsByLevel(hdfsBaseLocation, level);
- rdd.mapToPair(new PairFunction<Tuple2<ByteArray, Object[]>, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>() {
- BufferedMeasureCodec codec = new BufferedMeasureCodec(cubeDesc.getMeasures());
- @Override
- public Tuple2<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text> call(Tuple2<ByteArray, Object[]> tuple2) throws Exception {
- ByteBuffer valueBuf = codec.encode(tuple2._2());
- byte[] encodedBytes = new byte[valueBuf.position()];
- System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
- return new Tuple2<>(new org.apache.hadoop.io.Text(tuple2._1().array()), new org.apache.hadoop.io.Text(encodedBytes));
- }
- }).saveAsNewAPIHadoopFile(cuboidOutputPath, org.apache.hadoop.io.Text.class, org.apache.hadoop.io.Text.class, SequenceFileOutputFormat.class, conf);
+ final String cuboidOutputPath = BatchCubingJobBuilder2.getCuboidOutputPathsByLevel(hdfsBaseLocation, level);
+ rdd.mapToPair(new PairFunction<Tuple2<ByteArray, Object[]>, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>() {
+ BufferedMeasureCodec codec = new BufferedMeasureCodec(cubeDesc.getMeasures());
+
+ @Override
+ public Tuple2<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text> call(Tuple2<ByteArray, Object[]> tuple2) throws Exception {
+ ByteBuffer valueBuf = codec.encode(tuple2._2());
+ byte[] encodedBytes = new byte[valueBuf.position()];
+ System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
+ return new Tuple2<>(new org.apache.hadoop.io.Text(tuple2._1().array()), new org.apache.hadoop.io.Text(encodedBytes));
+ }
+ }).saveAsNewAPIHadoopFile(cuboidOutputPath, org.apache.hadoop.io.Text.class, org.apache.hadoop.io.Text.class, SequenceFileOutputFormat.class, conf);
logger.info("Persisting RDD for level " + level + " into " + cuboidOutputPath);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json b/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
index 0fda3b3..99013ce 100644
--- a/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
+++ b/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
@@ -267,16 +267,6 @@
},
"returntype" : "raw"
}
- }, {
- "name" : "GVM_PERCENTILE",
- "function" : {
- "expression" : "PERCENTILE",
- "parameter" : {
- "type" : "column",
- "value" : "TEST_KYLIN_FACT.PRICE"
- },
- "returntype" : "percentile(100)"
- }
} ],
"dictionaries": [ {
"column": "TEST_KYLIN_FACT.TEST_COUNT_DISTINCT_BITMAP",
@@ -368,7 +358,7 @@
"name" : "f3",
"columns" : [ {
"qualifier" : "m",
- "measure_refs" : [ "TEST_EXTENDED_COLUMN", "TRANS_ID_RAW", "PRICE_RAW", "CAL_DT_RAW", "BUYER_CONTACT", "SELLER_CONTACT", "GVM_PERCENTILE" ]
+ "measure_refs" : [ "TEST_EXTENDED_COLUMN", "TRANS_ID_RAW", "PRICE_RAW", "CAL_DT_RAW", "BUYER_CONTACT", "SELLER_CONTACT" ]
} ]
} ]
},
@@ -448,7 +438,7 @@
"status_need_notify" : [ ],
"auto_merge_time_ranges" : null,
"retention_range" : 0,
- "engine_type" : 2,
+ "engine_type" : 4,
"storage_type" : 2,
"override_kylin_properties": {
"kylin.cube.algorithm": "LAYER"
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/examples/test_case_data/sandbox/core-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index 7660a7e..a4ad5c6 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -178,9 +178,11 @@
<value>false</value>
</property>
+ <!--
<property>
<name>net.topology.script.file.name</name>
<value>/etc/hadoop/conf/topology_script.py</value>
</property>
+ -->
</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 6cb5148..91566ae 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -162,23 +162,24 @@ kylin.server.query-metrics-percentiles-intervals=60, 360, 3600
kylin.env=DEV
kylin.source.hive.keep-flat-table=false
-### Spark as Engine ###
-kylin.engine.spark.env.hadoop-conf-dir=../examples/test_case_data/sandbox
-kylin.engine.spark.sanity-check-enabled=false
+
+# Estimate the RDD partition numbers, the test cubes have a couple memory-hungry measure so the estimation is wild
+kylin.engine.spark.rdd-partition-cut-mb=100
### Spark conf overwrite for cube engine
+kylin.engine.spark-conf.spark.yarn.submit.file.replication=1
kylin.engine.spark-conf.spark.master=yarn
-kylin.engine.spark-conf.spark.submit.deployMode=client
-kylin.engine.spark-conf.spark.yarn.executor.memoryOverhead=512
-kylin.engine.spark-conf.spark.yarn.driver.memoryOverhead=384
-kylin.engine.spark-conf.spark.executor.memory=1G
+kylin.engine.spark-conf.spark.submit.deployMode=cluster
+kylin.engine.spark-conf.spark.yarn.executor.memoryOverhead=384
+kylin.engine.spark-conf.spark.yarn.driver.memoryOverhead=256
+kylin.engine.spark-conf.spark.executor.memory=768M
kylin.engine.spark-conf.spark.executor.cores=1
kylin.engine.spark-conf.spark.executor.instances=1
kylin.engine.spark-conf.spark.storage.memoryFraction=0.3
-kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///kylin/spark-history
-kylin.engine.spark-conf.spark.eventLog.dir=hdfs\:///kylin/spark-history
-#kylin.engine.spark-conf.spark.yarn.queue=default
-#kylin.engine.spark-conf.spark.yarn.jar=hdfs://sandbox.hortonworks.com:8020/kylin/spark/spark-assembly-1.6.3-hadoop2.6.0.jar
-#kylin.engine.spark-conf.spark.io.compression.codec=org.apache.spark.io.SnappyCompressionCodec
-
-
+kylin.engine.spark-conf.spark.eventLog.enabled=true
+kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///spark-history
+kylin.engine.spark-conf.spark.eventLog.dir=hdfs\:///spark-history
+kylin.engine.spark-conf.spark.yarn.jar=hdfs://sandbox.hortonworks.com:8020/kylin/spark/spark-assembly-1.6.3-hadoop2.6.0.jar
+kylin.engine.spark-conf.spark.driver.extraJavaOptions=-Dhdp.version=current
+kylin.engine.spark-conf.spark.yarn.am.extraJavaOptions=-Dhdp.version=current
+kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index 9662806..91104ba 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -36,6 +36,7 @@
<properties>
<hdp.version/>
<fastBuildMode/>
+ <engineType/>
</properties>
<!-- Dependencies. -->
@@ -238,6 +239,25 @@
<artifactId>kafka_2.10</artifactId>
<scope>provided</scope>
</dependency>
+
+ <!-- Spark dependency -->
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_2.10</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_2.10</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_2.10</artifactId>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
@@ -296,6 +316,7 @@
<arguments>
<argument>-Dhdp.version=${hdp.version}</argument>
<argument>-DfastBuildMode=${fastBuildMode}</argument>
+ <argument>-DengineType=${engineType}</argument>
<argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
<argument>-classpath</argument>
<classpath/>
http://git-wip-us.apache.org/repos/asf/kylin/blob/5da53936/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 08cc6b9..726d72f 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -40,10 +40,12 @@ import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.HBaseMetadataTestCase;
import org.apache.kylin.common.util.HadoopUtil;
import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.CubeDescManager;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.engine.EngineFactory;
import org.apache.kylin.engine.mr.CubingJob;
import org.apache.kylin.job.DeployUtil;
@@ -68,9 +70,11 @@ import com.google.common.collect.Lists;
public class BuildCubeWithEngine {
private CubeManager cubeManager;
+ private CubeDescManager cubeDescManager;
private DefaultScheduler scheduler;
protected ExecutableManager jobService;
private static boolean fastBuildMode = false;
+ private static int engineType;
private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithEngine.class);
@@ -110,7 +114,15 @@ public class BuildCubeWithEngine {
logger.info("Will not use fast build mode");
}
+ String specifiedEngineType = System.getProperty("engineType");
+ if (StringUtils.isNotEmpty(specifiedEngineType)) {
+ engineType = Integer.parseInt(specifiedEngineType);
+ } else {
+ engineType = 2;
+ }
+
System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
+ System.setProperty("SPARK_HOME", "/usr/local/spark"); // need manually create and put spark to this folder on Jenkins
if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
}
@@ -154,6 +166,7 @@ public class BuildCubeWithEngine {
}
}
+ cubeDescManager = CubeDescManager.getInstance(kylinConfig);
}
public void after() {
@@ -251,6 +264,9 @@ public class BuildCubeWithEngine {
String cubeName = "ci_left_join_cube";
clearSegment(cubeName);
+ // ci_left_join_cube has percentile which isn't supported by Spark engine now
+ // updateCubeEngineType(cubeName);
+
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
f.setTimeZone(TimeZone.getTimeZone("GMT"));
long date1 = 0;
@@ -278,6 +294,7 @@ public class BuildCubeWithEngine {
String cubeName = "ci_inner_join_cube";
clearSegment(cubeName);
+ //updateCubeEngineType(cubeName);
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
f.setTimeZone(TimeZone.getTimeZone("GMT"));
@@ -295,6 +312,14 @@ public class BuildCubeWithEngine {
return false;
}
+ private void updateCubeEngineType(String cubeName) throws IOException {
+ CubeDesc cubeDesc = cubeDescManager.getCubeDesc(cubeName);
+ if (cubeDesc.getEngineType() != engineType) {
+ cubeDesc.setEngineType(engineType);
+ cubeDescManager.updateCubeDesc(cubeDesc);
+ }
+ }
+
private void clearSegment(String cubeName) throws Exception {
CubeInstance cube = cubeManager.getCube(cubeName);
// remove all existing segments
[33/39] kylin git commit: minor,
use local metadata to avoid conflict when running concurrently
Posted by li...@apache.org.
minor, use local metadata to avoid conflict when running concurrently
Signed-off-by: lidongsjtu <li...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d135bdb0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d135bdb0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d135bdb0
Branch: refs/heads/master-hbase0.98
Commit: d135bdb0eed6118245a606fb0bca996420dee70f
Parents: 8263752
Author: etherge <et...@163.com>
Authored: Wed Feb 8 18:40:07 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Wed Feb 8 18:54:06 2017 +0800
----------------------------------------------------------------------
kylin-it/pom.xml | 3 +++
.../kylin/job/BaseTestDistributedScheduler.java | 14 ++++++++++----
2 files changed, 13 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/d135bdb0/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index 91104ba..d58a895 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -36,6 +36,7 @@
<properties>
<hdp.version/>
<fastBuildMode/>
+ <buildCubeUsingProvidedData/>
<engineType/>
</properties>
@@ -316,6 +317,7 @@
<arguments>
<argument>-Dhdp.version=${hdp.version}</argument>
<argument>-DfastBuildMode=${fastBuildMode}</argument>
+ <argument>-DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}</argument>
<argument>-DengineType=${engineType}</argument>
<argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
<argument>-classpath</argument>
@@ -338,6 +340,7 @@
<arguments>
<argument>-Dhdp.version=${hdp.version}</argument>
<argument>-DfastBuildMode=${fastBuildMode}</argument>
+ <argument>-DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}</argument>
<argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
<argument>-classpath</argument>
<classpath/>
http://git-wip-us.apache.org/repos/asf/kylin/blob/d135bdb0/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java b/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
index 1a0a39d..2f37a50 100644
--- a/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
+++ b/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
@@ -24,6 +24,7 @@ import java.util.Arrays;
import javax.annotation.Nullable;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
@@ -48,6 +49,7 @@ import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
+import com.google.common.io.Files;
public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
static ExecutableManager execMgr;
@@ -57,13 +59,13 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
static KylinConfig kylinConfig1;
static KylinConfig kylinConfig2;
static CuratorFramework zkClient;
+ static File localMetaDir;
static final String SEGMENT_ID = "segmentId";
static final String segmentId1 = "segmentId1";
static final String segmentId2 = "segmentId2";
static final String serverName1 = "serverName1";
static final String serverName2 = "serverName2";
- static final String confSrcPath = "../examples/test_case_data/sandbox/kylin.properties";
static final String confDstPath1 = "target/kylin_metadata_dist_lock_test1/kylin.properties";
static final String confDstPath2 = "target/kylin_metadata_dist_lock_test2/kylin.properties";
@@ -77,14 +79,17 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
new File(confDstPath1).getParentFile().mkdirs();
new File(confDstPath2).getParentFile().mkdirs();
KylinConfig srcConfig = KylinConfig.getInstanceFromEnv();
+
+ localMetaDir = Files.createTempDir();
String backup = srcConfig.getMetadataUrl();
- srcConfig.setProperty("kylin.metadata.url", "kylin_metadata_dist_lock_test@hbase");
+ srcConfig.setProperty("kylin.metadata.url", localMetaDir.getAbsolutePath());
srcConfig.writeProperties(new File(confDstPath1));
srcConfig.writeProperties(new File(confDstPath2));
srcConfig.setProperty("kylin.metadata.url", backup);
+
kylinConfig1 = KylinConfig.createInstanceFromUri(new File(confDstPath1).getAbsolutePath());
kylinConfig2 = KylinConfig.createInstanceFromUri(new File(confDstPath2).getAbsolutePath());
-
+
initZk();
if (jobLock == null)
@@ -130,7 +135,8 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
zkClient.close();
zkClient = null;
}
-
+
+ FileUtils.deleteDirectory(localMetaDir);
System.clearProperty("kylin.job.lock");
staticCleanupTestMetadata();
}
[19/39] kylin git commit: KYLIN-2426 fix hardcode path
Posted by li...@apache.org.
KYLIN-2426 fix hardcode path
Signed-off-by: lidongsjtu <li...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/19252848
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/19252848
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/19252848
Branch: refs/heads/master-hbase0.98
Commit: 19252848e27423edeb6f3ebed6dea453db1904e9
Parents: 9cd6c70
Author: etherge <et...@163.com>
Authored: Mon Feb 6 11:38:42 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 6 13:48:38 2017 +0800
----------------------------------------------------------------------
.../storage/hdfs/ITHDFSResourceStoreTest.java | 39 +++++++++++---------
1 file changed, 21 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/19252848/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
index ec12722..aa5a104 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
@@ -18,6 +18,9 @@
package org.apache.kylin.storage.hdfs;
+import static junit.framework.TestCase.assertTrue;
+
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.kylin.common.KylinConfig;
@@ -28,18 +31,18 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import static junit.framework.TestCase.assertTrue;
-
public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
KylinConfig kylinConfig;
FileSystem fs;
+ String workingDir;
@Before
public void setup() throws Exception {
this.createTestMetadata();
kylinConfig = KylinConfig.getInstanceFromEnv();
fs = HadoopUtil.getWorkingFileSystem();
+ workingDir = getHdfsWorkingDirWithoutScheme(kylinConfig);
}
@After
@@ -47,32 +50,33 @@ public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
this.cleanupTestMetadata();
}
+ private String getHdfsWorkingDirWithoutScheme(KylinConfig kylinConfig) {
+ String hdfsWorkingDir = kylinConfig.getHdfsWorkingDirectory();
+ int thirdIndex = StringUtils.ordinalIndexOf(hdfsWorkingDir, "/", 3);
+ int fourthIndex = StringUtils.ordinalIndexOf(hdfsWorkingDir, "/", 5);
+ return hdfsWorkingDir.substring(thirdIndex, fourthIndex);
+ }
+
@Test
public void testBasic() throws Exception {
- String oldUrl = kylinConfig.getMetadataUrl();
- String path = "/kylin/kylin_metadata/metadata";
- kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
- HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
- ResourceStoreTest.testAStore(store);
- kylinConfig.setProperty("kylin.metadata.url", oldUrl);
- assertTrue(fs.exists(new Path(path)));
+ String path = workingDir + "/metadata_test1";
+ doTestWithPath(path);
}
@Test
public void testQalifiedName() throws Exception {
- String oldUrl = kylinConfig.getMetadataUrl();
- String path = "hdfs:///kylin/kylin_metadata/metadata_test1";
- kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
- HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
- ResourceStoreTest.testAStore(store);
- kylinConfig.setProperty("kylin.metadata.url", oldUrl);
- assertTrue(fs.exists(new Path(path)));
+ String path = "hdfs://" + workingDir + "/metadata_test2";
+ doTestWithPath(path);
}
@Test
public void testFullQalifiedName() throws Exception {
+ String path = fs.getUri() + workingDir + "/metadata_test3";
+ doTestWithPath(path);
+ }
+
+ private void doTestWithPath(String path) throws Exception {
String oldUrl = kylinConfig.getMetadataUrl();
- String path = "hdfs://sandbox.hortonworks.com:8020/kylin/kylin_metadata/metadata_test2";
kylinConfig.setProperty("kylin.metadata.url", path + "@hdfs");
HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
ResourceStoreTest.testAStore(store);
@@ -80,5 +84,4 @@ public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
assertTrue(fs.exists(new Path(path)));
}
-
}
[09/39] kylin git commit: KYLIN-2361 Upgrade Tomcat 8.5.9
Posted by li...@apache.org.
KYLIN-2361 Upgrade Tomcat 8.5.9
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2b60ac6a
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2b60ac6a
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2b60ac6a
Branch: refs/heads/master-hbase0.98
Commit: 2b60ac6a42741ca70d63e6680a0fbe9aeed7d46e
Parents: a058bfb
Author: Billy Liu <bi...@apache.org>
Authored: Wed Jan 18 17:19:04 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Sat Feb 4 11:42:02 2017 +0800
----------------------------------------------------------------------
build/script/download-tomcat.sh | 8 ++++----
pom.xml | 2 +-
.../java/org/apache/kylin/rest/DebugTomcat.java | 16 +++++++++-------
.../kylin/ext/CustomizedWebappClassloader.java | 5 ++---
4 files changed, 16 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/2b60ac6a/build/script/download-tomcat.sh
----------------------------------------------------------------------
diff --git a/build/script/download-tomcat.sh b/build/script/download-tomcat.sh
index b3aa509..bdfe351 100755
--- a/build/script/download-tomcat.sh
+++ b/build/script/download-tomcat.sh
@@ -27,19 +27,19 @@ if [[ `uname -a` =~ "Darwin" ]]; then
alias md5cmd="md5 -q"
fi
-tomcat_pkg_version="7.0.69"
-tomcat_pkg_md5="10a071e5169a1a8b14ff35a0ad181052"
+tomcat_pkg_version="8.5.9"
+tomcat_pkg_md5="b41270a64b7774c964e4bec813eea2ed"
if [ ! -f "build/apache-tomcat-${tomcat_pkg_version}.tar.gz" ]
then
echo "no binary file found"
- wget --directory-prefix=build/ http://archive.apache.org/dist/tomcat/tomcat-7/v${tomcat_pkg_version}/bin/apache-tomcat-${tomcat_pkg_version}.tar.gz || echo "Download tomcat failed"
+ wget --directory-prefix=build/ http://archive.apache.org/dist/tomcat/tomcat-8/v${tomcat_pkg_version}/bin/apache-tomcat-${tomcat_pkg_version}.tar.gz || echo "Download tomcat failed"
else
if [ `md5cmd build/apache-tomcat-${tomcat_pkg_version}.tar.gz | awk '{print $1}'` != "${tomcat_pkg_md5}" ]
then
echo "md5 check failed"
rm build/apache-tomcat-${tomcat_pkg_version}.tar.gz
- wget --directory-prefix=build/ http://archive.apache.org/dist/tomcat/tomcat-7/v${tomcat_pkg_version}/bin/apache-tomcat-${tomcat_pkg_version}.tar.gz || echo "download tomcat failed"
+ wget --directory-prefix=build/ http://archive.apache.org/dist/tomcat/tomcat-8/v${tomcat_pkg_version}/bin/apache-tomcat-${tomcat_pkg_version}.tar.gz || echo "download tomcat failed"
fi
fi
unalias md5cmd
http://git-wip-us.apache.org/repos/asf/kylin/blob/2b60ac6a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index bf33e07..b82eee2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,7 +101,7 @@
<cglib.version>3.2.4</cglib.version>
<supercsv.version>2.4.0</supercsv.version>
<cors.version>2.5</cors.version>
- <tomcat.version>7.0.69</tomcat.version>
+ <tomcat.version>8.5.9</tomcat.version>
<t-digest.version>3.1</t-digest.version>
<!-- REST Service -->
http://git-wip-us.apache.org/repos/asf/kylin/blob/2b60ac6a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index 3461e1d..1b47f79 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -21,11 +21,13 @@ package org.apache.kylin.rest;
import org.apache.catalina.Context;
import org.apache.catalina.core.AprLifecycleListener;
import org.apache.catalina.core.StandardServer;
-import org.apache.catalina.deploy.ErrorPage;
import org.apache.catalina.startup.Tomcat;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.util.Shell;
import org.apache.kylin.common.KylinConfig;
+import org.apache.tomcat.JarScanFilter;
+import org.apache.tomcat.JarScanType;
+import org.apache.tomcat.util.descriptor.web.ErrorPage;
import java.io.File;
import java.lang.reflect.Field;
@@ -127,12 +129,12 @@ public class DebugTomcat {
notFound.setLocation("/index.html");
webContext.addErrorPage(notFound);
webContext.addWelcomeFile("index.html");
-// webContext.getJarScanner().setJarScanFilter(new JarScanFilter() {
-// @Override
-// public boolean check(JarScanType arg0, String arg1) {
-// return false;
-// }
-// });
+ webContext.getJarScanner().setJarScanFilter(new JarScanFilter() {
+ @Override
+ public boolean check(JarScanType arg0, String arg1) {
+ return false;
+ }
+ });
// tomcat start
tomcat.start();
http://git-wip-us.apache.org/repos/asf/kylin/blob/2b60ac6a/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
----------------------------------------------------------------------
diff --git a/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java b/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
index 816601f..f241865 100644
--- a/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
+++ b/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
@@ -45,7 +45,7 @@ public class CustomizedWebappClassloader extends ParallelWebappClassLoader {
* @param name class name
* @return true if the class should be filtered
*/
- protected boolean filter(String name) {
+ protected boolean filter(String name, boolean isClassName) {
if (name == null)
return false;
@@ -62,7 +62,6 @@ public class CustomizedWebappClassloader extends ParallelWebappClassLoader {
return true;
}
- //return super.filter(name, isClassName);
- return false;
+ return super.filter(name, isClassName);
}
}
[30/39] kylin git commit: KYLIN-2388 Hot load kylin config from web
Posted by li...@apache.org.
KYLIN-2388 Hot load kylin config from web
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/39afa519
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/39afa519
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/39afa519
Branch: refs/heads/master-hbase0.98
Commit: 39afa5197e197d5143843a3bb62c528749b03418
Parents: 7b860ad
Author: kangkaisen <ka...@live.com>
Authored: Thu Jan 12 14:35:59 2017 +0800
Committer: kangkaisen <ka...@163.com>
Committed: Tue Feb 7 19:57:29 2017 +0800
----------------------------------------------------------------------
.../org/apache/kylin/common/KylinConfig.java | 6 +-
.../apache/kylin/common/KylinConfigTest.java | 28 ++++-----
.../util/HotLoadKylinPropertiesTestCase.java | 64 ++++++++++++++++++++
.../kylin/cube/CubeSpecificConfigTest.java | 37 ++++++-----
.../apache/kylin/job/JobEngineConfigTest.java | 47 ++++++++++++++
.../kylin/rest/controller/CacheController.java | 9 ++-
webapp/app/js/controllers/admin.js | 26 ++++++++
webapp/app/js/services/cache.js | 3 +-
webapp/app/partials/admin/admin.html | 3 +
9 files changed, 191 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 0f40654..c7a18c6 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -201,7 +201,7 @@ public class KylinConfig extends KylinConfigBase {
return kylinHome + File.separator + "conf";
}
- static File getKylinPropertiesFile() {
+ public static File getKylinPropertiesFile() {
String kylinConfHome = System.getProperty(KYLIN_CONF);
if (!StringUtils.isEmpty(kylinConfHome)) {
logger.info("Use KYLIN_CONF=" + kylinConfHome);
@@ -385,4 +385,8 @@ public class KylinConfig extends KylinConfigBase {
// }
// logger.info(buf.toString());
}
+
+ public synchronized void hotLoadKylinProperties() {
+ reloadKylinConfig(getKylinProperties());
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java b/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
index a426fc6..4d5f130 100644
--- a/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
@@ -26,24 +26,12 @@ import static org.junit.Assert.assertTrue;
import java.util.Map;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.junit.After;
-import org.junit.Before;
+import org.apache.kylin.common.util.HotLoadKylinPropertiesTestCase;
import org.junit.Test;
import com.google.common.collect.Maps;
-public class KylinConfigTest extends LocalFileMetadataTestCase {
- @Before
- public void setUp() throws Exception {
- this.createTestMetadata();
- }
-
- @After
- public void after() throws Exception {
- this.cleanupTestMetadata();
- }
-
+public class KylinConfigTest extends HotLoadKylinPropertiesTestCase{
@Test
public void testMRConfigOverride() {
KylinConfig config = KylinConfig.getInstanceFromEnv();
@@ -78,9 +66,19 @@ public class KylinConfigTest extends LocalFileMetadataTestCase {
KylinConfig config = KylinConfig.getInstanceFromEnv();
Map<String, String> override = Maps.newHashMap();
KylinConfig configExt = KylinConfigExt.createInstance(config, override);
-
assertTrue(config.properties == configExt.properties);
config.setProperty("1234", "1234");
assertEquals("1234", configExt.getOptional("1234"));
}
+
+ @Test
+ public void testPropertiesHotLoad() {
+ KylinConfig config = KylinConfig.getInstanceFromEnv();
+ assertEquals("whoami@kylin.apache.org", config.getKylinOwner());
+
+ updateProperty("kylin.storage.hbase.owner-tag", "kylin@kylin.apache.org");
+ KylinConfig.getInstanceFromEnv().hotLoadKylinProperties();
+
+ assertEquals("kylin@kylin.apache.org", config.getKylinOwner());
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java b/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
new file mode 100644
index 0000000..9f5b278
--- /dev/null
+++ b/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.common.util;
+
+import org.apache.kylin.common.KylinConfig;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.util.Properties;
+
+/**
+ * @author kangkaisen
+ */
+
+public class HotLoadKylinPropertiesTestCase extends LocalFileMetadataTestCase {
+ @Before
+ public void setUp() throws Exception {
+ this.createTestMetadata();
+ }
+
+ @After
+ public void after() throws Exception {
+ this.cleanupTestMetadata();
+ }
+
+ protected void updateProperty(String key, String value) {
+ File propFile = KylinConfig.getKylinPropertiesFile();
+ Properties conf = new Properties();
+
+ //load
+ try (FileInputStream is = new FileInputStream(propFile)) {
+ conf.load(is);
+ conf.setProperty(key, value);
+ } catch (Exception e) {
+ System.err.println(e.getMessage());
+ }
+
+ //store
+ try (FileOutputStream out = new FileOutputStream(propFile)) {
+ conf.store(out, null);
+ } catch (Exception e) {
+ System.err.println(e.getMessage());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
index c61f07f..17c02cc 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
@@ -21,24 +21,13 @@ package org.apache.kylin.cube;
import static org.junit.Assert.assertEquals;
import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.common.util.HotLoadKylinPropertiesTestCase;
import org.apache.kylin.cube.model.CubeDesc;
-import org.junit.After;
-import org.junit.Before;
import org.junit.Test;
-public class CubeSpecificConfigTest extends LocalFileMetadataTestCase {
-
- @Before
- public void setUp() throws Exception {
- this.createTestMetadata();
- }
-
- @After
- public void after() throws Exception {
- this.cleanupTestMetadata();
- }
+import java.io.IOException;
+public class CubeSpecificConfigTest extends HotLoadKylinPropertiesTestCase {
@Test
public void test() {
KylinConfig baseConfig = KylinConfig.getInstanceFromEnv();
@@ -57,4 +46,24 @@ public class CubeSpecificConfigTest extends LocalFileMetadataTestCase {
assertEquals("snappy", base.getHbaseDefaultCompressionCodec());
assertEquals("lz4", override.getHbaseDefaultCompressionCodec());
}
+
+ @Test
+ public void testPropertiesHotLoad() throws IOException {
+ KylinConfig baseConfig = KylinConfig.getInstanceFromEnv();
+ KylinConfig oldCubeDescConfig = CubeDescManager.getInstance(baseConfig).getCubeDesc("ssb").getConfig();
+ assertEquals(10, oldCubeDescConfig.getMaxConcurrentJobLimit());
+
+ //hot load Properties
+ updateProperty("kylin.job.max-concurrent-jobs", "20");
+ KylinConfig.getInstanceFromEnv().hotLoadKylinProperties();
+ CubeDescManager.getInstance(baseConfig).reloadCubeDescLocal("ssb");
+
+ //test cubeDescConfig
+ KylinConfig newCubeDescConfig = CubeDescManager.getInstance(baseConfig).getCubeDesc("ssb").getConfig();
+ assertEquals(20, newCubeDescConfig.getMaxConcurrentJobLimit());
+
+ //test cubeConfig
+ KylinConfig newCubeConfig = CubeManager.getInstance(baseConfig).getCube("ssb").getConfig();
+ assertEquals(20, newCubeConfig.getMaxConcurrentJobLimit());
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java b/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
new file mode 100644
index 0000000..77914ef
--- /dev/null
+++ b/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.HotLoadKylinPropertiesTestCase;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * @author kangkaisen
+ */
+
+public class JobEngineConfigTest extends HotLoadKylinPropertiesTestCase {
+
+ @Test
+ public void testPropertiesHotLoad() throws IOException {
+ KylinConfig baseConfig = KylinConfig.getInstanceFromEnv();
+ JobEngineConfig jobEngineConfig = new JobEngineConfig(baseConfig);
+ assertEquals(10, jobEngineConfig.getMaxConcurrentJobLimit());
+
+ updateProperty("kylin.job.max-concurrent-jobs", "20");
+ KylinConfig.getInstanceFromEnv().hotLoadKylinProperties();
+
+ assertEquals(20, jobEngineConfig.getMaxConcurrentJobLimit());
+ }
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
index 254aabf..8d5f00e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
@@ -20,6 +20,7 @@ package org.apache.kylin.rest.controller;
import java.io.IOException;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.cachesync.Broadcaster;
import org.apache.kylin.rest.service.CacheService;
import org.slf4j.Logger;
@@ -39,7 +40,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
@Controller
@RequestMapping(value = "/cache")
public class CacheController extends BasicController {
-
+
@SuppressWarnings("unused")
private static final Logger logger = LoggerFactory.getLogger(CacheController.class);
@@ -64,6 +65,12 @@ public class CacheController extends BasicController {
cacheService.notifyMetadataChange(entity, Broadcaster.Event.getEvent(event), cacheKey);
}
+ @RequestMapping(value = "/announce/config", method = { RequestMethod.POST })
+ public void hotLoadKylinConfig() throws IOException {
+ KylinConfig.getInstanceFromEnv().hotLoadKylinProperties();
+ cacheService.notifyMetadataChange(Broadcaster.SYNC_ALL, Broadcaster.Event.UPDATE, Broadcaster.SYNC_ALL);
+ }
+
public void setCacheService(CacheService cacheService) {
this.cacheService = cacheService;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/webapp/app/js/controllers/admin.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/admin.js b/webapp/app/js/controllers/admin.js
index 0d36e0d..783ab17 100644
--- a/webapp/app/js/controllers/admin.js
+++ b/webapp/app/js/controllers/admin.js
@@ -58,6 +58,32 @@ KylinApp.controller('AdminCtrl', function ($scope, AdminService, CacheService, T
});
}
+ $scope.reloadConfig = function () {
+ SweetAlert.swal({
+ title: '',
+ text: 'Are you sure to reload config',
+ type: '',
+ showCancelButton: true,
+ confirmButtonColor: '#DD6B55',
+ confirmButtonText: "Yes",
+ closeOnConfirm: true
+ }, function (isConfirm) {
+ if (isConfirm) {
+ CacheService.reloadConfig({}, function () {
+ SweetAlert.swal('Success!', 'config reload successfully', 'success');
+ }, function (e) {
+ if (e.data && e.data.exception) {
+ var message = e.data.exception;
+ var msg = !!(message) ? message : 'Failed to take action.';
+ SweetAlert.swal('Oops...', msg, 'error');
+ } else {
+ SweetAlert.swal('Oops...', "Failed to take action.", 'error');
+ }
+ });
+ }
+ });
+ }
+
$scope.reloadMeta = function () {
SweetAlert.swal({
title: '',
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/webapp/app/js/services/cache.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/cache.js b/webapp/app/js/services/cache.js
index bcec603..38dc0b8 100644
--- a/webapp/app/js/services/cache.js
+++ b/webapp/app/js/services/cache.js
@@ -18,6 +18,7 @@
KylinApp.factory('CacheService', ['$resource', function ($resource, config) {
return $resource(Config.service.url + 'cache/announce/:type/:name/:action', {}, {
- clean: {method: 'PUT', params: {type: 'all', name: 'all', action: 'update'}, isArray: false}
+ clean: {method: 'PUT', params: {type: 'all', name: 'all', action: 'update'}, isArray: false},
+ reloadConfig: {method: 'POST', params: {type: 'config'}, isArray: false}
});
}]);
http://git-wip-us.apache.org/repos/asf/kylin/blob/39afa519/webapp/app/partials/admin/admin.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/admin/admin.html b/webapp/app/partials/admin/admin.html
index f2be3d9..b4bca8d 100644
--- a/webapp/app/partials/admin/admin.html
+++ b/webapp/app/partials/admin/admin.html
@@ -58,6 +58,9 @@
<a class="btn btn-primary btn-lg btn-block" tooltip="Update Server Config" class="btn btn-primary btn-lg" ng-click="toSetConfig()">Set Config</a>
</div>
<div style="padding-top: 10px;width: 260px;">
+ <a class="btn btn-primary btn-lg btn-block" tooltip="Reload Server Config" class="btn btn-primary btn-lg" ng-click="reloadConfig()">Reload Config</a>
+ </div>
+ <div style="padding-top: 10px;width: 260px;">
<a ng-click="downloadBadQueryFiles();" tooltip="Download Diagnosis Info For Current Project" class="btn btn-primary btn-lg btn-block"><i class="fa fa-ambulance"></i> Diagnosis</a>
</div>
[39/39] kylin git commit: KYLIN-2307 Create a branch for master with
HBase 0.98 API
Posted by li...@apache.org.
KYLIN-2307 Create a branch for master with HBase 0.98 API
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4e41c363
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4e41c363
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4e41c363
Branch: refs/heads/master-hbase0.98
Commit: 4e41c36370eb458307998acd09e140667d33b7ea
Parents: 722efb8
Author: lidongsjtu <li...@apache.org>
Authored: Mon Jan 23 13:17:37 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Thu Feb 9 10:23:07 2017 +0800
----------------------------------------------------------------------
dev-support/test_all_against_hdp_2_2_4_2_2.sh | 25 ++++
dev-support/test_all_against_hdp_2_4_0_0_169.sh | 25 ----
.../sandbox/capacity-scheduler.xml | 17 ++-
examples/test_case_data/sandbox/core-site.xml | 28 +---
examples/test_case_data/sandbox/hbase-site.xml | 119 +++++------------
examples/test_case_data/sandbox/hdfs-site.xml | 84 +++++-------
examples/test_case_data/sandbox/hive-site.xml | 89 +++++--------
examples/test_case_data/sandbox/mapred-site.xml | 57 +++------
examples/test_case_data/sandbox/yarn-site.xml | 127 +++----------------
.../kylin/provision/BuildCubeWithEngine.java | 17 +--
pom.xml | 122 +-----------------
.../kylin/rest/security/AclHBaseStorage.java | 4 +-
.../rest/security/MockAclHBaseStorage.java | 8 +-
.../apache/kylin/rest/security/MockHTable.java | 95 +++++++++++---
.../rest/security/RealAclHBaseStorage.java | 9 +-
.../apache/kylin/rest/service/AclService.java | 25 ++--
.../apache/kylin/rest/service/CubeService.java | 35 +++--
.../apache/kylin/rest/service/QueryService.java | 24 ++--
.../apache/kylin/rest/service/UserService.java | 17 +--
.../kylin/storage/hbase/HBaseConnection.java | 44 +++----
.../kylin/storage/hbase/HBaseResourceStore.java | 31 +++--
.../storage/hbase/cube/SimpleHBaseStore.java | 20 +--
.../hbase/cube/v2/CubeHBaseEndpointRPC.java | 13 +-
.../storage/hbase/cube/v2/CubeHBaseScanRPC.java | 9 +-
.../coprocessor/endpoint/CubeVisitService.java | 4 +-
.../storage/hbase/steps/CubeHTableUtil.java | 16 +--
.../storage/hbase/steps/DeprecatedGCStep.java | 24 ++--
.../storage/hbase/steps/HBaseCuboidWriter.java | 7 +-
.../kylin/storage/hbase/steps/MergeGCStep.java | 23 ++--
.../storage/hbase/util/CleanHtableCLI.java | 12 +-
.../storage/hbase/util/CubeMigrationCLI.java | 37 +++---
.../hbase/util/CubeMigrationCheckCLI.java | 17 +--
.../hbase/util/DeployCoprocessorCLI.java | 27 ++--
.../hbase/util/ExtendCubeToHybridCLI.java | 8 +-
.../hbase/util/GridTableHBaseBenchmark.java | 34 ++---
.../kylin/storage/hbase/util/HBaseClean.java | 18 ++-
.../hbase/util/HBaseRegionSizeCalculator.java | 35 +++--
.../kylin/storage/hbase/util/HBaseUsage.java | 9 +-
.../storage/hbase/util/HbaseStreamingInput.java | 30 ++---
.../hbase/util/HtableAlterMetadataCLI.java | 9 +-
.../storage/hbase/util/OrphanHBaseCleanJob.java | 19 +--
.../kylin/storage/hbase/util/PingHBaseCLI.java | 15 +--
.../kylin/storage/hbase/util/RowCounterCLI.java | 11 +-
.../storage/hbase/util/StorageCleanupJob.java | 20 ++-
.../storage/hbase/util/UpdateHTableHostCLI.java | 17 +--
tool/pom.xml | 10 --
.../org/apache/kylin/tool/CubeMigrationCLI.java | 19 +--
.../kylin/tool/ExtendCubeToHybridCLI.java | 8 +-
48 files changed, 596 insertions(+), 877 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/dev-support/test_all_against_hdp_2_2_4_2_2.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_2_4_2_2.sh b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
new file mode 100755
index 0000000..f7780dd
--- /dev/null
+++ b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+dir=$(dirname ${0})
+cd ${dir}
+cd ..
+
+mvn clean install -DskipTests 2>&1 | tee mci.log
+mvn verify -Dhdp.version=${HDP_VERSION:-"2.2.4.2-2"} -fae 2>&1 | tee mvnverify.log
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/dev-support/test_all_against_hdp_2_4_0_0_169.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_4_0_0_169.sh b/dev-support/test_all_against_hdp_2_4_0_0_169.sh
deleted file mode 100755
index 2a3d24b..0000000
--- a/dev-support/test_all_against_hdp_2_4_0_0_169.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-dir=$(dirname ${0})
-cd ${dir}
-cd ..
-
-mvn clean install -DskipTests 2>&1 | tee mci.log
-mvn verify -Dhdp.version=${HDP_VERSION:-"2.4.0.0-169"} -fae 2>&1 | tee mvnverify.log
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/capacity-scheduler.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/capacity-scheduler.xml b/examples/test_case_data/sandbox/capacity-scheduler.xml
index e042aa5..7cb985c 100644
--- a/examples/test_case_data/sandbox/capacity-scheduler.xml
+++ b/examples/test_case_data/sandbox/capacity-scheduler.xml
@@ -47,6 +47,16 @@
</property>
<property>
+ <name>yarn.scheduler.capacity.root.accessible-node-labels.default.capacity</name>
+ <value>-1</value>
+ </property>
+
+ <property>
+ <name>yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity</name>
+ <value>-1</value>
+ </property>
+
+ <property>
<name>yarn.scheduler.capacity.root.acl_administer_queue</name>
<value>*</value>
</property>
@@ -57,6 +67,11 @@
</property>
<property>
+ <name>yarn.scheduler.capacity.root.default-node-label-expression</name>
+ <value></value>
+ </property>
+
+ <property>
<name>yarn.scheduler.capacity.root.default.acl_administer_jobs</name>
<value>*</value>
</property>
@@ -96,4 +111,4 @@
<value>default</value>
</property>
-</configuration>
\ No newline at end of file
+</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/core-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index a4ad5c6..0c5f62b 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -19,6 +19,7 @@
<property>
<name>fs.defaultFS</name>
<value>hdfs://sandbox.hortonworks.com:8020</value>
+ <final>true</final>
</property>
<property>
@@ -38,7 +39,7 @@
<property>
<name>hadoop.proxyuser.falcon.groups</name>
- <value>*</value>
+ <value>users</value>
</property>
<property>
@@ -48,7 +49,7 @@
<property>
<name>hadoop.proxyuser.hbase.groups</name>
- <value>*</value>
+ <value>users</value>
</property>
<property>
@@ -67,23 +68,13 @@
</property>
<property>
- <name>hadoop.proxyuser.hdfs.groups</name>
- <value>*</value>
- </property>
-
- <property>
- <name>hadoop.proxyuser.hdfs.hosts</name>
- <value>*</value>
- </property>
-
- <property>
<name>hadoop.proxyuser.hive.groups</name>
- <value>*</value>
+ <value>users</value>
</property>
<property>
<name>hadoop.proxyuser.hive.hosts</name>
- <value>sandbox.hortonworks.com</value>
+ <value>*</value>
</property>
<property>
@@ -132,15 +123,8 @@
</property>
<property>
- <name>hadoop.security.key.provider.path</name>
- <value></value>
- </property>
-
- <property>
<name>io.compression.codecs</name>
- <value>
- org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec
- </value>
+ <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
</property>
<property>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/hbase-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 568de2e..46d5345 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -22,33 +22,8 @@
</property>
<property>
- <name>hbase.bucketcache.ioengine</name>
- <value></value>
- </property>
-
- <property>
- <name>hbase.bucketcache.percentage.in.combinedcache</name>
- <value></value>
- </property>
-
- <property>
- <name>hbase.bucketcache.size</name>
- <value></value>
- </property>
-
- <property>
- <name>hbase.bulkload.staging.dir</name>
- <value>/apps/hbase/staging</value>
- </property>
-
- <property>
<name>hbase.client.keyvalue.maxsize</name>
- <value>1048576</value>
- </property>
-
- <property>
- <name>hbase.client.retries.number</name>
- <value>35</value>
+ <value>10485760</value>
</property>
<property>
@@ -63,19 +38,12 @@
<property>
<name>hbase.coprocessor.master.classes</name>
- <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor</value>
+ <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
</property>
<property>
<name>hbase.coprocessor.region.classes</name>
- <value>
- org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
- </value>
- </property>
-
- <property>
- <name>hbase.coprocessor.regionserver.classes</name>
- <value></value>
+ <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
</property>
<property>
@@ -119,11 +87,6 @@
</property>
<property>
- <name>hbase.hstore.compaction.max</name>
- <value>10</value>
- </property>
-
- <property>
<name>hbase.hstore.compactionThreshold</name>
<value>3</value>
</property>
@@ -140,42 +103,32 @@
<property>
<name>hbase.master.info.port</name>
- <value>16010</value>
+ <value>60010</value>
</property>
<property>
<name>hbase.master.port</name>
- <value>16000</value>
+ <value>60000</value>
</property>
<property>
- <name>hbase.region.server.rpc.scheduler.factory.class</name>
- <value></value>
+ <name>hbase.regionserver.global.memstore.lowerLimit</name>
+ <value>0.38</value>
</property>
<property>
- <name>hbase.regionserver.global.memstore.size</name>
+ <name>hbase.regionserver.global.memstore.upperLimit</name>
<value>0.4</value>
</property>
<property>
<name>hbase.regionserver.handler.count</name>
- <value>30</value>
+ <value>60</value>
</property>
<property>
<name>hbase.regionserver.info.port</name>
- <value>16030</value>
- </property>
-
- <property>
- <name>hbase.regionserver.port</name>
- <value>16020</value>
- </property>
-
- <property>
- <name>hbase.regionserver.wal.codec</name>
- <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
+ <value>60030</value>
</property>
<property>
@@ -184,26 +137,11 @@
</property>
<property>
- <name>hbase.rpc.controllerfactory.class</name>
- <value></value>
- </property>
-
- <property>
- <name>hbase.rpc.engine</name>
- <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
- </property>
-
- <property>
<name>hbase.rpc.protection</name>
<value>PRIVACY</value>
</property>
<property>
- <name>hbase.rpc.timeout</name>
- <value>90000</value>
- </property>
-
- <property>
<name>hbase.security.authentication</name>
<value>simple</value>
</property>
@@ -220,7 +158,7 @@
<property>
<name>hbase.tmp.dir</name>
- <value>/tmp/hbase-${user.name}</value>
+ <value>/hadoop/hbase</value>
</property>
<property>
@@ -240,27 +178,34 @@
<property>
<name>hfile.block.cache.size</name>
- <value>0.4</value>
- </property>
-
- <property>
- <name>phoenix.functions.allowUserDefinedFunctions</name>
- <value></value>
- </property>
-
- <property>
- <name>phoenix.query.timeoutMs</name>
- <value>60000</value>
+ <value>0.40</value>
</property>
<property>
<name>zookeeper.session.timeout</name>
- <value>60000</value>
+ <value>30000</value>
</property>
<property>
<name>zookeeper.znode.parent</name>
<value>/hbase-unsecure</value>
</property>
-
-</configuration>
\ No newline at end of file
+ <property>
+ <name>hbase.client.pause</name>
+ <value>100</value>
+ <description>General client pause value. Used mostly as value to wait
+ before running a retry of a failed get, region lookup, etc.
+ See hbase.client.retries.number for description of how we backoff from
+ this initial pause amount and how this pause works w/ retries.</description>
+ </property>
+ <property>
+ <name>hbase.client.retries.number</name>
+ <value>5</value>
+ <description>Maximum retries. Used as maximum for all retryable
+ operations such as the getting of a cell's value, starting a row update,
+ etc. Retry interval is a rough function based on hbase.client.pause. At
+ first we retry at this interval but then with backoff, we pretty quickly reach
+ retrying every ten seconds. See HConstants#RETRY_BACKOFF for how the backup
+ ramps up. Change this setting and hbase.client.pause to suit your workload.</description>
+ </property>
+</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hdfs-site.xml b/examples/test_case_data/sandbox/hdfs-site.xml
index 1d9040a..1175fff 100644
--- a/examples/test_case_data/sandbox/hdfs-site.xml
+++ b/examples/test_case_data/sandbox/hdfs-site.xml
@@ -18,7 +18,12 @@
<property>
<name>dfs.block.access.token.enable</name>
- <value>true</value>
+ <value>false</value>
+ </property>
+
+ <property>
+ <name>dfs.block.size</name>
+ <value>34217472</value>
</property>
<property>
@@ -42,21 +47,11 @@
</property>
<property>
- <name>dfs.client.retry.policy.enabled</name>
- <value>false</value>
- </property>
-
- <property>
<name>dfs.cluster.administrators</name>
<value>hdfs</value>
</property>
<property>
- <name>dfs.content-summary.limit</name>
- <value>5000</value>
- </property>
-
- <property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:50010</value>
</property>
@@ -69,6 +64,7 @@
<property>
<name>dfs.datanode.data.dir</name>
<value>/hadoop/hdfs/data</value>
+ <final>true</final>
</property>
<property>
@@ -84,6 +80,7 @@
<property>
<name>dfs.datanode.failed.volumes.tolerated</name>
<value>0</value>
+ <final>true</final>
</property>
<property>
@@ -107,18 +104,13 @@
</property>
<property>
- <name>dfs.domain.socket.path</name>
- <value>/var/lib/hadoop-hdfs/dn_socket</value>
- </property>
-
- <property>
- <name>dfs.encrypt.data.transfer.cipher.suites</name>
- <value>AES/CTR/NoPadding</value>
+ <name>dfs.datanode.max.xcievers</name>
+ <value>1024</value>
</property>
<property>
- <name>dfs.encryption.key.provider.uri</name>
- <value></value>
+ <name>dfs.domain.socket.path</name>
+ <value>/var/lib/hadoop-hdfs/dn_socket</value>
</property>
<property>
@@ -158,12 +150,7 @@
<property>
<name>dfs.namenode.accesstime.precision</name>
- <value>0</value>
- </property>
-
- <property>
- <name>dfs.namenode.audit.log.async</name>
- <value>true</value>
+ <value>3600000</value>
</property>
<property>
@@ -197,11 +184,6 @@
</property>
<property>
- <name>dfs.namenode.fslock.fair</name>
- <value>false</value>
- </property>
-
- <property>
<name>dfs.namenode.handler.count</name>
<value>100</value>
</property>
@@ -209,6 +191,7 @@
<property>
<name>dfs.namenode.http-address</name>
<value>sandbox.hortonworks.com:50070</value>
+ <final>true</final>
</property>
<property>
@@ -217,13 +200,9 @@
</property>
<property>
- <name>dfs.namenode.inode.attributes.provider.class</name>
- <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
- </property>
-
- <property>
<name>dfs.namenode.name.dir</name>
<value>/hadoop/hdfs/namenode</value>
+ <final>true</final>
</property>
<property>
@@ -232,13 +211,8 @@
</property>
<property>
- <name>dfs.namenode.rpc-address</name>
- <value>sandbox.hortonworks.com:8020</value>
- </property>
-
- <property>
<name>dfs.namenode.safemode.threshold-pct</name>
- <value>0.999</value>
+ <value>1.0f</value>
</property>
<property>
@@ -262,6 +236,16 @@
</property>
<property>
+ <name>dfs.nfs.exports.allowed.hosts</name>
+ <value>* rw</value>
+ </property>
+
+ <property>
+ <name>dfs.nfs3.dump.dir</name>
+ <value>/tmp/.hdfs-nfs</value>
+ </property>
+
+ <property>
<name>dfs.permissions.enabled</name>
<value>true</value>
</property>
@@ -273,7 +257,7 @@
<property>
<name>dfs.replication</name>
- <value>3</value>
+ <value>1</value>
</property>
<property>
@@ -284,11 +268,13 @@
<property>
<name>dfs.support.append</name>
<value>true</value>
+ <final>true</final>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
+ <final>true</final>
</property>
<property>
@@ -296,14 +282,4 @@
<value>022</value>
</property>
- <property>
- <name>nfs.exports.allowed.hosts</name>
- <value>* rw</value>
- </property>
-
- <property>
- <name>nfs.file.dump.dir</name>
- <value>/tmp/.hdfs-nfs</value>
- </property>
-
-</configuration>
\ No newline at end of file
+</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index a8c210e..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -22,46 +22,11 @@
</property>
<property>
- <name>atlas.cluster.name</name>
- <value>Sandbox</value>
- </property>
-
- <property>
- <name>atlas.hook.hive.maxThreads</name>
- <value>1</value>
- </property>
-
- <property>
- <name>atlas.hook.hive.minThreads</name>
- <value>1</value>
- </property>
-
- <property>
- <name>atlas.hook.hive.synchronous</name>
- <value>true</value>
- </property>
-
- <property>
- <name>atlas.rest.address</name>
- <value>http://sandbox.hortonworks.com:21000</value>
- </property>
-
- <property>
- <name>datanucleus.autoCreateSchema</name>
- <value>false</value>
- </property>
-
- <property>
<name>datanucleus.cache.level2.type</name>
<value>none</value>
</property>
<property>
- <name>datanucleus.fixedDatastore</name>
- <value>true</value>
- </property>
-
- <property>
<name>hive.auto.convert.join</name>
<value>true</value>
</property>
@@ -73,7 +38,7 @@
<property>
<name>hive.auto.convert.join.noconditionaltask.size</name>
- <value>357913941</value>
+ <value>1000000000</value>
</property>
<property>
@@ -162,16 +127,6 @@
</property>
<property>
- <name>hive.default.fileformat</name>
- <value>TextFile</value>
- </property>
-
- <property>
- <name>hive.default.fileformat.managed</name>
- <value>TextFile</value>
- </property>
-
- <property>
<name>hive.enforce.bucketing</name>
<value>true</value>
</property>
@@ -207,6 +162,11 @@
</property>
<property>
+ <name>hive.exec.failure.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+ </property>
+
+ <property>
<name>hive.exec.max.created.files</name>
<value>100000</value>
</property>
@@ -237,11 +197,6 @@
</property>
<property>
- <name>hive.exec.orc.encoding.strategy</name>
- <value>SPEED</value>
- </property>
-
- <property>
<name>hive.exec.parallel</name>
<value>false</value>
</property>
@@ -252,6 +207,16 @@
</property>
<property>
+ <name>hive.exec.post.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+ </property>
+
+ <property>
+ <name>hive.exec.pre.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+ </property>
+
+ <property>
<name>hive.exec.reducers.bytes.per.reducer</name>
<value>67108864</value>
</property>
@@ -297,6 +262,11 @@
</property>
<property>
+ <name>hive.heapsize</name>
+ <value>250</value>
+ </property>
+
+ <property>
<name>hive.limit.optimize.enable</name>
<value>true</value>
</property>
@@ -508,7 +478,7 @@
<property>
<name>hive.prewarm.numcontainers</name>
- <value>3</value>
+ <value>10</value>
</property>
<property>
@@ -518,7 +488,7 @@
<property>
<name>hive.security.authorization.enabled</name>
- <value>true</value>
+ <value>false</value>
</property>
<property>
@@ -538,7 +508,7 @@
<property>
<name>hive.security.metastore.authorization.manager</name>
- <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+ <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
</property>
<property>
@@ -563,7 +533,12 @@
<property>
<name>hive.server2.enable.doAs</name>
- <value>false</value>
+ <value>true</value>
+ </property>
+
+ <property>
+ <name>hive.server2.enable.impersonation</name>
+ <value>true</value>
</property>
<property>
@@ -573,7 +548,7 @@
<property>
<name>hive.server2.logging.operation.log.location</name>
- <value>/tmp/hive/operation_logs</value>
+ <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
</property>
<property>
@@ -678,7 +653,7 @@
<property>
<name>hive.tez.container.size</name>
- <value>1024</value>
+ <value>250</value>
</property>
<property>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index be470f9..e90f594 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -18,7 +18,7 @@
<property>
<name>io.sort.mb</name>
- <value>64</value>
+ <value>128</value>
</property>
<property>
@@ -27,13 +27,13 @@
</property>
<property>
- <name>mapred.job.map.memory.mb</name>
- <value>250</value>
+ <name>mapreduce.map.memory.mb</name>
+ <value>512</value>
</property>
<property>
- <name>mapred.job.reduce.memory.mb</name>
- <value>250</value>
+ <name>mapreduce.reduce.memory.mb</name>
+ <value>512</value>
</property>
<property>
@@ -48,9 +48,7 @@
<property>
<name>mapreduce.admin.user.env</name>
- <value>
- LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64
- </value>
+ <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
</property>
<property>
@@ -60,9 +58,7 @@
<property>
<name>mapreduce.application.classpath</name>
- <value>
- $PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
- </value>
+ <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
</property>
<property>
@@ -81,18 +77,14 @@
</property>
<property>
- <name>mapreduce.job.counters.max</name>
- <value>130</value>
- </property>
-
- <property>
<name>mapreduce.job.emit-timeline-data</name>
<value>false</value>
</property>
+ <!--the default value on hdp is 0.05, however for test environments we need to be conservative on resource -->
<property>
<name>mapreduce.job.reduce.slowstart.completedmaps</name>
- <value>0.05</value>
+ <value>1</value>
</property>
<property>
@@ -116,28 +108,13 @@
</property>
<property>
- <name>mapreduce.jobhistory.recovery.enable</name>
- <value>true</value>
- </property>
-
- <property>
- <name>mapreduce.jobhistory.recovery.store.class</name>
- <value>org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService</value>
- </property>
-
- <property>
- <name>mapreduce.jobhistory.recovery.store.leveldb.path</name>
- <value>/hadoop/mapreduce/jhs</value>
- </property>
-
- <property>
<name>mapreduce.jobhistory.webapp.address</name>
<value>sandbox.hortonworks.com:19888</value>
</property>
<property>
<name>mapreduce.map.java.opts</name>
- <value>-Xmx1228m</value>
+ <value>-Xmx512m</value>
</property>
<property>
@@ -147,7 +124,7 @@
<property>
<name>mapreduce.map.memory.mb</name>
- <value>1536</value>
+ <value>512</value>
</property>
<property>
@@ -182,7 +159,7 @@
<property>
<name>mapreduce.reduce.java.opts</name>
- <value>-Xmx1638m</value>
+ <value>-Xmx200m</value>
</property>
<property>
@@ -192,7 +169,7 @@
<property>
<name>mapreduce.reduce.memory.mb</name>
- <value>2048</value>
+ <value>512</value>
</property>
<property>
@@ -242,7 +219,7 @@
<property>
<name>mapreduce.task.io.sort.mb</name>
- <value>859</value>
+ <value>128</value>
</property>
<property>
@@ -257,7 +234,7 @@
<property>
<name>yarn.app.mapreduce.am.command-opts</name>
- <value>-Xmx819m -Dhdp.version=${hdp.version}</value>
+ <value>-Xmx512m</value>
</property>
<property>
@@ -267,7 +244,7 @@
<property>
<name>yarn.app.mapreduce.am.resource.mb</name>
- <value>1024</value>
+ <value>512</value>
</property>
<property>
@@ -275,4 +252,4 @@
<value>/user</value>
</property>
-</configuration>
\ No newline at end of file
+</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/examples/test_case_data/sandbox/yarn-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/yarn-site.xml b/examples/test_case_data/sandbox/yarn-site.xml
index ebdf44a..8256158 100644
--- a/examples/test_case_data/sandbox/yarn-site.xml
+++ b/examples/test_case_data/sandbox/yarn-site.xml
@@ -18,7 +18,7 @@
<property>
<name>hadoop.registry.rm.enabled</name>
- <value>true</value>
+ <value>false</value>
</property>
<property>
@@ -28,29 +28,22 @@
<property>
<name>yarn.acl.enable</name>
- <value>true</value>
+ <value>false</value>
</property>
<property>
<name>yarn.admin.acl</name>
- <value>*</value>
+ <value></value>
</property>
<property>
<name>yarn.application.classpath</name>
- <value>
- $HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*
- </value>
- </property>
-
- <property>
- <name>yarn.authorization-provider</name>
- <value>org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer</value>
+ <value>$HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
</property>
<property>
<name>yarn.client.nodemanager-connect.max-wait-ms</name>
- <value>120000</value>
+ <value>60000</value>
</property>
<property>
@@ -79,11 +72,6 @@
</property>
<property>
- <name>yarn.node-labels.enabled</name>
- <value>false</value>
- </property>
-
- <property>
<name>yarn.node-labels.fs-store.retry-policy-spec</name>
<value>2000, 500</value>
</property>
@@ -94,6 +82,11 @@
</property>
<property>
+ <name>yarn.node-labels.manager-class</name>
+ <value>org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager</value>
+ </property>
+
+ <property>
<name>yarn.nodemanager.address</name>
<value>0.0.0.0:45454</value>
</property>
@@ -105,7 +98,7 @@
<property>
<name>yarn.nodemanager.aux-services</name>
- <value>mapreduce_shuffle,spark_shuffle</value>
+ <value>mapreduce_shuffle</value>
</property>
<property>
@@ -114,11 +107,6 @@
</property>
<property>
- <name>yarn.nodemanager.aux-services.spark_shuffle.class</name>
- <value>org.apache.spark.network.yarn.YarnShuffleService</value>
- </property>
-
- <property>
<name>yarn.nodemanager.bind-host</name>
<value>0.0.0.0</value>
</property>
@@ -160,7 +148,7 @@
<property>
<name>yarn.nodemanager.health-checker.script.timeout-ms</name>
- <value>120000</value>
+ <value>60000</value>
</property>
<property>
@@ -255,12 +243,12 @@
<property>
<name>yarn.nodemanager.resource.memory-mb</name>
- <value>7168</value>
+ <value>9216</value>
</property>
<property>
<name>yarn.nodemanager.resource.percentage-physical-cpu-limit</name>
- <value>80</value>
+ <value>100</value>
</property>
<property>
@@ -349,11 +337,6 @@
</property>
<property>
- <name>yarn.resourcemanager.scheduler.monitor.enable</name>
- <value>false</value>
- </property>
-
- <property>
<name>yarn.resourcemanager.state-store.max-completed-applications</name>
<value>${yarn.resourcemanager.max-completed-applications}</value>
</property>
@@ -385,7 +368,7 @@
<property>
<name>yarn.resourcemanager.webapp.https.address</name>
- <value>sandbox.hortonworks.com:8090</value>
+ <value>localhost:8090</value>
</property>
<property>
@@ -425,7 +408,7 @@
<property>
<name>yarn.resourcemanager.zk-address</name>
- <value>sandbox.hortonworks.com:2181</value>
+ <value>localhost:2181</value>
</property>
<property>
@@ -450,22 +433,12 @@
<property>
<name>yarn.scheduler.maximum-allocation-mb</name>
- <value>7168</value>
- </property>
-
- <property>
- <name>yarn.scheduler.maximum-allocation-vcores</name>
- <value>3</value>
+ <value>9216</value>
</property>
<property>
<name>yarn.scheduler.minimum-allocation-mb</name>
- <value>1024</value>
- </property>
-
- <property>
- <name>yarn.scheduler.minimum-allocation-vcores</name>
- <value>1</value>
+ <value>1536</value>
</property>
<property>
@@ -494,41 +467,6 @@
</property>
<property>
- <name>yarn.timeline-service.entity-group-fs-store.active-dir</name>
- <value>/ats/active/</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds</name>
- <value>3600</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.done-dir</name>
- <value>/ats/done/</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes</name>
- <value>org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.retain-seconds</name>
- <value>604800</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.scan-interval-seconds</name>
- <value>60</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.entity-group-fs-store.summary-store</name>
- <value>org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore</value>
- </property>
-
- <property>
<name>yarn.timeline-service.generic-application-history.store-class</name>
<value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
</property>
@@ -544,11 +482,6 @@
</property>
<property>
- <name>yarn.timeline-service.leveldb-state-store.path</name>
- <value>/hadoop/yarn/timeline</value>
- </property>
-
- <property>
<name>yarn.timeline-service.leveldb-timeline-store.path</name>
<value>/hadoop/yarn/timeline</value>
</property>
@@ -574,23 +507,8 @@
</property>
<property>
- <name>yarn.timeline-service.plugin.enabled</name>
- <value>true</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.recovery.enabled</name>
- <value>true</value>
- </property>
-
- <property>
- <name>yarn.timeline-service.state-store-class</name>
- <value>org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore</value>
- </property>
-
- <property>
<name>yarn.timeline-service.store-class</name>
- <value>org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore</value>
+ <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
</property>
<property>
@@ -604,11 +522,6 @@
</property>
<property>
- <name>yarn.timeline-service.version</name>
- <value>1.5</value>
- </property>
-
- <property>
<name>yarn.timeline-service.webapp.address</name>
<value>sandbox.hortonworks.com:8188</value>
</property>
@@ -618,4 +531,4 @@
<value>sandbox.hortonworks.com:8190</value>
</property>
-</configuration>
\ No newline at end of file
+</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 726d72f..d43bc1e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -32,9 +32,11 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -58,7 +60,6 @@ import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
import org.apache.kylin.source.ISource;
import org.apache.kylin.source.SourceFactory;
import org.apache.kylin.source.SourcePartition;
-import org.apache.kylin.storage.hbase.HBaseConnection;
import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
import org.apache.kylin.tool.StorageCleanupJob;
@@ -95,10 +96,10 @@ public class BuildCubeWithEngine {
logger.error("error", e);
exitCode = 1;
}
-
+
long millis = System.currentTimeMillis() - start;
System.out.println("Time elapsed: " + (millis / 1000) + " sec - in " + BuildCubeWithEngine.class.getName());
-
+
System.exit(exitCode);
}
@@ -358,10 +359,10 @@ public class BuildCubeWithEngine {
@SuppressWarnings("unused")
private void checkHFilesInHBase(CubeSegment segment) throws IOException {
- try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
- String tableName = segment.getStorageLocationIdentifier();
-
- HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+ Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+ String tableName = segment.getStorageLocationIdentifier();
+ try (HTable table = new HTable(conf, tableName)) {
+ HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
long totalSize = 0;
for (Long size : sizeMap.values()) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index b82eee2..f967575 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,20 +46,20 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<!-- Hadoop versions -->
- <hadoop2.version>2.7.1</hadoop2.version>
- <yarn.version>2.7.1</yarn.version>
+ <hadoop2.version>2.6.0</hadoop2.version>
+ <yarn.version>2.6.0</yarn.version>
<!-- Hive versions -->
- <hive.version>1.2.1</hive.version>
- <hive-hcatalog.version>1.2.1</hive-hcatalog.version>
+ <hive.version>0.14.0</hive.version>
+ <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
<!-- HBase versions -->
- <hbase-hadoop2.version>1.1.1</hbase-hadoop2.version>
+ <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
<kafka.version>0.10.1.0</kafka.version>
<!-- Hadoop deps, keep compatible with hadoop2.version -->
<zookeeper.version>3.4.6</zookeeper.version>
- <curator.version>2.7.1</curator.version>
+ <curator.version>2.6.0</curator.version>
<jackson.version>2.2.4</jackson.version>
<jsr305.version>3.0.1</jsr305.version>
<guava.version>14.0</guava.version>
@@ -355,11 +355,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-common</artifactId>
- <version>${hadoop2.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop2.version}</version>
</dependency>
@@ -817,11 +812,6 @@
<id>conjars</id>
<url>http://conjars.org/repo/</url>
</repository>
-
- <repository>
- <id>cloudera</id>
- <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
- </repository>
</repositories>
<build>
@@ -1174,106 +1164,6 @@
</build>
</profile>
<profile>
- <id>cdh5.7</id>
- <properties>
- <hadoop2.version>2.6.0-cdh5.7.0</hadoop2.version>
- <yarn.version>2.6.0-cdh5.7.0</yarn.version>
- <hive.version>1.1.0-cdh5.7.0</hive.version>
- <hive-hcatalog.version>1.1.0-cdh5.7.0</hive-hcatalog.version>
- <hbase-hadoop2.version>1.2.0-cdh5.7.0</hbase-hadoop2.version>
- <zookeeper.version>3.4.5-cdh5.7.0</zookeeper.version>
- </properties>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <fork>true</fork>
- <meminitial>1024m</meminitial>
- <maxmem>2048m</maxmem>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-jamm</id>
- <goals>
- <goal>copy</goal>
- </goals>
- <phase>generate-test-resources</phase>
- <configuration>
- <artifactItems>
- <artifactItem>
- <groupId>com.github.jbellis</groupId>
- <artifactId>jamm</artifactId>
- <outputDirectory>${project.build.testOutputDirectory}</outputDirectory>
- <destFileName>jamm.jar</destFileName>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <groupId>org.jacoco</groupId>
- <artifactId>jacoco-maven-plugin</artifactId>
- <configuration>
- <append>true</append>
- <destFile>
- ${sonar.jacoco.reportPath}
- </destFile>
- </configuration>
- <executions>
- <execution>
- <id>pre-test</id>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- <configuration>
- <propertyName>surefireArgLine</propertyName>
- </configuration>
- </execution>
- <execution>
- <id>post-test</id>
- <phase>test</phase>
- <goals>
- <goal>report</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.19.1</version>
- <configuration>
- <reportsDirectory>${project.basedir}/../target/surefire-reports</reportsDirectory>
- <excludes>
- <exclude>**/IT*.java</exclude>
- </excludes>
- <systemProperties>
- <property>
- <name>buildCubeUsingProvidedData</name>
- <value>false</value>
- </property>
- <property>
- <name>log4j.configuration</name>
- <value>file:${project.basedir}/../build/conf/kylin-tools-log4j.properties</value>
- </property>
- </systemProperties>
- <argLine>-javaagent:${project.build.testOutputDirectory}/jamm.jar ${argLine} ${surefireArgLine}</argLine>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
- <profile>
<!-- This profile adds/overrides few features of the 'apache-release'
profile in the parent pom. -->
<id>apache-release</id>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 8095bf8..ea68855 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
/**
*/
@@ -36,6 +36,6 @@ public interface AclHBaseStorage {
String prepareHBaseTable(Class<?> clazz) throws IOException;
- Table getTable(String tableName) throws IOException;
+ HTableInterface getTable(String tableName) throws IOException;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index cc76b87..d9326f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.rest.service.AclService;
import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
private static final String aclTableName = "MOCK-ACL-TABLE";
private static final String userTableName = "MOCK-USER-TABLE";
- private Table mockedAclTable;
- private Table mockedUserTable;
+ private HTableInterface mockedAclTable;
+ private HTableInterface mockedUserTable;
private RealAclHBaseStorage realAcl;
public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
}
@Override
- public Table getTable(String tableName) throws IOException {
+ public HTableInterface getTable(String tableName) throws IOException {
if (realAcl != null) {
return realAcl.getTable(tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 972eea9..d0aa0ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@ import com.google.protobuf.ServiceException;
* <li>remove some methods for loading data, checking values ...</li>
* </ul>
*/
-public class MockHTable implements Table {
+public class MockHTable implements HTableInterface {
private final String tableName;
private final List<String> columnFamilies = new ArrayList<>();
@@ -114,6 +114,14 @@ public class MockHTable implements Table {
this.columnFamilies.add(columnFamily);
}
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public byte[] getTableName() {
+ return tableName.getBytes();
+ }
+
@Override
public TableName getName() {
return null;
@@ -192,8 +200,8 @@ public class MockHTable implements Table {
}
@Override
- public boolean[] existsAll(List<Get> list) throws IOException {
- return new boolean[0];
+ public Boolean[] exists(List<Get> gets) throws IOException {
+ return new Boolean[0];
}
/**
@@ -298,6 +306,15 @@ public class MockHTable implements Table {
* {@inheritDoc}
*/
@Override
+ public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
+ // FIXME: implement
+ return null;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
public ResultScanner getScanner(Scan scan) throws IOException {
final List<Result> ret = new ArrayList<Result>();
byte[] st = scan.getStartRow();
@@ -429,7 +446,7 @@ public class MockHTable implements Table {
*/
}
if (filter.hasFilterRow() && !filteredOnRowKey) {
- filter.filterRow();
+ filter.filterRow(nkvs);
}
if (filter.filterRow() || filteredOnRowKey) {
nkvs.clear();
@@ -518,11 +535,6 @@ public class MockHTable implements Table {
return false;
}
- @Override
- public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
- return false;
- }
-
/**
* {@inheritDoc}
*/
@@ -543,7 +555,7 @@ public class MockHTable implements Table {
continue;
}
for (KeyValue kv : delete.getFamilyMap().get(family)) {
- if (kv.isDelete()) {
+ if (kv.isDeleteFamily()) {
data.get(row).get(kv.getFamily()).clear();
} else {
data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -580,11 +592,6 @@ public class MockHTable implements Table {
return false;
}
- @Override
- public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
- return false;
- }
-
/**
* {@inheritDoc}
*/
@@ -598,7 +605,7 @@ public class MockHTable implements Table {
*/
@Override
public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
- return incrementColumnValue(row, family, qualifier, amount, null);
+ return incrementColumnValue(row, family, qualifier, amount, true);
}
@Override
@@ -610,6 +617,37 @@ public class MockHTable implements Table {
* {@inheritDoc}
*/
@Override
+ public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
+ if (check(row, family, qualifier, null)) {
+ Put put = new Put(row);
+ put.add(family, qualifier, Bytes.toBytes(amount));
+ put(put);
+ return amount;
+ }
+ long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
+ data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
+ return newValue;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isAutoFlush() {
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void flushCommits() throws IOException {
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
public void close() throws IOException {
}
@@ -635,6 +673,29 @@ public class MockHTable implements Table {
* {@inheritDoc}
*/
@Override
+ public void setAutoFlush(boolean autoFlush) {
+ throw new NotImplementedException();
+
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
+ throw new NotImplementedException();
+
+ }
+
+ @Override
+ public void setAutoFlushTo(boolean autoFlush) {
+ throw new NotImplementedException();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
public long getWriteBufferSize() {
throw new NotImplementedException();
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index d1a1384..1d520c4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,8 +21,7 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.rest.service.AclService;
import org.apache.kylin.rest.service.QueryService;
@@ -59,11 +58,11 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
}
@Override
- public Table getTable(String tableName) throws IOException {
+ public HTableInterface getTable(String tableName) throws IOException {
if (StringUtils.equals(tableName, aclTableName)) {
- return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
+ return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
} else if (StringUtils.equals(tableName, userTableName)) {
- return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+ return HBaseConnection.get(hbaseUrl).getTable(userTableName);
} else {
throw new IllegalStateException("getTable failed" + tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 3e3efec..d693a67 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@ import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public class AclService implements MutableAclService {
@Override
public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -173,7 +173,7 @@ public class AclService implements MutableAclService {
@Override
public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
- Table htable = null;
+ HTableInterface htable = null;
Result result = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,16 +226,17 @@ public class AclService implements MutableAclService {
Authentication auth = SecurityContextHolder.getContext().getAuthentication();
PrincipalSid sid = new PrincipalSid(auth);
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
- put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
- put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
- put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+ put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+ put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+ put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
htable.put(put);
+ htable.flushCommits();
logger.debug("ACL of " + objectIdentity + " created successfully.");
} catch (IOException e) {
@@ -249,7 +250,7 @@ public class AclService implements MutableAclService {
@Override
public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -265,6 +266,7 @@ public class AclService implements MutableAclService {
}
htable.delete(delete);
+ htable.flushCommits();
logger.debug("ACL of " + objectIdentity + " deleted successfully.");
} catch (IOException e) {
@@ -282,7 +284,7 @@ public class AclService implements MutableAclService {
throw e;
}
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -293,16 +295,17 @@ public class AclService implements MutableAclService {
Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
if (null != acl.getParentAcl()) {
- put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+ put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
}
for (AccessControlEntry ace : acl.getEntries()) {
AceInfo aceInfo = new AceInfo(ace);
- put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+ put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
}
if (!put.isEmpty()) {
htable.put(put);
+ htable.flushCommits();
logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index c8c87cb..d28c87c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -27,7 +27,9 @@ import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.cube.CubeInstance;
@@ -393,24 +395,33 @@ public class CubeService extends BasicService {
if (htableInfoCache.containsKey(tableName)) {
return htableInfoCache.get(tableName);
}
- Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
+
+ Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+ HTable table = null;
HBaseResponse hr = null;
long tableSize = 0;
int regionCount = 0;
- HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
- Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+ try {
+ table = new HTable(hconf, tableName);
- for (long s : sizeMap.values()) {
- tableSize += s;
- }
+ HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+ Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
- regionCount = sizeMap.size();
+ for (long s : sizeMap.values()) {
+ tableSize += s;
+ }
+
+ regionCount = sizeMap.size();
+
+ // Set response.
+ hr = new HBaseResponse();
+ hr.setTableSize(tableSize);
+ hr.setRegionCount(regionCount);
+ } finally {
+ IOUtils.closeQuietly(table);
+ }
- // Set response.
- hr = new HBaseResponse();
- hr.setTableSize(tableSize);
- hr.setRegionCount(regionCount);
htableInfoCache.put(tableName, hr);
return hr;
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 7ce38ea..98eb7cb 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -49,11 +49,11 @@ import javax.sql.DataSource;
import org.apache.calcite.avatica.ColumnMetaData.Rep;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.QueryContext;
import org.apache.kylin.common.debug.BackdoorToggles;
@@ -164,13 +164,14 @@ public class QueryService extends BasicService {
Query[] queryArray = new Query[queries.size()];
byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
- Table htable = null;
+ HTableInterface htable = null;
try {
- htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+ htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
Put put = new Put(Bytes.toBytes(creator));
- put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+ put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
htable.put(put);
+ htable.flushCommits();
} finally {
IOUtils.closeQuietly(htable);
}
@@ -196,13 +197,14 @@ public class QueryService extends BasicService {
Query[] queryArray = new Query[queries.size()];
byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
- Table htable = null;
+ HTableInterface htable = null;
try {
- htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+ htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
Put put = new Put(Bytes.toBytes(creator));
- put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+ put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
htable.put(put);
+ htable.flushCommits();
} finally {
IOUtils.closeQuietly(htable);
}
@@ -214,12 +216,12 @@ public class QueryService extends BasicService {
}
List<Query> queries = new ArrayList<Query>();
- Table htable = null;
+ HTableInterface htable = null;
try {
- org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
+ HConnection conn = HBaseConnection.get(hbaseUrl);
HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
- htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+ htable = conn.getTable(userTableName);
Get get = new Get(Bytes.toBytes(creator));
get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
Result result = htable.get(get);
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index ab54882..07c7c6f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@ import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public class UserService implements UserDetailsManager {
@Override
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
@@ -144,16 +144,16 @@ public class UserService implements UserDetailsManager {
@Override
public void updateUser(UserDetails user) {
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
Pair<byte[], byte[]> pair = userToHBaseRow(user);
Put put = new Put(pair.getKey());
-
- put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+ put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
htable.put(put);
+ htable.flushCommits();
} catch (IOException e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
@@ -163,13 +163,14 @@ public class UserService implements UserDetailsManager {
@Override
public void deleteUser(String username) {
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
Delete delete = new Delete(Bytes.toBytes(username));
htable.delete(delete);
+ htable.flushCommits();
} catch (IOException e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
@@ -184,7 +185,7 @@ public class UserService implements UserDetailsManager {
@Override
public boolean userExists(String username) {
- Table htable = null;
+ HTableInterface htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
@@ -215,7 +216,7 @@ public class UserService implements UserDetailsManager {
s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
List<UserDetails> all = new ArrayList<UserDetails>();
- Table htable = null;
+ HTableInterface htable = null;
ResultScanner scanner = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 53c95cb..335bfe7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -40,9 +40,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.kylin.common.KylinConfig;
@@ -64,7 +64,7 @@ public class HBaseConnection {
private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
- private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
+ private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
private static ExecutorService coprocessorPool = null;
@@ -75,7 +75,7 @@ public class HBaseConnection {
public void run() {
closeCoprocessorPool();
- for (Connection conn : connPool.values()) {
+ for (HConnection conn : connPool.values()) {
try {
conn.close();
} catch (IOException e) {
@@ -144,7 +144,7 @@ public class HBaseConnection {
// using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-
+
Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
addHBaseClusterNNHAConfiguration(conf);
@@ -213,9 +213,9 @@ public class HBaseConnection {
// ============================================================================
- // returned Connection can be shared by multiple threads and does not require close()
+ // returned HConnection can be shared by multiple threads and does not require close()
@SuppressWarnings("resource")
- public static Connection get(String url) {
+ public static HConnection get(String url) {
// find configuration
Configuration conf = configCache.get(url);
if (conf == null) {
@@ -223,13 +223,13 @@ public class HBaseConnection {
configCache.put(url, conf);
}
- Connection connection = connPool.get(url);
+ HConnection connection = connPool.get(url);
try {
while (true) {
// I don't use DCL since recreate a connection is not a big issue.
if (connection == null || connection.isClosed()) {
logger.info("connection is null or closed, creating a new one");
- connection = ConnectionFactory.createConnection(conf);
+ connection = HConnectionManager.createConnection(conf);
connPool.put(url, connection);
}
@@ -248,8 +248,8 @@ public class HBaseConnection {
return connection;
}
- public static boolean tableExists(Connection conn, String tableName) throws IOException {
- Admin hbase = conn.getAdmin();
+ public static boolean tableExists(HConnection conn, String tableName) throws IOException {
+ HBaseAdmin hbase = new HBaseAdmin(conn);
try {
return hbase.tableExists(TableName.valueOf(tableName));
} finally {
@@ -269,18 +269,18 @@ public class HBaseConnection {
deleteTable(HBaseConnection.get(hbaseUrl), tableName);
}
- public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
- Admin hbase = conn.getAdmin();
- TableName tableName = TableName.valueOf(table);
+ public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
+ HBaseAdmin hbase = new HBaseAdmin(conn);
+
try {
if (tableExists(conn, table)) {
logger.debug("HTable '" + table + "' already exists");
- Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
+ Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
boolean wait = false;
for (String family : families) {
if (existingFamilies.contains(family) == false) {
logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
- hbase.addColumn(tableName, newFamilyDescriptor(family));
+ hbase.addColumn(table, newFamilyDescriptor(family));
// addColumn() is async, is there a way to wait it finish?
wait = true;
}
@@ -333,8 +333,8 @@ public class HBaseConnection {
return fd;
}
- public static void deleteTable(Connection conn, String tableName) throws IOException {
- Admin hbase = conn.getAdmin();
+ public static void deleteTable(HConnection conn, String tableName) throws IOException {
+ HBaseAdmin hbase = new HBaseAdmin(conn);
try {
if (!tableExists(conn, tableName)) {
@@ -344,10 +344,10 @@ public class HBaseConnection {
logger.debug("delete HTable '" + tableName + "'");
- if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
- hbase.disableTable(TableName.valueOf(tableName));
+ if (hbase.isTableEnabled(tableName)) {
+ hbase.disableTable(tableName);
}
- hbase.deleteTable(TableName.valueOf(tableName));
+ hbase.deleteTable(tableName);
logger.debug("HTable '" + tableName + "' deleted");
} finally {
[07/39] kylin git commit: KYLIN-2419 rollback KYLIN-2292
Posted by li...@apache.org.
KYLIN-2419 rollback KYLIN-2292
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7611338b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7611338b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7611338b
Branch: refs/heads/master-hbase0.98
Commit: 7611338b5f022d216f5c9564a13c161374751adf
Parents: a058bfb
Author: Li Yang <li...@apache.org>
Authored: Sat Feb 4 11:03:25 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Sat Feb 4 11:03:25 2017 +0800
----------------------------------------------------------------------
.../adapter/enumerable/EnumerableWindow.java | 981 -------------------
.../calcite/adapter/enumerable/PhysType.java | 209 ----
.../adapter/enumerable/PhysTypeImpl.java | 654 -------------
3 files changed, 1844 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/7611338b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindow.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindow.java b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindow.java
deleted file mode 100644
index 216b07c..0000000
--- a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindow.java
+++ /dev/null
@@ -1,981 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.calcite.adapter.enumerable;
-
-import org.apache.calcite.adapter.enumerable.impl.WinAggAddContextImpl;
-import org.apache.calcite.adapter.enumerable.impl.WinAggResetContextImpl;
-import org.apache.calcite.adapter.enumerable.impl.WinAggResultContextImpl;
-import org.apache.calcite.adapter.java.JavaTypeFactory;
-import org.apache.calcite.linq4j.tree.BinaryExpression;
-import org.apache.calcite.linq4j.tree.BlockBuilder;
-import org.apache.calcite.linq4j.tree.BlockStatement;
-import org.apache.calcite.linq4j.tree.DeclarationStatement;
-import org.apache.calcite.linq4j.tree.Expression;
-import org.apache.calcite.linq4j.tree.Expressions;
-import org.apache.calcite.linq4j.tree.ParameterExpression;
-import org.apache.calcite.linq4j.tree.Primitive;
-import org.apache.calcite.linq4j.tree.Statement;
-import org.apache.calcite.linq4j.tree.Types;
-import org.apache.calcite.plan.RelOptCluster;
-import org.apache.calcite.plan.RelOptCost;
-import org.apache.calcite.plan.RelOptPlanner;
-import org.apache.calcite.plan.RelTraitSet;
-import org.apache.calcite.prepare.CalcitePrepareImpl;
-import org.apache.calcite.rel.RelFieldCollation;
-import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.core.AggregateCall;
-import org.apache.calcite.rel.core.Window;
-import org.apache.calcite.rel.metadata.RelMetadataQuery;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rex.RexInputRef;
-import org.apache.calcite.rex.RexLiteral;
-import org.apache.calcite.rex.RexNode;
-import org.apache.calcite.rex.RexWindowBound;
-import org.apache.calcite.runtime.SortedMultiMap;
-import org.apache.calcite.sql.SqlAggFunction;
-import org.apache.calcite.util.BuiltInMethod;
-import org.apache.calcite.util.Pair;
-import org.apache.calcite.util.Util;
-
-import com.google.common.base.Function;
-import com.google.common.collect.ImmutableList;
-
-import java.lang.reflect.Modifier;
-import java.lang.reflect.Type;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/*
- * OVERRIDE POINT: patching CALCITE-1540 on calcite 1.8.0
- */
-
-/** Implementation of {@link org.apache.calcite.rel.core.Window} in
- * {@link org.apache.calcite.adapter.enumerable.EnumerableConvention enumerable calling convention}. */
-public class EnumerableWindow extends Window implements EnumerableRel {
- /** Creates an EnumerableWindowRel. */
- EnumerableWindow(RelOptCluster cluster, RelTraitSet traits, RelNode child,
- List<RexLiteral> constants, RelDataType rowType, List<Group> groups) {
- super(cluster, traits, child, constants, rowType, groups);
- }
-
- @Override public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
- return new EnumerableWindow(getCluster(), traitSet, sole(inputs),
- constants, rowType, groups);
- }
-
- @Override
- public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
- return super.computeSelfCost(planner, mq)
- .multiplyBy(EnumerableConvention.COST_MULTIPLIER);
- }
-
- /** Implementation of {@link RexToLixTranslator.InputGetter}
- * suitable for generating implementations of windowed aggregate
- * functions. */
- private static class WindowRelInputGetter
- implements RexToLixTranslator.InputGetter {
- private final Expression row;
- private final PhysType rowPhysType;
- private final int actualInputFieldCount;
- private final List<Expression> constants;
-
- private WindowRelInputGetter(Expression row,
- PhysType rowPhysType, int actualInputFieldCount,
- List<Expression> constants) {
- this.row = row;
- this.rowPhysType = rowPhysType;
- this.actualInputFieldCount = actualInputFieldCount;
- this.constants = constants;
- }
-
- @Override
- public Expression field(BlockBuilder list, int index, Type storageType) {
- if (index < actualInputFieldCount) {
- Expression current = list.append("current", row);
- return rowPhysType.fieldReference(current, index, storageType);
- }
- return constants.get(index - actualInputFieldCount);
- }
- }
-
- private void sampleOfTheGeneratedWindowedAggregate() {
- // Here's overview of the generated code
- // For each list of rows that have the same partitioning key, evaluate
- // all of the windowed aggregate functions.
-
- // builder
- Iterator<Integer[]> iterator = null;
-
- // builder3
- Integer[] rows = iterator.next();
-
- int prevStart = -1;
- int prevEnd = -1;
-
- for (int i = 0; i < rows.length; i++) {
- // builder4
- Integer row = rows[i];
-
- int start = 0;
- int end = 100;
- if (start != prevStart || end != prevEnd) {
- // builder5
- int actualStart = 0;
- if (start != prevStart || end < prevEnd) {
- // builder6
- // recompute
- actualStart = start;
- // implementReset
- } else { // must be start == prevStart && end > prevEnd
- actualStart = prevEnd + 1;
- }
- prevStart = start;
- prevEnd = end;
-
- if (start != -1) {
- for (int j = actualStart; j <= end; j++) {
- // builder7
- // implementAdd
- }
- }
- // implementResult
- // list.add(new Xxx(row.deptno, row.empid, sum, count));
- }
- }
- // multiMap.clear(); // allows gc
- // source = Linq4j.asEnumerable(list);
- }
-
- @Override
- public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
- final JavaTypeFactory typeFactory = implementor.getTypeFactory();
- final EnumerableRel child = (EnumerableRel) getInput();
- final BlockBuilder builder = new BlockBuilder();
- final Result result = implementor.visitChild(this, 0, child, pref);
- Expression source_ = builder.append("source", result.block);
-
- final List<Expression> translatedConstants =
- new ArrayList<>(constants.size());
- for (RexLiteral constant : constants) {
- translatedConstants.add(
- RexToLixTranslator.translateLiteral(constant, constant.getType(),
- typeFactory, RexImpTable.NullAs.NULL));
- }
-
- PhysType inputPhysType = result.physType;
-
- ParameterExpression prevStart =
- Expressions.parameter(int.class, builder.newName("prevStart"));
- ParameterExpression prevEnd =
- Expressions.parameter(int.class, builder.newName("prevEnd"));
-
- builder.add(Expressions.declare(0, prevStart, null));
- builder.add(Expressions.declare(0, prevEnd, null));
-
- for (int windowIdx = 0; windowIdx < groups.size(); windowIdx++) {
- Group group = groups.get(windowIdx);
- // Comparator:
- // final Comparator<JdbcTest.Employee> comparator =
- // new Comparator<JdbcTest.Employee>() {
- // public int compare(JdbcTest.Employee o1,
- // JdbcTest.Employee o2) {
- // return Integer.compare(o1.empid, o2.empid);
- // }
- // };
- final Expression comparator_ =
- builder.append(
- "comparator",
- inputPhysType.generateComparator(
- group.collation()));
-
- Pair<Expression, Expression> partitionIterator =
- getPartitionIterator(builder, source_, inputPhysType, group,
- comparator_);
- final Expression collectionExpr = partitionIterator.left;
- final Expression iterator_ = partitionIterator.right;
-
- List<AggImpState> aggs = new ArrayList<AggImpState>();
- List<AggregateCall> aggregateCalls = group.getAggregateCalls(this);
- for (int aggIdx = 0; aggIdx < aggregateCalls.size(); aggIdx++) {
- AggregateCall call = aggregateCalls.get(aggIdx);
- aggs.add(new AggImpState(aggIdx, call, true));
- }
-
- // The output from this stage is the input plus the aggregate functions.
- final RelDataTypeFactory.FieldInfoBuilder typeBuilder =
- typeFactory.builder();
- typeBuilder.addAll(inputPhysType.getRowType().getFieldList());
- for (AggImpState agg : aggs) {
- typeBuilder.add(agg.call.name, agg.call.type);
- }
- RelDataType outputRowType = typeBuilder.build();
- final PhysType outputPhysType =
- PhysTypeImpl.of(
- typeFactory, outputRowType, pref.prefer(result.format));
-
- final Expression list_ =
- builder.append(
- "list",
- Expressions.new_(
- ArrayList.class,
- Expressions.call(
- collectionExpr, BuiltInMethod.COLLECTION_SIZE.method)),
- false);
-
- Pair<Expression, Expression> collationKey =
- getRowCollationKey(builder, inputPhysType, group, windowIdx);
- Expression keySelector = collationKey.left;
- Expression keyComparator = collationKey.right;
- final BlockBuilder builder3 = new BlockBuilder();
- final Expression rows_ =
- builder3.append(
- "rows",
- Expressions.convert_(
- Expressions.call(
- iterator_, BuiltInMethod.ITERATOR_NEXT.method),
- Object[].class),
- false);
-
- builder3.add(
- Expressions.statement(
- Expressions.assign(prevStart, Expressions.constant(-1))));
- builder3.add(
- Expressions.statement(
- Expressions.assign(prevEnd,
- Expressions.constant(Integer.MAX_VALUE))));
-
- final BlockBuilder builder4 = new BlockBuilder();
-
- final ParameterExpression i_ =
- Expressions.parameter(int.class, builder4.newName("i"));
-
- final Expression row_ =
- builder4.append(
- "row",
- RexToLixTranslator.convert(
- Expressions.arrayIndex(rows_, i_),
- inputPhysType.getJavaRowType()));
-
- final RexToLixTranslator.InputGetter inputGetter =
- new WindowRelInputGetter(row_, inputPhysType,
- result.physType.getRowType().getFieldCount(),
- translatedConstants);
-
- final RexToLixTranslator translator =
- RexToLixTranslator.forAggregation(typeFactory, builder4,
- inputGetter);
-
- final List<Expression> outputRow = new ArrayList<Expression>();
- int fieldCountWithAggResults =
- inputPhysType.getRowType().getFieldCount();
- for (int i = 0; i < fieldCountWithAggResults; i++) {
- outputRow.add(
- inputPhysType.fieldReference(
- row_, i,
- outputPhysType.getJavaFieldType(i)));
- }
-
- declareAndResetState(typeFactory, builder, result, windowIdx, aggs,
- outputPhysType, outputRow);
-
- // There are assumptions that minX==0. If ever change this, look for
- // frameRowCount, bounds checking, etc
- final Expression minX = Expressions.constant(0);
- final Expression partitionRowCount =
- builder3.append("partRows", Expressions.field(rows_, "length"));
- final Expression maxX = builder3.append("maxX",
- Expressions.subtract(
- partitionRowCount, Expressions.constant(1)));
-
- final Expression startUnchecked = builder4.append("start",
- translateBound(translator, i_, row_, minX, maxX, rows_,
- group, true,
- inputPhysType, comparator_, keySelector, keyComparator));
- final Expression endUnchecked = builder4.append("end",
- translateBound(translator, i_, row_, minX, maxX, rows_,
- group, false,
- inputPhysType, comparator_, keySelector, keyComparator));
-
- final Expression startX;
- final Expression endX;
- final Expression hasRows;
- if (group.isAlwaysNonEmpty()) {
- startX = startUnchecked;
- endX = endUnchecked;
- hasRows = Expressions.constant(true);
- } else {
- Expression startTmp =
- group.lowerBound.isUnbounded() || startUnchecked == i_
- ? startUnchecked
- : builder4.append("startTmp",
- Expressions.call(null, BuiltInMethod.MATH_MAX.method,
- startUnchecked, minX));
- Expression endTmp =
- group.upperBound.isUnbounded() || endUnchecked == i_
- ? endUnchecked
- : builder4.append("endTmp",
- Expressions.call(null, BuiltInMethod.MATH_MIN.method,
- endUnchecked, maxX));
-
- ParameterExpression startPe = Expressions.parameter(0, int.class,
- builder4.newName("startChecked"));
- ParameterExpression endPe = Expressions.parameter(0, int.class,
- builder4.newName("endChecked"));
- builder4.add(Expressions.declare(Modifier.FINAL, startPe, null));
- builder4.add(Expressions.declare(Modifier.FINAL, endPe, null));
-
- hasRows = builder4.append("hasRows",
- Expressions.lessThanOrEqual(startTmp, endTmp));
- builder4.add(
- Expressions.ifThenElse(hasRows,
- Expressions.block(
- Expressions.statement(
- Expressions.assign(startPe, startTmp)),
- Expressions.statement(
- Expressions.assign(endPe, endTmp))),
- Expressions.block(
- Expressions.statement(
- Expressions.assign(startPe, Expressions.constant(-1))),
- Expressions.statement(
- Expressions.assign(endPe, Expressions.constant(-1))))));
- startX = startPe;
- endX = endPe;
- }
-
- final BlockBuilder builder5 = new BlockBuilder(true, builder4);
-
- BinaryExpression rowCountWhenNonEmpty = Expressions.add(
- startX == minX ? endX : Expressions.subtract(endX, startX),
- Expressions.constant(1));
-
- final Expression frameRowCount;
-
- if (hasRows.equals(Expressions.constant(true))) {
- frameRowCount =
- builder4.append("totalRows", rowCountWhenNonEmpty);
- } else {
- frameRowCount =
- builder4.append("totalRows",
- Expressions.condition(hasRows, rowCountWhenNonEmpty,
- Expressions.constant(0)));
- }
-
- ParameterExpression actualStart = Expressions.parameter(
- 0, int.class, builder5.newName("actualStart"));
-
- final BlockBuilder builder6 = new BlockBuilder(true, builder5);
- builder6.add(
- Expressions.statement(Expressions.assign(actualStart, startX)));
-
- for (final AggImpState agg : aggs) {
- agg.implementor.implementReset(agg.context,
- new WinAggResetContextImpl(builder6, agg.state, i_, startX, endX,
- hasRows, partitionRowCount, frameRowCount));
- }
-
- Expression lowerBoundCanChange =
- group.lowerBound.isUnbounded() && group.lowerBound.isPreceding()
- ? Expressions.constant(false)
- : Expressions.notEqual(startX, prevStart);
- Expression needRecomputeWindow = Expressions.orElse(
- lowerBoundCanChange,
- Expressions.lessThan(endX, prevEnd));
-
- BlockStatement resetWindowState = builder6.toBlock();
- if (resetWindowState.statements.size() == 1) {
- builder5.add(
- Expressions.declare(0, actualStart,
- Expressions.condition(needRecomputeWindow, startX,
- Expressions.add(prevEnd, Expressions.constant(1)))));
- } else {
- builder5.add(
- Expressions.declare(0, actualStart, null));
- builder5.add(
- Expressions.ifThenElse(needRecomputeWindow,
- resetWindowState,
- Expressions.statement(
- Expressions.assign(actualStart,
- Expressions.add(prevEnd, Expressions.constant(1))))));
- }
-
- if (lowerBoundCanChange instanceof BinaryExpression) {
- builder5.add(
- Expressions.statement(Expressions.assign(prevStart, startX)));
- }
- builder5.add(
- Expressions.statement(Expressions.assign(prevEnd, endX)));
-
- final BlockBuilder builder7 = new BlockBuilder(true, builder5);
- final DeclarationStatement jDecl =
- Expressions.declare(0, "j", actualStart);
-
- final PhysType inputPhysTypeFinal = inputPhysType;
- final Function<BlockBuilder, WinAggFrameResultContext>
- resultContextBuilder =
- getBlockBuilderWinAggFrameResultContextFunction(typeFactory, result,
- translatedConstants, comparator_, rows_, i_, startX, endX,
- minX, maxX,
- hasRows, frameRowCount, partitionRowCount,
- jDecl, inputPhysTypeFinal);
-
- final Function<AggImpState, List<RexNode>> rexArguments =
- new Function<AggImpState, List<RexNode>>() {
- public List<RexNode> apply(AggImpState agg) {
- List<Integer> argList = agg.call.getArgList();
- List<RelDataType> inputTypes =
- EnumUtils.fieldRowTypes(
- result.physType.getRowType(),
- constants,
- argList);
- List<RexNode> args = new ArrayList<RexNode>(
- inputTypes.size());
- for (int i = 0; i < argList.size(); i++) {
- Integer idx = argList.get(i);
- args.add(new RexInputRef(idx, inputTypes.get(i)));
- }
- return args;
- }
- };
-
- implementAdd(aggs, builder7, resultContextBuilder, rexArguments, jDecl);
-
- BlockStatement forBlock = builder7.toBlock();
- if (!forBlock.statements.isEmpty()) {
- // For instance, row_number does not use for loop to compute the value
- Statement forAggLoop = Expressions.for_(
- Arrays.asList(jDecl),
- Expressions.lessThanOrEqual(jDecl.parameter, endX),
- Expressions.preIncrementAssign(jDecl.parameter),
- forBlock);
- if (!hasRows.equals(Expressions.constant(true))) {
- forAggLoop = Expressions.ifThen(hasRows, forAggLoop);
- }
- builder5.add(forAggLoop);
- }
-
- if (implementResult(aggs, builder5, resultContextBuilder, rexArguments,
- true)) {
- builder4.add(
- Expressions.ifThen(
- Expressions.orElse(lowerBoundCanChange,
- Expressions.notEqual(endX, prevEnd)),
- builder5.toBlock()));
- }
-
- implementResult(aggs, builder4, resultContextBuilder, rexArguments,
- false);
-
- builder4.add(
- Expressions.statement(
- Expressions.call(
- list_,
- BuiltInMethod.COLLECTION_ADD.method,
- outputPhysType.record(outputRow))));
-
- builder3.add(
- Expressions.for_(
- Expressions.declare(0, i_, Expressions.constant(0)),
- Expressions.lessThan(
- i_,
- Expressions.field(rows_, "length")),
- Expressions.preIncrementAssign(i_),
- builder4.toBlock()));
-
- builder.add(
- Expressions.while_(
- Expressions.call(
- iterator_,
- BuiltInMethod.ITERATOR_HAS_NEXT.method),
- builder3.toBlock()));
- builder.add(
- Expressions.statement(
- Expressions.call(
- collectionExpr,
- BuiltInMethod.MAP_CLEAR.method)));
-
- // We're not assigning to "source". For each group, create a new
- // final variable called "source" or "sourceN".
- source_ =
- builder.append(
- "source",
- Expressions.call(
- BuiltInMethod.AS_ENUMERABLE.method, list_));
-
- inputPhysType = outputPhysType;
- }
-
- // return Linq4j.asEnumerable(list);
- builder.add(
- Expressions.return_(null, source_));
- return implementor.result(inputPhysType, builder.toBlock());
- }
-
- private Function<BlockBuilder, WinAggFrameResultContext>
- getBlockBuilderWinAggFrameResultContextFunction(
- final JavaTypeFactory typeFactory, final Result result,
- final List<Expression> translatedConstants,
- final Expression comparator_,
- final Expression rows_, final ParameterExpression i_,
- final Expression startX, final Expression endX,
- final Expression minX, final Expression maxX,
- final Expression hasRows, final Expression frameRowCount,
- final Expression partitionRowCount,
- final DeclarationStatement jDecl,
- final PhysType inputPhysType) {
- return new Function<BlockBuilder,
- WinAggFrameResultContext>() {
- public WinAggFrameResultContext apply(
- final BlockBuilder block) {
- return new WinAggFrameResultContext() {
- public RexToLixTranslator rowTranslator(Expression rowIndex) {
- Expression row =
- getRow(rowIndex);
- final RexToLixTranslator.InputGetter inputGetter =
- new WindowRelInputGetter(row, inputPhysType,
- result.physType.getRowType().getFieldCount(),
- translatedConstants);
-
- return RexToLixTranslator.forAggregation(typeFactory,
- block, inputGetter);
- }
-
- public Expression computeIndex(Expression offset,
- WinAggImplementor.SeekType seekType) {
- Expression index;
- if (seekType == WinAggImplementor.SeekType.AGG_INDEX) {
- index = jDecl.parameter;
- } else if (seekType == WinAggImplementor.SeekType.SET) {
- index = i_;
- } else if (seekType == WinAggImplementor.SeekType.START) {
- index = startX;
- } else if (seekType == WinAggImplementor.SeekType.END) {
- index = endX;
- } else {
- throw new IllegalArgumentException("SeekSet " + seekType
- + " is not supported");
- }
- if (!Expressions.constant(0).equals(offset)) {
- index = block.append("idx", Expressions.add(index, offset));
- }
- return index;
- }
-
- private Expression checkBounds(Expression rowIndex,
- Expression minIndex, Expression maxIndex) {
- if (rowIndex == i_ || rowIndex == startX || rowIndex == endX) {
- // No additional bounds check required
- return hasRows;
- }
-
- //noinspection UnnecessaryLocalVariable
- Expression res = block.append("rowInFrame",
- Expressions.foldAnd(
- ImmutableList.of(hasRows,
- Expressions.greaterThanOrEqual(rowIndex, minIndex),
- Expressions.lessThanOrEqual(rowIndex, maxIndex))));
-
- return res;
- }
-
- public Expression rowInFrame(Expression rowIndex) {
- return checkBounds(rowIndex, startX, endX);
- }
-
- public Expression rowInPartition(Expression rowIndex) {
- return checkBounds(rowIndex, minX, maxX);
- }
-
- public Expression compareRows(Expression a, Expression b) {
- return Expressions.call(comparator_,
- BuiltInMethod.COMPARATOR_COMPARE.method,
- getRow(a), getRow(b));
- }
-
- public Expression getRow(Expression rowIndex) {
- return block.append(
- "jRow",
- RexToLixTranslator.convert(
- Expressions.arrayIndex(rows_, rowIndex),
- inputPhysType.getJavaRowType()));
- }
-
- public Expression index() {
- return i_;
- }
-
- public Expression startIndex() {
- return startX;
- }
-
- public Expression endIndex() {
- return endX;
- }
-
- public Expression hasRows() {
- return hasRows;
- }
-
- public Expression getFrameRowCount() {
- return frameRowCount;
- }
-
- public Expression getPartitionRowCount() {
- return partitionRowCount;
- }
- };
- }
- };
- }
-
- private Pair<Expression, Expression> getPartitionIterator(
- BlockBuilder builder,
- Expression source_,
- PhysType inputPhysType,
- Group group,
- Expression comparator_) {
- // Populate map of lists, one per partition
- // final Map<Integer, List<Employee>> multiMap =
- // new SortedMultiMap<Integer, List<Employee>>();
- // source.foreach(
- // new Function1<Employee, Void>() {
- // public Void apply(Employee v) {
- // final Integer k = v.deptno;
- // multiMap.putMulti(k, v);
- // return null;
- // }
- // });
- // final List<Xxx> list = new ArrayList<Xxx>(multiMap.size());
- // Iterator<Employee[]> iterator = multiMap.arrays(comparator);
- //
- if (group.keys.isEmpty()) {
- // If partition key is empty, no need to partition.
- //
- // final List<Employee> tempList =
- // source.into(new ArrayList<Employee>());
- // Iterator<Employee[]> iterator =
- // SortedMultiMap.singletonArrayIterator(comparator, tempList);
- // final List<Xxx> list = new ArrayList<Xxx>(tempList.size());
-
- final Expression tempList_ = builder.append(
- "tempList",
- Expressions.convert_(
- Expressions.call(
- source_,
- BuiltInMethod.INTO.method,
- Expressions.new_(ArrayList.class)),
- List.class));
- return Pair.of(tempList_,
- builder.append(
- "iterator",
- Expressions.call(
- null,
- BuiltInMethod.SORTED_MULTI_MAP_SINGLETON.method,
- comparator_,
- tempList_)));
- }
- Expression multiMap_ =
- builder.append(
- "multiMap", Expressions.new_(SortedMultiMap.class));
- final BlockBuilder builder2 = new BlockBuilder();
- final ParameterExpression v_ =
- Expressions.parameter(inputPhysType.getJavaRowType(),
- builder2.newName("v"));
-
- Pair<Type, List<Expression>> selector = inputPhysType.selector(
- v_,
- group.keys.asList(),
- JavaRowFormat.CUSTOM);
- final ParameterExpression key_;
- if(selector.left instanceof Types.RecordType) {
- Types.RecordType keyJavaType = (Types.RecordType) selector.left;
- List<Expression> initExpressions = selector.right;
-
- key_ = Expressions.parameter(keyJavaType, "key");
- builder2.add(Expressions.declare(0, key_, null));
- builder2.add(Expressions.statement(Expressions.assign(key_, Expressions.new_(keyJavaType))));
- List<Types.RecordField> fieldList = keyJavaType.getRecordFields();
- for (int i = 0; i < initExpressions.size(); i++) {
- Expression right = initExpressions.get(i);
- builder2.add(
- Expressions.statement(
- Expressions.assign(
- Expressions.field(key_, fieldList.get(i)), right)));
- }
- }
- else
- {
- DeclarationStatement declare = Expressions.declare(0, "key", selector.right.get(0));
- builder2.add(declare);
- key_ = declare.parameter;
- }
- builder2.add(
- Expressions.statement(
- Expressions.call(
- multiMap_,
- BuiltInMethod.SORTED_MULTI_MAP_PUT_MULTI.method,
- key_,
- v_)));
- builder2.add(
- Expressions.return_(
- null, Expressions.constant(null)));
-
- builder.add(
- Expressions.statement(
- Expressions.call(
- source_,
- BuiltInMethod.ENUMERABLE_FOREACH.method,
- Expressions.lambda(
- builder2.toBlock(), v_))));
-
- return Pair.of(multiMap_,
- builder.append(
- "iterator",
- Expressions.call(
- multiMap_,
- BuiltInMethod.SORTED_MULTI_MAP_ARRAYS.method,
- comparator_)));
- }
-
- private Pair<Expression, Expression> getRowCollationKey(
- BlockBuilder builder, PhysType inputPhysType,
- Group group, int windowIdx) {
- if (!(group.isRows || (group.upperBound.isUnbounded()
- && group.lowerBound.isUnbounded()))) {
- Pair<Expression, Expression> pair =
- inputPhysType.generateCollationKey(
- group.collation().getFieldCollations());
- // optimize=false to prevent inlining of object create into for-loops
- return Pair.of(
- builder.append("keySelector" + windowIdx, pair.left, false),
- builder.append("keyComparator" + windowIdx, pair.right, false));
- } else {
- return Pair.of(null, null);
- }
- }
-
- private void declareAndResetState(final JavaTypeFactory typeFactory,
- BlockBuilder builder, final Result result, int windowIdx,
- List<AggImpState> aggs, PhysType outputPhysType,
- List<Expression> outputRow) {
- for (final AggImpState agg : aggs) {
- agg.context =
- new WinAggContext() {
- public SqlAggFunction aggregation() {
- return agg.call.getAggregation();
- }
-
- public RelDataType returnRelType() {
- return agg.call.type;
- }
-
- public Type returnType() {
- return EnumUtils.javaClass(typeFactory, returnRelType());
- }
-
- public List<? extends Type> parameterTypes() {
- return EnumUtils.fieldTypes(typeFactory,
- parameterRelTypes());
- }
-
- public List<? extends RelDataType> parameterRelTypes() {
- return EnumUtils.fieldRowTypes(result.physType.getRowType(),
- constants, agg.call.getArgList());
- }
- };
- String aggName = "a" + agg.aggIdx;
- if (CalcitePrepareImpl.DEBUG) {
- aggName = Util.toJavaId(agg.call.getAggregation().getName(), 0)
- .substring("ID$0$".length()) + aggName;
- }
- List<Type> state = agg.implementor.getStateType(agg.context);
- final List<Expression> decls =
- new ArrayList<Expression>(state.size());
- for (int i = 0; i < state.size(); i++) {
- Type type = state.get(i);
- ParameterExpression pe =
- Expressions.parameter(type,
- builder.newName(aggName
- + "s" + i + "w" + windowIdx));
- builder.add(Expressions.declare(0, pe, null));
- decls.add(pe);
- }
- agg.state = decls;
- Type aggHolderType = agg.context.returnType();
- Type aggStorageType =
- outputPhysType.getJavaFieldType(outputRow.size());
- if (Primitive.is(aggHolderType) && !Primitive.is(aggStorageType)) {
- aggHolderType = Primitive.box(aggHolderType);
- }
- ParameterExpression aggRes = Expressions.parameter(0,
- aggHolderType,
- builder.newName(aggName + "w" + windowIdx));
-
- builder.add(
- Expressions.declare(0, aggRes,
- Expressions.constant(Primitive.is(aggRes.getType())
- ? Primitive.of(aggRes.getType()).defaultValue
- : null,
- aggRes.getType())));
- agg.result = aggRes;
- outputRow.add(aggRes);
- agg.implementor.implementReset(agg.context,
- new WinAggResetContextImpl(builder, agg.state,
- null, null, null, null, null, null));
- }
- }
-
- private void implementAdd(List<AggImpState> aggs,
- final BlockBuilder builder7,
- final Function<BlockBuilder, WinAggFrameResultContext> frame,
- final Function<AggImpState, List<RexNode>> rexArguments,
- final DeclarationStatement jDecl) {
- for (final AggImpState agg : aggs) {
- final WinAggAddContext addContext =
- new WinAggAddContextImpl(builder7, agg.state, frame) {
- public Expression currentPosition() {
- return jDecl.parameter;
- }
-
- public List<RexNode> rexArguments() {
- return rexArguments.apply(agg);
- }
-
- public RexNode rexFilterArgument() {
- return null; // REVIEW
- }
- };
- agg.implementor.implementAdd(agg.context, addContext);
- }
- }
-
- private boolean implementResult(List<AggImpState> aggs,
- final BlockBuilder builder,
- final Function<BlockBuilder, WinAggFrameResultContext> frame,
- final Function<AggImpState, List<RexNode>> rexArguments,
- boolean cachedBlock) {
- boolean nonEmpty = false;
- for (final AggImpState agg : aggs) {
- boolean needCache = true;
- if (agg.implementor instanceof WinAggImplementor) {
- WinAggImplementor imp = (WinAggImplementor) agg.implementor;
- needCache = imp.needCacheWhenFrameIntact();
- }
- if (needCache ^ cachedBlock) {
- // Regular aggregates do not change when the windowing frame keeps
- // the same. Ths
- continue;
- }
- nonEmpty = true;
- Expression res = agg.implementor.implementResult(agg.context,
- new WinAggResultContextImpl(builder, agg.state, frame) {
- public List<RexNode> rexArguments() {
- return rexArguments.apply(agg);
- }
- });
- // Several count(a) and count(b) might share the result
- Expression aggRes = builder.append("a" + agg.aggIdx + "res",
- RexToLixTranslator.convert(res, agg.result.getType()));
- builder.add(
- Expressions.statement(Expressions.assign(agg.result, aggRes)));
- }
- return nonEmpty;
- }
-
- private Expression translateBound(RexToLixTranslator translator,
- ParameterExpression i_, Expression row_, Expression min_,
- Expression max_, Expression rows_, Group group,
- boolean lower,
- PhysType physType, Expression rowComparator,
- Expression keySelector, Expression keyComparator) {
- RexWindowBound bound = lower ? group.lowerBound : group.upperBound;
- if (bound.isUnbounded()) {
- return bound.isPreceding() ? min_ : max_;
- }
- if (group.isRows) {
- if (bound.isCurrentRow()) {
- return i_;
- }
- RexNode node = bound.getOffset();
- Expression offs = translator.translate(node);
- // Floating offset does not make sense since we refer to array index.
- // Nulls do not make sense as well.
- offs = RexToLixTranslator.convert(offs, int.class);
-
- Expression b = i_;
- if (bound.isFollowing()) {
- b = Expressions.add(b, offs);
- } else {
- b = Expressions.subtract(b, offs);
- }
- return b;
- }
- Expression searchLower = min_;
- Expression searchUpper = max_;
- if (bound.isCurrentRow()) {
- if (lower) {
- searchUpper = i_;
- } else {
- searchLower = i_;
- }
- }
-
- List<RelFieldCollation> fieldCollations =
- group.collation().getFieldCollations();
- if (bound.isCurrentRow() && fieldCollations.size() != 1) {
- return Expressions.call(
- (lower
- ? BuiltInMethod.BINARY_SEARCH5_LOWER
- : BuiltInMethod.BINARY_SEARCH5_UPPER).method,
- rows_, row_, searchLower, searchUpper, keySelector, keyComparator);
- }
- assert fieldCollations.size() == 1
- : "When using range window specification, ORDER BY should have"
- + " exactly one expression."
- + " Actual collation is " + group.collation();
- // isRange
- int orderKey =
- fieldCollations.get(0).getFieldIndex();
- RelDataType keyType =
- physType.getRowType().getFieldList().get(orderKey).getType();
- Type desiredKeyType = translator.typeFactory.getJavaClass(keyType);
- if (bound.getOffset() == null) {
- desiredKeyType = Primitive.box(desiredKeyType);
- }
- Expression val = translator.translate(
- new RexInputRef(orderKey, keyType), desiredKeyType);
- if (!bound.isCurrentRow()) {
- RexNode node = bound.getOffset();
- Expression offs = translator.translate(node);
- // TODO: support date + interval somehow
- if (bound.isFollowing()) {
- val = Expressions.add(val, offs);
- } else {
- val = Expressions.subtract(val, offs);
- }
- }
- return Expressions.call(
- (lower
- ? BuiltInMethod.BINARY_SEARCH6_LOWER
- : BuiltInMethod.BINARY_SEARCH6_UPPER).method,
- rows_, val, searchLower, searchUpper, keySelector, keyComparator);
- }
-}
-
-// End EnumerableWindow.java
http://git-wip-us.apache.org/repos/asf/kylin/blob/7611338b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysType.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysType.java b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysType.java
deleted file mode 100644
index e37b196..0000000
--- a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysType.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.calcite.adapter.enumerable;
-
-import org.apache.calcite.linq4j.tree.Expression;
-import org.apache.calcite.linq4j.tree.ParameterExpression;
-import org.apache.calcite.rel.RelCollation;
-import org.apache.calcite.rel.RelFieldCollation;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.util.Pair;
-
-import java.lang.reflect.Type;
-import java.util.List;
-
-/*
- * OVERRIDE POINT: patching CALCITE-1540 on calcite 1.8.0
- */
-
-/**
- * Physical type of a row.
- *
- * <p>Consists of the SQL row type (returned by {@link #getRowType()}), the Java
- * type of the row (returned by {@link #getJavaRowType()}), and methods to
- * generate expressions to access fields, generate records, and so forth.
- * Together, the records encapsulate how the logical type maps onto the physical
- * type.</p>
- */
-public interface PhysType {
- /** Returns the Java type (often a Class) that represents a row. For
- * example, in one row format, always returns {@code Object[].class}. */
- Type getJavaRowType();
-
- /**
- * Returns the Java class that is used to store the field with the given
- * ordinal.
- *
- * <p>For instance, when the java row type is {@code Object[]}, the java
- * field type is {@code Object} even if the field is not nullable.</p> */
- Type getJavaFieldType(int field);
-
- /** Returns the physical type of a field. */
- PhysType field(int ordinal);
-
- /** Returns the physical type of a given field's component type. */
- PhysType component(int field);
-
- /** Returns the SQL row type. */
- RelDataType getRowType();
-
- /** Returns the Java class of the field with the given ordinal. */
- Class fieldClass(int field);
-
- /** Returns whether a given field allows null values. */
- boolean fieldNullable(int index);
-
- /** Generates a reference to a given field in an expression.
- *
- * <p>For example given {@code expression=employee} and {@code field=2},
- * generates</p>
- *
- * <pre>{@code employee.deptno}</pre>
- *
- * @param expression Expression
- * @param field Ordinal of field
- * @return Expression to access the field of the expression
- */
- Expression fieldReference(Expression expression, int field);
-
- /** Generates a reference to a given field in an expression.
- *
- * <p>This method optimizes for the target storage type (i.e. avoids
- * casts).</p>
- *
- * <p>For example given {@code expression=employee} and {@code field=2},
- * generates</p>
- *
- * <pre>{@code employee.deptno}</pre>
- *
- * @param expression Expression
- * @param field Ordinal of field
- * @param storageType optional hint for storage class
- * @return Expression to access the field of the expression
- */
- Expression fieldReference(Expression expression, int field,
- Type storageType);
-
- /** Generates an accessor function for a given list of fields. The resulting
- * object is a {@link List} (implementing {@link Object#hashCode()} and
- * {@link Object#equals(Object)} per that interface) and also implements
- * {@link Comparable}.
- *
- * <p>For example:</p>
- *
- * <pre>{@code
- * new Function1<Employee, Object[]> {
- * public Object[] apply(Employee v1) {
- * return FlatLists.of(v1.<fieldN>, v1.<fieldM>);
- * }
- * }
- * }</pre>
- */
- Expression generateAccessor(List<Integer> fields);
-
- /** Generates a selector for the given fields from an expression, with the
- * default row format. */
- Expression generateSelector(
- ParameterExpression parameter,
- List<Integer> fields);
-
- /** Generates a lambda expression that is a selector for the given fields from
- * an expression. */
- Expression generateSelector(
- ParameterExpression parameter,
- List<Integer> fields,
- JavaRowFormat targetFormat);
-
- /** Generates a lambda expression that is a selector for the given fields from
- * an expression.
- *
- * <p>{@code usedFields} must be a subset of {@code fields}.
- * For each field, there is a corresponding indicator field.
- * If a field is used, its value is assigned and its indicator is left
- * {@code false}.
- * If a field is not used, its value is not assigned and its indicator is
- * set to {@code true};
- * This will become a value of 1 when {@code GROUPING(field)} is called. */
- Expression generateSelector(
- ParameterExpression parameter,
- List<Integer> fields,
- List<Integer> usedFields,
- JavaRowFormat targetFormat);
-
- /** Generates a selector for the given fields from an expression. */
- /** Only used by EnumerableWindow */
- Pair<Type, List<Expression>> selector(
- ParameterExpression parameter,
- List<Integer> fields,
- JavaRowFormat targetFormat);
-
- /** Projects a given collection of fields from this input record, into
- * a particular preferred output format. The output format is optimized
- * if there are 0 or 1 fields. */
- PhysType project(
- List<Integer> integers,
- JavaRowFormat format);
-
- /** Projects a given collection of fields from this input record, optionally
- * with indicator fields, into a particular preferred output format.
- *
- * <p>The output format is optimized if there are 0 or 1 fields
- * and indicators are disabled. */
- PhysType project(
- List<Integer> integers,
- boolean indicator,
- JavaRowFormat format);
-
- /** Returns a lambda to create a collation key and a comparator. The
- * comparator is sometimes null. */
- Pair<Expression, Expression> generateCollationKey(
- List<RelFieldCollation> collations);
-
- /** Returns a comparator. Unlike the comparator returned by
- * {@link #generateCollationKey(java.util.List)}, this comparator acts on the
- * whole element. */
- Expression generateComparator(
- RelCollation collation);
-
- /** Returns a expression that yields a comparer, or null if this type
- * is comparable. */
- Expression comparer();
-
- /** Generates an expression that creates a record for a row, initializing
- * its fields with the given expressions. There must be one expression per
- * field.
- *
- * @param expressions Expression to initialize each field
- * @return Expression to create a row
- */
- Expression record(List<Expression> expressions);
-
- /** Returns the format. */
- JavaRowFormat getFormat();
-
- List<Expression> accessors(Expression parameter, List<Integer> argList);
-
- /** Returns a copy of this type that allows nulls if {@code nullable} is
- * true. */
- PhysType makeNullable(boolean nullable);
-
- /** Converts an enumerable of this physical type to an enumerable that uses a
- * given physical type for its rows. */
- Expression convertTo(Expression expression, PhysType targetPhysType);
-}
-
-// End PhysType.java
http://git-wip-us.apache.org/repos/asf/kylin/blob/7611338b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysTypeImpl.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysTypeImpl.java b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysTypeImpl.java
deleted file mode 100644
index 678b469..0000000
--- a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/PhysTypeImpl.java
+++ /dev/null
@@ -1,654 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.calcite.adapter.enumerable;
-
-import org.apache.calcite.adapter.java.JavaTypeFactory;
-import org.apache.calcite.linq4j.Ord;
-import org.apache.calcite.linq4j.function.Function1;
-import org.apache.calcite.linq4j.tree.BlockBuilder;
-import org.apache.calcite.linq4j.tree.Expression;
-import org.apache.calcite.linq4j.tree.Expressions;
-import org.apache.calcite.linq4j.tree.MemberDeclaration;
-import org.apache.calcite.linq4j.tree.ParameterExpression;
-import org.apache.calcite.linq4j.tree.Primitive;
-import org.apache.calcite.linq4j.tree.Types;
-import org.apache.calcite.rel.RelCollation;
-import org.apache.calcite.rel.RelFieldCollation;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rel.type.RelDataTypeField;
-import org.apache.calcite.runtime.Utilities;
-import org.apache.calcite.sql.SqlUtil;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.apache.calcite.util.BuiltInMethod;
-import org.apache.calcite.util.Pair;
-import org.apache.calcite.util.Util;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
-import java.lang.reflect.Type;
-import java.util.AbstractList;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-import static org.apache.calcite.adapter.enumerable.EnumUtils.javaRowClass;
-import static org.apache.calcite.adapter.enumerable.EnumUtils.overridingMethodDecl;
-
-/*
- * OVERRIDE POINT: patching CALCITE-1540 on calcite 1.8.0
- */
-
-/** Implementation of {@link PhysType}. */
-public class PhysTypeImpl implements PhysType {
- private final JavaTypeFactory typeFactory;
- private final RelDataType rowType;
- private final Type javaRowClass;
- private final List<Class> fieldClasses = new ArrayList<>();
- final JavaRowFormat format;
-
- /** Creates a PhysTypeImpl. */
- PhysTypeImpl(
- JavaTypeFactory typeFactory,
- RelDataType rowType,
- Type javaRowClass,
- JavaRowFormat format) {
- this.typeFactory = typeFactory;
- this.rowType = rowType;
- this.javaRowClass = javaRowClass;
- this.format = format;
- for (RelDataTypeField field : rowType.getFieldList()) {
- fieldClasses.add(javaRowClass(typeFactory, field.getType()));
- }
- }
-
- public static PhysType of(
- JavaTypeFactory typeFactory,
- RelDataType rowType,
- JavaRowFormat format) {
- return of(typeFactory, rowType, format, true);
- }
-
- public static PhysType of(
- JavaTypeFactory typeFactory,
- RelDataType rowType,
- JavaRowFormat format,
- boolean optimize) {
- if (optimize) {
- format = format.optimize(rowType);
- }
- final Type javaRowClass = format.javaRowClass(typeFactory, rowType);
- return new PhysTypeImpl(typeFactory, rowType, javaRowClass, format);
- }
-
- static PhysType of(
- final JavaTypeFactory typeFactory,
- Type javaRowClass) {
- final RelDataTypeFactory.FieldInfoBuilder builder = typeFactory.builder();
- if (javaRowClass instanceof Types.RecordType) {
- final Types.RecordType recordType = (Types.RecordType) javaRowClass;
- for (Types.RecordField field : recordType.getRecordFields()) {
- builder.add(field.getName(), typeFactory.createType(field.getType()));
- }
- }
- RelDataType rowType = builder.build();
- // Do not optimize if there are 0 or 1 fields.
- return new PhysTypeImpl(typeFactory, rowType, javaRowClass,
- JavaRowFormat.CUSTOM);
- }
-
- public JavaRowFormat getFormat() {
- return format;
- }
-
- public PhysType project(List<Integer> integers, JavaRowFormat format) {
- return project(integers, false, format);
- }
-
- public PhysType project(List<Integer> integers, boolean indicator,
- JavaRowFormat format) {
- final RelDataTypeFactory.FieldInfoBuilder builder = typeFactory.builder();
- for (int index : integers) {
- builder.add(rowType.getFieldList().get(index));
- }
- if (indicator) {
- final RelDataType booleanType =
- typeFactory.createTypeWithNullability(
- typeFactory.createSqlType(SqlTypeName.BOOLEAN), false);
- for (int index : integers) {
- builder.add("i$" + rowType.getFieldList().get(index).getName(),
- booleanType);
- }
- }
- RelDataType projectedRowType = builder.build();
- return of(typeFactory, projectedRowType, format.optimize(projectedRowType));
- }
-
- public Expression generateSelector(
- ParameterExpression parameter,
- List<Integer> fields) {
- return generateSelector(parameter, fields, format);
- }
-
- public Expression generateSelector(
- ParameterExpression parameter,
- List<Integer> fields,
- JavaRowFormat targetFormat) {
- // Optimize target format
- switch (fields.size()) {
- case 0:
- targetFormat = JavaRowFormat.LIST;
- break;
- case 1:
- targetFormat = JavaRowFormat.SCALAR;
- break;
- }
- final PhysType targetPhysType =
- project(fields, targetFormat);
- switch (format) {
- case SCALAR:
- return Expressions.call(BuiltInMethod.IDENTITY_SELECTOR.method);
- default:
- return Expressions.lambda(Function1.class,
- targetPhysType.record(fieldReferences(parameter, fields)), parameter);
- }
- }
-
- public Expression generateSelector(final ParameterExpression parameter,
- final List<Integer> fields, List<Integer> usedFields,
- JavaRowFormat targetFormat) {
- final PhysType targetPhysType =
- project(fields, true, targetFormat);
- final List<Expression> expressions = Lists.newArrayList();
- for (Ord<Integer> ord : Ord.zip(fields)) {
- final Integer field = ord.e;
- if (usedFields.contains(field)) {
- expressions.add(fieldReference(parameter, field));
- } else {
- final Primitive primitive =
- Primitive.of(targetPhysType.fieldClass(ord.i));
- expressions.add(
- Expressions.constant(
- primitive != null ? primitive.defaultValue : null));
- }
- }
- for (Integer field : fields) {
- expressions.add(Expressions.constant(!usedFields.contains(field)));
- }
- return Expressions.lambda(Function1.class,
- targetPhysType.record(expressions), parameter);
- }
-
- public Pair<Type, List<Expression>> selector(
- ParameterExpression parameter,
- List<Integer> fields,
- JavaRowFormat targetFormat) {
- // Optimize target format
- switch (fields.size()) {
- case 0:
- targetFormat = JavaRowFormat.LIST;
- break;
- case 1:
- targetFormat = JavaRowFormat.SCALAR;
- break;
- }
- final PhysType targetPhysType =
- project(fields, targetFormat);
- switch (format) {
- case SCALAR:
- return Pair.of(parameter.getType(), Collections.<Expression>singletonList(parameter));
- default:
- return Pair.of(targetPhysType.getJavaRowType(), fieldReferences(parameter, fields));
- }
- }
-
- public List<Expression> accessors(Expression v1, List<Integer> argList) {
- final List<Expression> expressions = new ArrayList<>();
- for (int field : argList) {
- expressions.add(
- Types.castIfNecessary(
- fieldClass(field),
- fieldReference(v1, field)));
- }
- return expressions;
- }
-
- public PhysType makeNullable(boolean nullable) {
- if (!nullable) {
- return this;
- }
- return new PhysTypeImpl(typeFactory,
- typeFactory.createTypeWithNullability(rowType, true),
- Primitive.box(javaRowClass), format);
- }
-
- public Expression convertTo(Expression exp, PhysType targetPhysType) {
- final JavaRowFormat targetFormat = targetPhysType.getFormat();
- if (format == targetFormat) {
- return exp;
- }
- final ParameterExpression o_ =
- Expressions.parameter(javaRowClass, "o");
- final int fieldCount = rowType.getFieldCount();
- return Expressions.call(exp, BuiltInMethod.SELECT.method,
- generateSelector(o_, Util.range(fieldCount), targetFormat));
- }
-
- public Pair<Expression, Expression> generateCollationKey(
- final List<RelFieldCollation> collations) {
- final Expression selector;
- if (collations.size() == 1) {
- RelFieldCollation collation = collations.get(0);
- ParameterExpression parameter =
- Expressions.parameter(javaRowClass, "v");
- selector =
- Expressions.lambda(
- Function1.class,
- fieldReference(parameter, collation.getFieldIndex()),
- parameter);
- return Pair.<Expression, Expression>of(
- selector,
- Expressions.call(
- BuiltInMethod.NULLS_COMPARATOR.method,
- Expressions.constant(
- collation.nullDirection
- == RelFieldCollation.NullDirection.FIRST),
- Expressions.constant(
- collation.getDirection()
- == RelFieldCollation.Direction.DESCENDING)));
- }
- selector =
- Expressions.call(BuiltInMethod.IDENTITY_SELECTOR.method);
-
- // int c;
- // c = Utilities.compare(v0, v1);
- // if (c != 0) return c; // or -c if descending
- // ...
- // return 0;
- BlockBuilder body = new BlockBuilder();
- final ParameterExpression parameterV0 =
- Expressions.parameter(javaRowClass, "v0");
- final ParameterExpression parameterV1 =
- Expressions.parameter(javaRowClass, "v1");
- final ParameterExpression parameterC =
- Expressions.parameter(int.class, "c");
- final int mod = collations.size() == 1 ? Modifier.FINAL : 0;
- body.add(Expressions.declare(mod, parameterC, null));
- for (RelFieldCollation collation : collations) {
- final int index = collation.getFieldIndex();
- Expression arg0 = fieldReference(parameterV0, index);
- Expression arg1 = fieldReference(parameterV1, index);
- switch (Primitive.flavor(fieldClass(index))) {
- case OBJECT:
- arg0 = Types.castIfNecessary(Comparable.class, arg0);
- arg1 = Types.castIfNecessary(Comparable.class, arg1);
- }
- final boolean nullsFirst =
- collation.nullDirection
- == RelFieldCollation.NullDirection.FIRST;
- final boolean descending =
- collation.getDirection()
- == RelFieldCollation.Direction.DESCENDING;
- final Method method = (fieldNullable(index)
- ? (nullsFirst ^ descending
- ? BuiltInMethod.COMPARE_NULLS_FIRST
- : BuiltInMethod.COMPARE_NULLS_LAST)
- : BuiltInMethod.COMPARE).method;
- body.add(
- Expressions.statement(
- Expressions.assign(
- parameterC,
- Expressions.call(method.getDeclaringClass(),
- method.getName(),
- arg0,
- arg1))));
- body.add(
- Expressions.ifThen(
- Expressions.notEqual(
- parameterC, Expressions.constant(0)),
- Expressions.return_(
- null,
- descending
- ? Expressions.negate(parameterC)
- : parameterC)));
- }
- body.add(
- Expressions.return_(null, Expressions.constant(0)));
-
- final List<MemberDeclaration> memberDeclarations =
- Expressions.<MemberDeclaration>list(
- Expressions.methodDecl(
- Modifier.PUBLIC,
- int.class,
- "compare",
- ImmutableList.of(
- parameterV0, parameterV1),
- body.toBlock()));
-
- if (EnumerableRules.BRIDGE_METHODS) {
- final ParameterExpression parameterO0 =
- Expressions.parameter(Object.class, "o0");
- final ParameterExpression parameterO1 =
- Expressions.parameter(Object.class, "o1");
- BlockBuilder bridgeBody = new BlockBuilder();
- bridgeBody.add(
- Expressions.return_(
- null,
- Expressions.call(
- Expressions.parameter(
- Comparable.class, "this"),
- BuiltInMethod.COMPARATOR_COMPARE.method,
- Expressions.convert_(
- parameterO0,
- javaRowClass),
- Expressions.convert_(
- parameterO1,
- javaRowClass))));
- memberDeclarations.add(
- overridingMethodDecl(
- BuiltInMethod.COMPARATOR_COMPARE.method,
- ImmutableList.of(parameterO0, parameterO1),
- bridgeBody.toBlock()));
- }
- return Pair.<Expression, Expression>of(
- selector,
- Expressions.new_(
- Comparator.class,
- Collections.<Expression>emptyList(),
- memberDeclarations));
- }
-
- public Expression generateComparator(RelCollation collation) {
- // int c;
- // c = Utilities.compare(v0, v1);
- // if (c != 0) return c; // or -c if descending
- // ...
- // return 0;
- BlockBuilder body = new BlockBuilder();
- final Type javaRowClass = Primitive.box(this.javaRowClass);
- final ParameterExpression parameterV0 =
- Expressions.parameter(javaRowClass, "v0");
- final ParameterExpression parameterV1 =
- Expressions.parameter(javaRowClass, "v1");
- final ParameterExpression parameterC =
- Expressions.parameter(int.class, "c");
- final int mod =
- collation.getFieldCollations().size() == 1 ? Modifier.FINAL : 0;
- body.add(Expressions.declare(mod, parameterC, null));
- for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
- final int index = fieldCollation.getFieldIndex();
- Expression arg0 = fieldReference(parameterV0, index);
- Expression arg1 = fieldReference(parameterV1, index);
- switch (Primitive.flavor(fieldClass(index))) {
- case OBJECT:
- arg0 = Types.castIfNecessary(Comparable.class, arg0);
- arg1 = Types.castIfNecessary(Comparable.class, arg1);
- }
- final boolean nullsFirst =
- fieldCollation.nullDirection
- == RelFieldCollation.NullDirection.FIRST;
- final boolean descending =
- fieldCollation.getDirection()
- == RelFieldCollation.Direction.DESCENDING;
- body.add(
- Expressions.statement(
- Expressions.assign(
- parameterC,
- Expressions.call(
- Utilities.class,
- fieldNullable(index)
- ? (nullsFirst != descending
- ? "compareNullsFirst"
- : "compareNullsLast")
- : "compare",
- arg0,
- arg1))));
- body.add(
- Expressions.ifThen(
- Expressions.notEqual(
- parameterC, Expressions.constant(0)),
- Expressions.return_(
- null,
- descending
- ? Expressions.negate(parameterC)
- : parameterC)));
- }
- body.add(
- Expressions.return_(null, Expressions.constant(0)));
-
- final List<MemberDeclaration> memberDeclarations =
- Expressions.<MemberDeclaration>list(
- Expressions.methodDecl(
- Modifier.PUBLIC,
- int.class,
- "compare",
- ImmutableList.of(parameterV0, parameterV1),
- body.toBlock()));
-
- if (EnumerableRules.BRIDGE_METHODS) {
- final ParameterExpression parameterO0 =
- Expressions.parameter(Object.class, "o0");
- final ParameterExpression parameterO1 =
- Expressions.parameter(Object.class, "o1");
- BlockBuilder bridgeBody = new BlockBuilder();
- bridgeBody.add(
- Expressions.return_(
- null,
- Expressions.call(
- Expressions.parameter(
- Comparable.class, "this"),
- BuiltInMethod.COMPARATOR_COMPARE.method,
- Expressions.convert_(
- parameterO0,
- javaRowClass),
- Expressions.convert_(
- parameterO1,
- javaRowClass))));
- memberDeclarations.add(
- overridingMethodDecl(
- BuiltInMethod.COMPARATOR_COMPARE.method,
- ImmutableList.of(parameterO0, parameterO1),
- bridgeBody.toBlock()));
- }
- return Expressions.new_(
- Comparator.class,
- Collections.<Expression>emptyList(),
- memberDeclarations);
- }
-
- public RelDataType getRowType() {
- return rowType;
- }
-
- public Expression record(List<Expression> expressions) {
- return format.record(javaRowClass, expressions);
- }
-
- public Type getJavaRowType() {
- return javaRowClass;
- }
-
- public Type getJavaFieldType(int index) {
- return format.javaFieldClass(typeFactory, rowType, index);
- }
-
- public PhysType component(int fieldOrdinal) {
- final RelDataTypeField field = rowType.getFieldList().get(fieldOrdinal);
- return PhysTypeImpl.of(typeFactory,
- toStruct(field.getType().getComponentType()), format, false);
- }
-
- public PhysType field(int ordinal) {
- final RelDataTypeField field = rowType.getFieldList().get(ordinal);
- final RelDataType type = field.getType();
- return PhysTypeImpl.of(typeFactory, toStruct(type), format, false);
- }
-
- private RelDataType toStruct(RelDataType type) {
- if (type.isStruct()) {
- return type;
- }
- return typeFactory.builder()
- .add(SqlUtil.deriveAliasFromOrdinal(0), type)
- .build();
- }
-
- public Expression comparer() {
- return format.comparer();
- }
-
- private List<Expression> fieldReferences(
- final Expression parameter, final List<Integer> fields) {
- return new AbstractList<Expression>() {
- public Expression get(int index) {
- return fieldReference(parameter, fields.get(index));
- }
-
- public int size() {
- return fields.size();
- }
- };
- }
-
- public Class fieldClass(int field) {
- return fieldClasses.get(field);
- }
-
- public boolean fieldNullable(int field) {
- return rowType.getFieldList().get(field).getType().isNullable();
- }
-
- public Expression generateAccessor(
- List<Integer> fields) {
- ParameterExpression v1 =
- Expressions.parameter(javaRowClass, "v1");
- switch (fields.size()) {
- case 0:
- return Expressions.lambda(
- Function1.class,
- Expressions.field(
- null,
- BuiltInMethod.COMPARABLE_EMPTY_LIST.field),
- v1);
- case 1:
- int field0 = fields.get(0);
-
- // new Function1<Employee, Res> {
- // public Res apply(Employee v1) {
- // return v1.<fieldN>;
- // }
- // }
- Class returnType = fieldClasses.get(field0);
- Expression fieldReference =
- Types.castIfNecessary(
- returnType,
- fieldReference(v1, field0));
- return Expressions.lambda(
- Function1.class,
- fieldReference,
- v1);
- default:
- // new Function1<Employee, List> {
- // public List apply(Employee v1) {
- // return Arrays.asList(
- // new Object[] {v1.<fieldN>, v1.<fieldM>});
- // }
- // }
- Expressions.FluentList<Expression> list = Expressions.list();
- for (int field : fields) {
- list.add(fieldReference(v1, field));
- }
- switch (list.size()) {
- case 2:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST2.method,
- list),
- v1);
- case 3:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST3.method,
- list),
- v1);
- case 4:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST4.method,
- list),
- v1);
- case 5:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST5.method,
- list),
- v1);
- case 6:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST6.method,
- list),
- v1);
- default:
- return Expressions.lambda(
- Function1.class,
- Expressions.call(
- List.class,
- null,
- BuiltInMethod.LIST_N.method,
- Expressions.newArrayInit(
- Comparable.class,
- list)),
- v1);
- }
- }
- }
-
- public Expression fieldReference(
- Expression expression, int field) {
- return fieldReference(expression, field, null);
- }
-
- public Expression fieldReference(
- Expression expression, int field, Type storageType) {
- if (storageType == null) {
- storageType = fieldClass(field);
- }
- return format.field(expression, field, storageType);
- }
-}
-
-// End PhysTypeImpl.java
[08/39] kylin git commit: KYLIN-2361 add Tomcat8 ordered class loader
Posted by li...@apache.org.
KYLIN-2361 add Tomcat8 ordered class loader
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9a3bd71c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9a3bd71c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9a3bd71c
Branch: refs/heads/master-hbase0.98
Commit: 9a3bd71c8e5ce9dc13e38560efc556dc862819a1
Parents: 2b60ac6
Author: Billy Liu <bi...@apache.org>
Authored: Sat Feb 4 11:41:49 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Sat Feb 4 11:42:02 2017 +0800
----------------------------------------------------------------------
.../kylin/ext/CustomizedWebappClassloader.java | 4 +-
.../kylin/ext/OrderedWebResourceRoot.java | 286 +++++++++++++++++++
.../kylin/ext/WebappOrderedClassLoader.java | 66 +++++
3 files changed, 353 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/9a3bd71c/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
----------------------------------------------------------------------
diff --git a/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java b/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
index f241865..bbf4053 100644
--- a/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
+++ b/tomcat-ext/src/main/java/org/apache/kylin/ext/CustomizedWebappClassloader.java
@@ -18,14 +18,12 @@
package org.apache.kylin.ext;
-import org.apache.catalina.loader.ParallelWebappClassLoader;
-
/**
* simple extension to standard ParallelWebappClassLoader
* the only difference is that CustomizedWebappClassloader is able to delegate more packages
* to parent classloaders
*/
-public class CustomizedWebappClassloader extends ParallelWebappClassLoader {
+public class CustomizedWebappClassloader extends WebappOrderedClassLoader {
/**
* Set of package names which are not allowed to be loaded from a webapp
* class loader without delegating first.
http://git-wip-us.apache.org/repos/asf/kylin/blob/9a3bd71c/tomcat-ext/src/main/java/org/apache/kylin/ext/OrderedWebResourceRoot.java
----------------------------------------------------------------------
diff --git a/tomcat-ext/src/main/java/org/apache/kylin/ext/OrderedWebResourceRoot.java b/tomcat-ext/src/main/java/org/apache/kylin/ext/OrderedWebResourceRoot.java
new file mode 100644
index 0000000..9784bd8
--- /dev/null
+++ b/tomcat-ext/src/main/java/org/apache/kylin/ext/OrderedWebResourceRoot.java
@@ -0,0 +1,286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.ext;
+
+import java.io.InputStream;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.catalina.Context;
+import org.apache.catalina.LifecycleException;
+import org.apache.catalina.LifecycleListener;
+import org.apache.catalina.LifecycleState;
+import org.apache.catalina.TrackedWebResource;
+import org.apache.catalina.WebResource;
+import org.apache.catalina.WebResourceRoot;
+import org.apache.catalina.WebResourceSet;
+
+public class OrderedWebResourceRoot implements WebResourceRoot {
+
+ private static final String WEB_INF_LIB_PATH = "/WEB-INF/lib";
+
+ private static final Comparator<WebResource> WEB_RESOURCE_COMPARATOR = new Comparator<WebResource>() {
+ @Override
+ public int compare(WebResource o1, WebResource o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+ };
+
+ private WebResourceRoot delegate;
+
+ public OrderedWebResourceRoot(WebResourceRoot delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public WebResource[] listResources(String path) {
+ WebResource[] webResources = delegate.listResources(path);
+
+ if (WEB_INF_LIB_PATH.equals(path)) {
+ Arrays.sort(webResources, WEB_RESOURCE_COMPARATOR);
+ }
+
+ return webResources;
+ }
+
+ @Override
+ public void addLifecycleListener(LifecycleListener listener) {
+ delegate.addLifecycleListener(listener);
+ }
+
+ @Override
+ public LifecycleListener[] findLifecycleListeners() {
+ return delegate.findLifecycleListeners();
+ }
+
+ @Override
+ public void removeLifecycleListener(LifecycleListener listener) {
+ delegate.removeLifecycleListener(listener);
+ }
+
+ @Override
+ public void init() throws LifecycleException {
+ delegate.init();
+ }
+
+ @Override
+ public void start() throws LifecycleException {
+ delegate.start();
+ }
+
+ @Override
+ public void stop() throws LifecycleException {
+ delegate.stop();
+ }
+
+ @Override
+ public void destroy() throws LifecycleException {
+ delegate.destroy();
+ }
+
+ @Override
+ public LifecycleState getState() {
+ return delegate.getState();
+ }
+
+ @Override
+ public String getStateName() {
+ return delegate.getStateName();
+ }
+
+ @Override
+ public WebResource getResource(String path) {
+ return delegate.getResource(path);
+ }
+
+ @Override
+ public WebResource[] getResources(String path) {
+ return delegate.getResources(path);
+ }
+
+ @Override
+ public WebResource getClassLoaderResource(String path) {
+ return delegate.getClassLoaderResource(path);
+ }
+
+ @Override
+ public WebResource[] getClassLoaderResources(String path) {
+ return delegate.getClassLoaderResources(path);
+ }
+
+ @Override
+ public String[] list(String path) {
+ return delegate.list(path);
+ }
+
+ @Override
+ public Set<String> listWebAppPaths(String path) {
+ return delegate.listWebAppPaths(path);
+ }
+
+ @Override
+ public boolean mkdir(String path) {
+ return delegate.mkdir(path);
+ }
+
+ @Override
+ public boolean write(String path, InputStream is, boolean overwrite) {
+ return delegate.write(path, is, overwrite);
+ }
+
+ @Override
+ public void createWebResourceSet(ResourceSetType type, String webAppMount, URL url, String internalPath) {
+ delegate.createWebResourceSet(type, webAppMount, url, internalPath);
+ }
+
+ @Override
+ public void createWebResourceSet(ResourceSetType type, String webAppMount, String base, String archivePath,
+ String internalPath) {
+ delegate.createWebResourceSet(type, webAppMount, base, archivePath, internalPath);
+ }
+
+ @Override
+ public void addPreResources(WebResourceSet webResourceSet) {
+ delegate.addPreResources(webResourceSet);
+ }
+
+ @Override
+ public WebResourceSet[] getPreResources() {
+ return delegate.getPreResources();
+ }
+
+ @Override
+ public void addJarResources(WebResourceSet webResourceSet) {
+ delegate.addJarResources(webResourceSet);
+ }
+
+ @Override
+ public WebResourceSet[] getJarResources() {
+ return delegate.getJarResources();
+ }
+
+ @Override
+ public void addPostResources(WebResourceSet webResourceSet) {
+ delegate.addPostResources(webResourceSet);
+ }
+
+ @Override
+ public WebResourceSet[] getPostResources() {
+ return delegate.getPostResources();
+ }
+
+ @Override
+ public Context getContext() {
+ return delegate.getContext();
+ }
+
+ @Override
+ public void setContext(Context context) {
+ delegate.setContext(context);
+ }
+
+ @Override
+ public void setAllowLinking(boolean allowLinking) {
+ delegate.setAllowLinking(allowLinking);
+ }
+
+ @Override
+ public boolean getAllowLinking() {
+ return delegate.getAllowLinking();
+ }
+
+ @Override
+ public void setCachingAllowed(boolean cachingAllowed) {
+ delegate.setCachingAllowed(cachingAllowed);
+ }
+
+ @Override
+ public boolean isCachingAllowed() {
+ return delegate.isCachingAllowed();
+ }
+
+ @Override
+ public void setCacheTtl(long ttl) {
+ delegate.setCacheTtl(ttl);
+ }
+
+ @Override
+ public long getCacheTtl() {
+ return delegate.getCacheTtl();
+ }
+
+ @Override
+ public void setCacheMaxSize(long cacheMaxSize) {
+ delegate.setCacheMaxSize(cacheMaxSize);
+ }
+
+ @Override
+ public long getCacheMaxSize() {
+ return delegate.getCacheMaxSize();
+ }
+
+ @Override
+ public void setCacheObjectMaxSize(int cacheObjectMaxSize) {
+ delegate.setCacheObjectMaxSize(cacheObjectMaxSize);
+ }
+
+ @Override
+ public int getCacheObjectMaxSize() {
+ return delegate.getCacheObjectMaxSize();
+ }
+
+ @Override
+ public void setTrackLockedFiles(boolean trackLockedFiles) {
+ delegate.setTrackLockedFiles(trackLockedFiles);
+ }
+
+ @Override
+ public boolean getTrackLockedFiles() {
+ return delegate.getTrackLockedFiles();
+ }
+
+ @Override
+ public void backgroundProcess() {
+ delegate.backgroundProcess();
+ }
+
+ @Override
+ public void registerTrackedResource(TrackedWebResource trackedResource) {
+ delegate.registerTrackedResource(trackedResource);
+ }
+
+ @Override
+ public void deregisterTrackedResource(TrackedWebResource trackedResource) {
+ delegate.deregisterTrackedResource(trackedResource);
+ }
+
+ @Override
+ public List<URL> getBaseUrls() {
+ return delegate.getBaseUrls();
+ }
+
+ @Override
+ public void gc() {
+ delegate.gc();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/9a3bd71c/tomcat-ext/src/main/java/org/apache/kylin/ext/WebappOrderedClassLoader.java
----------------------------------------------------------------------
diff --git a/tomcat-ext/src/main/java/org/apache/kylin/ext/WebappOrderedClassLoader.java b/tomcat-ext/src/main/java/org/apache/kylin/ext/WebappOrderedClassLoader.java
new file mode 100644
index 0000000..6a90e55
--- /dev/null
+++ b/tomcat-ext/src/main/java/org/apache/kylin/ext/WebappOrderedClassLoader.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.ext;
+
+import org.apache.catalina.LifecycleException;
+import org.apache.catalina.WebResourceRoot;
+import org.apache.catalina.loader.ParallelWebappClassLoader;
+
+/**
+ * Modified from the openwide-java/tomcat-classloader-ordered in https://github.com/openwide-java/tomcat-classloader-ordered
+ *
+ * This classloader is designed to return the jar of WEB-INF lib in alphabetical order as it was the case with Tomcat
+ * 7.x.
+ *
+ * See the discussion in https://bz.apache.org/bugzilla/show_bug.cgi?id=57129 for more information.
+ */
+public class WebappOrderedClassLoader extends ParallelWebappClassLoader {
+
+ public WebappOrderedClassLoader() {
+ }
+
+ public WebappOrderedClassLoader(ClassLoader parent) {
+ super(parent);
+ }
+
+ @Override
+ public void setResources(WebResourceRoot resources) {
+ super.setResources(new OrderedWebResourceRoot(resources));
+ }
+
+ @Override
+ public WebappOrderedClassLoader copyWithoutTransformers() {
+ WebappOrderedClassLoader result = new WebappOrderedClassLoader(getParent());
+
+ super.copyStateWithoutTransformers(result);
+
+ try {
+ result.start();
+ } catch (LifecycleException e) {
+ throw new IllegalStateException(e);
+ }
+
+ return result;
+ }
+
+ @Override
+ protected Object getClassLoadingLock(String className) {
+ return this;
+ }
+}
\ No newline at end of file
[12/39] kylin git commit: KYLIN-2391 Unclosed FileInputStream in
KylinConfig#getConfigAsString()
Posted by li...@apache.org.
KYLIN-2391 Unclosed FileInputStream in KylinConfig#getConfigAsString()
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6f0bc1c3
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6f0bc1c3
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6f0bc1c3
Branch: refs/heads/master-hbase0.98
Commit: 6f0bc1c3edd75f0b180afeeb68e09cf01298d4d8
Parents: 56a3e6c
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 4 19:35:12 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 4 19:37:59 2017 +0800
----------------------------------------------------------------------
.../org/apache/kylin/common/KylinConfig.java | 21 ++++++++++++--------
1 file changed, 13 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/6f0bc1c3/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 4eac92a..0f40654 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -296,15 +296,20 @@ public class KylinConfig extends KylinConfigBase {
}
public String getConfigAsString() throws IOException {
- File propertiesFile = getKylinPropertiesFile();
- OrderedProperties orderedProperties = new OrderedProperties();
- orderedProperties.load(new FileInputStream(propertiesFile));
- orderedProperties = BCC.check(orderedProperties);
- final StringBuilder sb = new StringBuilder();
- for (Map.Entry<String, String> entry : orderedProperties.entrySet()) {
- sb.append(entry.getKey() + "=" + entry.getValue()).append('\n');
+ final File propertiesFile = getKylinPropertiesFile();
+ final InputStream is = new FileInputStream(propertiesFile);
+ try {
+ OrderedProperties orderedProperties = new OrderedProperties();
+ orderedProperties.load(is);
+ orderedProperties = BCC.check(orderedProperties);
+ final StringBuilder sb = new StringBuilder();
+ for (Map.Entry<String, String> entry : orderedProperties.entrySet()) {
+ sb.append(entry.getKey() + "=" + entry.getValue()).append('\n');
+ }
+ return sb.toString();
+ } finally {
+ IOUtils.closeQuietly(is);
}
- return sb.toString();
}
public KylinConfig base() {
[06/39] kylin git commit: minor, stabilize DefaultSchedulerTest
Posted by li...@apache.org.
minor, stabilize DefaultSchedulerTest
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a058bfb8
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a058bfb8
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a058bfb8
Branch: refs/heads/master-hbase0.98
Commit: a058bfb8b0490fe36b7fe4da026028411ed208a5
Parents: 546f88f
Author: Li Yang <li...@apache.org>
Authored: Fri Feb 3 13:43:56 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Fri Feb 3 13:43:56 2017 +0800
----------------------------------------------------------------------
.../job/impl/threadpool/BaseSchedulerTest.java | 36 ++++++++++++++------
1 file changed, 26 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/a058bfb8/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
index fdf5252..1ada9a1 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
@@ -31,11 +31,15 @@ import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.lock.MockJobLock;
import org.junit.After;
import org.junit.Before;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*/
public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
+ private static final Logger logger = LoggerFactory.getLogger(BaseSchedulerTest.class);
+
private DefaultScheduler scheduler;
protected ExecutableManager jobService;
@@ -70,19 +74,31 @@ public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
}
protected void waitForJobFinish(String jobId) {
- while (true) {
- AbstractExecutable job = jobService.getJob(jobId);
- final ExecutableState status = job.getStatus();
- if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
- break;
- } else {
- try {
- Thread.sleep(5000);
- } catch (InterruptedException e) {
- e.printStackTrace();
+ int error = 0;
+ final int errorLimit = 3;
+
+ while (error < errorLimit) {
+ try {
+ Thread.sleep(2000);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ try {
+ AbstractExecutable job = jobService.getJob(jobId);
+ ExecutableState status = job.getStatus();
+ if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
+ break;
}
+ } catch (Exception ex) {
+ logger.error("", ex);
+ error++;
}
}
+
+ if (error >= errorLimit) {
+ throw new RuntimeException("waitForJobFinish() encounters exceptions, see logs above");
+ }
}
protected void waitForJobStatus(String jobId, ExecutableState state, long interval) {
[26/39] kylin git commit: KYLIN-2433 Fix NPE in MergeCuboidMapper
Posted by li...@apache.org.
KYLIN-2433 Fix NPE in MergeCuboidMapper
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b32cc954
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b32cc954
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b32cc954
Branch: refs/heads/master-hbase0.98
Commit: b32cc9545a76657570c4a3353469ded43892c772
Parents: 6d11dd1
Author: kangkaisen <ka...@163.com>
Authored: Fri Feb 3 14:00:50 2017 +0800
Committer: kangkaisen <ka...@163.com>
Committed: Tue Feb 7 17:20:30 2017 +0800
----------------------------------------------------------------------
.../apache/kylin/engine/mr/steps/MergeCuboidMapper.java | 10 ++++++++++
1 file changed, 10 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/b32cc954/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
index 047e2b1..acf1403 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
@@ -129,6 +129,11 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
List<TblColRef> columns = measureType.getColumnsNeedDictionary(measureDesc.getFunction());
boolean needReEncode = false;
for (TblColRef col : columns) {
+ //handle the column that all records is null
+ if (sourceCubeSegment.getDictionary(col) == null) {
+ continue;
+ }
+
if (!sourceCubeSegment.getDictionary(col).equals(mergedCubeSegment.getDictionary(col))) {
oldDicts.put(col, sourceCubeSegment.getDictionary(col));
newDicts.put(col, mergedCubeSegment.getDictionary(col));
@@ -249,6 +254,11 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
}
private Boolean checkNeedMerging(TblColRef col) throws IOException {
+ //handle the column where all records are null
+ if (sourceCubeSegment.getDictionary(col) == null) {
+ return false;
+ }
+
Boolean ret = dimensionsNeedDict.get(col);
if (ret != null)
return ret;
[29/39] kylin git commit: KYLIN-2430 Get exitCode from FsShell.run in
BulkLoadJob
Posted by li...@apache.org.
KYLIN-2430 Get exitCode from FsShell.run in BulkLoadJob
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7b860adb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7b860adb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7b860adb
Branch: refs/heads/master-hbase0.98
Commit: 7b860adb0a23b53d6e2e40337f4317a3a2b067ba
Parents: eee9ecb
Author: kangkaisen <ka...@163.com>
Authored: Tue Feb 7 17:35:48 2017 +0800
Committer: kangkaisen <ka...@163.com>
Committed: Tue Feb 7 17:40:39 2017 +0800
----------------------------------------------------------------------
.../kylin/storage/hbase/steps/BulkLoadJob.java | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/7b860adb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
index 1c05767..30616c5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
@@ -55,11 +55,18 @@ public class BulkLoadJob extends AbstractHadoopJob {
Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
FsShell shell = new FsShell(conf);
- try {
- shell.run(new String[] { "-chmod", "-R", "777", input });
- } catch (Exception e) {
- logger.error("Couldn't change the file permissions ", e);
- throw new IOException(e);
+
+ int exitCode = -1;
+ int retryCount = 10;
+ while (exitCode != 0 && retryCount >= 1) {
+ exitCode = shell.run(new String[] { "-chmod", "-R", "777", input });
+ retryCount--;
+ Thread.sleep(5000);
+ }
+
+ if (exitCode != 0) {
+ logger.error("Failed to change the file permissions: " + input);
+ throw new IOException("Failed to change the file permissions: " + input);
}
String[] newArgs = new String[2];
[17/39] kylin git commit: minor fix ci
Posted by li...@apache.org.
minor fix ci
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/663a6f93
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/663a6f93
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/663a6f93
Branch: refs/heads/master-hbase0.98
Commit: 663a6f939f2fed83f096daf8a3a55ed3f785e5a0
Parents: 5d83c80
Author: shaofengshi <sh...@apache.org>
Authored: Sun Feb 5 09:57:34 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sun Feb 5 09:57:34 2017 +0800
----------------------------------------------------------------------
.../org/apache/kylin/provision/BuildCubeWithStream.java | 12 +++++++++---
.../test/java/org/apache/kylin/query/KylinTestBase.java | 2 +-
2 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/663a6f93/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 37ff8ae..53c89cf 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -87,6 +87,8 @@ public class BuildCubeWithStream {
protected static boolean fastBuildMode = false;
private boolean generateData = true;
+ private static final int BUILD_ROUND = 5;
+
public void before() throws Exception {
deployEnv();
@@ -181,8 +183,12 @@ public class BuildCubeWithStream {
ExecutorService executorService = Executors.newCachedThreadPool();
List<FutureTask<ExecutableState>> futures = Lists.newArrayList();
- for (int i = 0; i < 5; i++) {
- Thread.sleep(2 * 60 * 1000); // wait for new messages
+ for (int i = 0; i < BUILD_ROUND; i++) {
+ if (i == (BUILD_ROUND - 1)) {
+ // stop generating message to kafka
+ generateData = false;
+ }
+ Thread.sleep(1 * 60 * 1000); // wait for new messages
FutureTask futureTask = new FutureTask(new Callable<ExecutableState>() {
@Override
public ExecutableState call() {
@@ -202,7 +208,7 @@ public class BuildCubeWithStream {
futures.add(futureTask);
}
- generateData = false; // stop generating message to kafka
+ generateData = false;
executorService.shutdown();
int succeedBuild = 0;
for (int i = 0; i < futures.size(); i++) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/663a6f93/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index 1cc5c76..6b9397d 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -646,7 +646,7 @@ public class KylinTestBase {
cubeConnection = DriverManager.getConnection("jdbc:calcite:model=" + olapTmp.getAbsolutePath(), props);
//setup h2
- h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072;DB_CLOSE_DELAY=-1", "sa", "");
+ h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072", "sa", "");
// Load H2 Tables (inner join)
H2Database h2DB = new H2Database(h2Connection, config);
h2DB.loadAllTables();
[35/39] kylin git commit: Fix bug in static init of DataType to pass
ExtendedColumnSerializerTest UT
Posted by li...@apache.org.
Fix bug in static init of DataType to pass ExtendedColumnSerializerTest UT
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/570ab42a
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/570ab42a
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/570ab42a
Branch: refs/heads/master-hbase0.98
Commit: 570ab42a9b7597ce85b17202b67cd0ea5403cba4
Parents: df3ecd3
Author: kangkaisen <ka...@163.com>
Authored: Wed Feb 8 21:25:37 2017 +0800
Committer: kangkaisen <ka...@163.com>
Committed: Wed Feb 8 21:38:00 2017 +0800
----------------------------------------------------------------------
.../main/java/org/apache/kylin/metadata/datatype/DataType.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/570ab42a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
index b726c5f..d3756b8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
@@ -114,7 +114,8 @@ public class DataType implements Serializable {
public static final DataType ANY = DataType.getType("any");
static {
- MeasureTypeFactory.init();
+ //to ensure the MeasureTypeFactory class has initialized
+ MeasureTypeFactory.getUDAFs();
}
public static DataType getType(String type) {
[32/39] kylin git commit: KYLIN-2377 Add kylin client query timeout
Posted by li...@apache.org.
KYLIN-2377 Add kylin client query timeout
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/8263752a
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/8263752a
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/8263752a
Branch: refs/heads/master-hbase0.98
Commit: 8263752a499158342e0588fda851a4006e8b1669
Parents: 0dcce15
Author: kangkaisen <ka...@live.com>
Authored: Tue Jan 10 14:02:18 2017 +0800
Committer: kangkaisen <ka...@163.com>
Committed: Wed Feb 8 15:51:30 2017 +0800
----------------------------------------------------------------------
.../org/apache/kylin/common/KylinConfigBase.java | 4 ++++
.../java/org/apache/kylin/cube/CubeInstance.java | 1 +
.../kylin/metadata/realization/IRealization.java | 5 ++++-
.../org/apache/kylin/storage/StorageContext.java | 16 +++++++++++++++-
.../storage/gtrecord/GTCubeStorageQueryBase.java | 2 ++
.../gtrecord/SequentialCubeTupleIterator.java | 5 +++++
.../apache/kylin/storage/hybrid/HybridInstance.java | 1 +
7 files changed, 32 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index fe15b1e..ebd9dfc 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -877,6 +877,10 @@ abstract public class KylinConfigBase implements Serializable {
return udfMap;
}
+ public int getQueryTimeoutSeconds() {
+ return Integer.parseInt(this.getOptional("kylin.query.timeout-seconds", "0"));
+ }
+
// ============================================================================
// SERVER
// ============================================================================
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 1d60575..fb9a7a7 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -178,6 +178,7 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
return sizeRecordSize;
}
+ @Override
public KylinConfig getConfig() {
return config;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
index a0243f4..aafc0f0 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.realization;
import java.util.List;
import java.util.Set;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.model.ColumnDesc;
import org.apache.kylin.metadata.model.DataModelDesc;
import org.apache.kylin.metadata.model.IStorageAware;
@@ -42,7 +43,7 @@ public interface IRealization extends IStorageAware {
public DataModelDesc getModel();
public Set<TblColRef> getAllColumns();
-
+
public Set<ColumnDesc> getAllColumnDescs();
public List<TblColRef> getAllDimensions();
@@ -60,4 +61,6 @@ public interface IRealization extends IStorageAware {
public long getDateRangeEnd();
public boolean supportsLimitPushDown();
+
+ public KylinConfig getConfig();
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
index ec46f83..ab0ea73 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
@@ -41,6 +41,7 @@ public class StorageContext {
private int finalPushDownLimit = Integer.MAX_VALUE;
private boolean hasSort = false;
private boolean acceptPartialResult = false;
+ private long deadline;
private boolean exactAggregation = false;
private boolean needStorageAggregation = false;
@@ -123,6 +124,19 @@ public class StorageContext {
}
}
+ public long getDeadline() {
+ return this.deadline;
+ }
+
+ public void setDeadline(IRealization realization) {
+ int timeout = realization.getConfig().getQueryTimeoutSeconds() * 1000;
+ if (timeout == 0) {
+ this.deadline = Long.MAX_VALUE;
+ } else {
+ this.deadline = timeout + System.currentTimeMillis();
+ }
+ }
+
public void markSort() {
this.hasSort = true;
}
@@ -202,5 +216,5 @@ public class StorageContext {
public void setStorageQuery(IStorageQuery storageQuery) {
this.storageQuery = storageQuery;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
index 4fcfad1..4dbdf94 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
@@ -120,6 +120,8 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
// set limit push down
enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, loosenedColumnD, sqlDigest.aggregations, context);
+ // set query deadline
+ context.setDeadline(cubeInstance);
// set cautious threshold to prevent out of memory
setThresholdIfNecessary(dimensionsD, metrics, context);
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
index c621215..c6b2c6c 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
@@ -28,6 +28,7 @@ import javax.annotation.Nullable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.cuboid.Cuboid;
+import org.apache.kylin.gridtable.GTScanTimeoutException;
import org.apache.kylin.metadata.model.FunctionDesc;
import org.apache.kylin.metadata.model.TblColRef;
import org.apache.kylin.metadata.tuple.ITuple;
@@ -141,6 +142,10 @@ public class SequentialCubeTupleIterator implements ITupleIterator {
@Override
public ITuple next() {
+ if (scanCount % 100 == 1 && System.currentTimeMillis() > context.getDeadline()) {
+ throw new GTScanTimeoutException("Query Timeout!");
+ }
+
// prevent the big query to make the Query Server OOM
if (scanCount++ > SCAN_THRESHOLD) {
throw new ScanOutOfLimitException("Scan count exceed the scan threshold: " + SCAN_THRESHOLD);
http://git-wip-us.apache.org/repos/asf/kylin/blob/8263752a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
index 4f0e446..1b113ee 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
@@ -251,6 +251,7 @@ public class HybridInstance extends RootPersistentEntity implements IRealization
return getType() + "[name=" + name + "]";
}
+ @Override
public KylinConfig getConfig() {
return config;
}
[02/39] kylin git commit: KYLIN-2414 Distinguish UHC columns from
normal columns in KYLIN-2217
Posted by li...@apache.org.
KYLIN-2414 Distinguish UHC columns from normal columns in KYLIN-2217
Signed-off-by: shaofengshi <sh...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e6a9382b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e6a9382b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e6a9382b
Branch: refs/heads/master-hbase0.98
Commit: e6a9382b8d357e5bb13be2f678ebabb8bad75dc7
Parents: 5eae37e
Author: xiefan46 <95...@qq.com>
Authored: Tue Jan 24 11:14:40 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Thu Jan 26 09:56:54 2017 +0800
----------------------------------------------------------------------
.../src/main/java/org/apache/kylin/common/KylinConfigBase.java | 3 ---
.../kylin/engine/mr/steps/FactDistinctColumnsReducer.java | 6 ++++++
2 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/e6a9382b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index b1acbbf..b25bcc0 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -741,9 +741,6 @@ abstract public class KylinConfigBase implements Serializable {
}
public boolean isReducerLocalBuildDict() {
- if (getUHCReducerCount() != 1) {
- return false;
- }
return Boolean.parseBoolean(getOptional("kylin.engine.mr.reducer-local-build-dict", "true"));
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/e6a9382b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index 8c56bdf..cf94b30 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -132,6 +132,12 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
if (cubeDesc.getDictionaryBuilderClass(col) != null) { // only works with default dictionary builder
isReducerLocalBuildDict = false;
}
+ if(config.getUHCReducerCount() > 1) {
+ int[] uhcIndex = CubeManager.getInstance(config).getUHCIndex(cubeDesc);
+ int colIndex = reducerIdToColumnIndex.get(taskId);
+ if (uhcIndex[colIndex] == 1)
+ isReducerLocalBuildDict = false; //for UHC columns, this feature should be disabled
+ }
if (isReducerLocalBuildDict) {
builder = DictionaryGenerator.newDictionaryBuilder(col.getType());
builder.init(null, 0);
[03/39] kylin git commit: KYLIN-2414 minor rename on properties
Posted by li...@apache.org.
KYLIN-2414 minor rename on properties
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c3fff6d1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c3fff6d1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c3fff6d1
Branch: refs/heads/master-hbase0.98
Commit: c3fff6d19d355e78461fa7f32d02feabc5bf63c8
Parents: e6a9382
Author: shaofengshi <sh...@apache.org>
Authored: Thu Jan 26 10:53:19 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Thu Jan 26 10:53:19 2017 +0800
----------------------------------------------------------------------
build/conf/kylin.properties | 8 +++++++-
.../org/apache/kylin/common/KylinConfigBase.java | 4 ++--
.../mr/steps/FactDistinctColumnsReducer.java | 19 +++++++++----------
3 files changed, 18 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/c3fff6d1/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 43ea17d..1232c47 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -127,9 +127,15 @@ kylin.engine.mr.max-reducer-number=500
kylin.engine.mr.mapper-input-rows=1000000
+# Enable dictionary building in MR reducer
+kylin.engine.mr.build-dict-in-reducer=true
+
+# Number of reducers for fetching UHC column distinct values
+kylin.engine.mr.uhc-reducer-count=1
+
### CUBE | DICTIONARY ###
-# 'auto', 'inmem', 'layer' or 'random' for testing
+# 'auto', 'inmem' or 'layer'
kylin.cube.algorithm=auto
# A smaller threshold prefers layer, a larger threshold prefers in-mem
http://git-wip-us.apache.org/repos/asf/kylin/blob/c3fff6d1/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index b25bcc0..6a88fc4 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -740,8 +740,8 @@ abstract public class KylinConfigBase implements Serializable {
return Integer.parseInt(getOptional("kylin.engine.mr.uhc-reducer-count", "1"));
}
- public boolean isReducerLocalBuildDict() {
- return Boolean.parseBoolean(getOptional("kylin.engine.mr.reducer-local-build-dict", "true"));
+ public boolean isBuildDictInReducerEnabled() {
+ return Boolean.parseBoolean(getOptional("kylin.engine.mr.build-dict-in-reducer", "true"));
}
public String getYarnStatusCheckUrl() {
http://git-wip-us.apache.org/repos/asf/kylin/blob/c3fff6d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index cf94b30..5b795c2 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -78,7 +78,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
private int rowCount = 0;
//local build dict
- private boolean isReducerLocalBuildDict;
+ private boolean buildDictInReducer;
private IDictionaryBuilder builder;
private long timeMaxValue = Long.MIN_VALUE;
private long timeMinValue = Long.MAX_VALUE;
@@ -119,30 +119,29 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
isPartitionCol = true;
col = cubeDesc.getModel().getPartitionDesc().getPartitionDateColumnRef();
if (col == null) {
- logger.info("Do not have partition col. This reducer will keep empty");
+ logger.info("No partition col. This reducer will do nothing");
}
} else {
// normal col
col = columnList.get(reducerIdToColumnIndex.get(taskId));
-
Preconditions.checkNotNull(col);
// local build dict
- isReducerLocalBuildDict = config.isReducerLocalBuildDict();
+ buildDictInReducer = config.isBuildDictInReducerEnabled();
if (cubeDesc.getDictionaryBuilderClass(col) != null) { // only works with default dictionary builder
- isReducerLocalBuildDict = false;
+ buildDictInReducer = false;
}
if(config.getUHCReducerCount() > 1) {
int[] uhcIndex = CubeManager.getInstance(config).getUHCIndex(cubeDesc);
int colIndex = reducerIdToColumnIndex.get(taskId);
if (uhcIndex[colIndex] == 1)
- isReducerLocalBuildDict = false; //for UHC columns, this feature should be disabled
+ buildDictInReducer = false; //for UHC columns, this feature should be disabled
}
- if (isReducerLocalBuildDict) {
+ if (buildDictInReducer) {
builder = DictionaryGenerator.newDictionaryBuilder(col.getType());
builder.init(null, 0);
}
- logger.info("Reducer " + taskId + " handling column " + col + ", isReducerLocalBuildDict=" + isReducerLocalBuildDict);
+ logger.info("Reducer " + taskId + " handling column " + col + ", buildDictInReducer=" + buildDictInReducer);
}
}
@@ -192,7 +191,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
timeMaxValue = Math.max(timeMaxValue, time);
} else {
// normal col
- if (isReducerLocalBuildDict) {
+ if (buildDictInReducer) {
String value = Bytes.toString(key.getBytes(), 1, key.getLength() - 1);
logAFewRows(value);
builder.addValue(value);
@@ -228,7 +227,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
outputPartitionInfo();
} else {
// normal col
- if (isReducerLocalBuildDict) {
+ if (buildDictInReducer) {
Dictionary<String> dict = builder.build();
outputDict(col, dict);
}
[18/39] kylin git commit: Update sample cube to pass latest validation
Posted by li...@apache.org.
Update sample cube to pass latest validation
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9cd6c707
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9cd6c707
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9cd6c707
Branch: refs/heads/master-hbase0.98
Commit: 9cd6c7075839e41a0f2ff71334343613ce503cd3
Parents: 663a6f9
Author: shaofengshi <sh...@apache.org>
Authored: Sun Feb 5 11:31:13 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sun Feb 5 11:31:13 2017 +0800
----------------------------------------------------------------------
examples/sample_cube/template/model_desc/kylin_sales_model.json | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/9cd6c707/examples/sample_cube/template/model_desc/kylin_sales_model.json
----------------------------------------------------------------------
diff --git a/examples/sample_cube/template/model_desc/kylin_sales_model.json b/examples/sample_cube/template/model_desc/kylin_sales_model.json
index cce360f..cfe6845 100644
--- a/examples/sample_cube/template/model_desc/kylin_sales_model.json
+++ b/examples/sample_cube/template/model_desc/kylin_sales_model.json
@@ -76,9 +76,7 @@
} ],
"metrics": [
"PRICE",
- "ITEM_COUNT",
- "SELLER_ID",
- "LSTG_FORMAT_NAME"
+ "ITEM_COUNT"
],
"last_modified" : 1422435345362,
"fact_table" : "DEFAULT.KYLIN_SALES",
[25/39] kylin git commit: KYLIN-2432 Couldn't select partition column
in some old browser
Posted by li...@apache.org.
KYLIN-2432 Couldn't select partition column in some old browser
Signed-off-by: zhongjian <ji...@163.com>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6d11dd1d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6d11dd1d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6d11dd1d
Branch: refs/heads/master-hbase0.98
Commit: 6d11dd1d27a74fe50cd4fa58d2acf26f728f82e7
Parents: 8581f1d
Author: luguosheng <55...@qq.com>
Authored: Tue Feb 7 16:54:05 2017 +0800
Committer: zhongjian <ji...@163.com>
Committed: Tue Feb 7 17:08:45 2017 +0800
----------------------------------------------------------------------
webapp/app/js/config.js | 12 +++++++++++-
1 file changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/6d11dd1d/webapp/app/js/config.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/config.js b/webapp/app/js/config.js
index 5f72eb2..b9ae6a5 100644
--- a/webapp/app/js/config.js
+++ b/webapp/app/js/config.js
@@ -35,7 +35,17 @@ var Config = {
},
contact_mail: ''
};
-
+//polyfill: startsWith and endsWith do not work in older Chrome versions
+if (typeof String.prototype.startsWith != 'function') {
+ String.prototype.startsWith = function (prefix){
+ return this.slice(0, prefix.length) === prefix;
+ };
+}
+if (typeof String.prototype.endsWith != 'function') {
+ String.prototype.endsWith = function(suffix) {
+ return this.indexOf(suffix, this.length - suffix.length) !== -1;
+ };
+}
// Angular module to load routes.
KylinApp.config(function ($routeProvider, $httpProvider, $locationProvider, $logProvider) {
//resolve http always use cache data in IE11,IE10
[10/39] kylin git commit: KYLIN-2420 Record zookeeper lock holder
information
Posted by li...@apache.org.
KYLIN-2420 Record zookeeper lock holder information
Signed-off-by: Billy Liu <bi...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/56c7aa50
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/56c7aa50
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/56c7aa50
Branch: refs/heads/master-hbase0.98
Commit: 56c7aa505706f9aa878b4d3804cdf3f357a7931f
Parents: 7611338
Author: Roger Shi <ro...@hotmail.com>
Authored: Sat Feb 4 13:43:03 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Sat Feb 4 13:46:59 2017 +0800
----------------------------------------------------------------------
.../storage/hbase/util/ZookeeperJobLock.java | 26 +++++++++++++++++++-
1 file changed, 25 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/56c7aa50/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index bdd3981..7bf7498 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -18,6 +18,8 @@
package org.apache.kylin.storage.hbase.util;
+import java.lang.management.ManagementFactory;
+import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
@@ -35,6 +37,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.job.lock.JobLock;
import org.apache.kylin.storage.hbase.HBaseConnection;
+import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,9 +66,14 @@ public class ZookeeperJobLock implements JobLock {
}
RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
- this.zkClient = CuratorFrameworkFactory.newClient(zkConnectString, retryPolicy);
+ try {
+ this.zkClient = CuratorFrameworkFactory.builder().connectString(zkConnectString).retryPolicy(retryPolicy).defaultData(getIpProcess()).build();
+ } catch (UnknownHostException e) {
+ throw new RuntimeException(e);
+ }
this.zkClient.start();
this.sharedLock = new InterProcessMutex(zkClient, this.scheduleID);
+
boolean hasLock = false;
try {
hasLock = sharedLock.acquire(3, TimeUnit.SECONDS);
@@ -74,9 +82,20 @@ public class ZookeeperJobLock implements JobLock {
}
if (!hasLock) {
logger.warn("fail to acquire lock, scheduler has not been started; maybe another kylin process is still running?");
+ try {
+ for (String node : sharedLock.getParticipantNodes()) {
+ logger.warn("lock holder info: {}", new String(zkClient.getData().forPath(node)));
+ }
+ } catch (Exception e) {
+ logger.warn("error check participant", e);
+ if (!(e instanceof KeeperException.NoNodeException)) {
+ throw new RuntimeException(e);
+ }
+ }
zkClient.close();
return false;
}
+
return true;
}
@@ -115,4 +134,9 @@ public class ZookeeperJobLock implements JobLock {
private String schedulerId() {
return ZOOKEEPER_LOCK_PATH + "/" + KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
}
+
+ private byte[] getIpProcess() throws UnknownHostException {
+ logger.info("get IP and processId: {}", ManagementFactory.getRuntimeMXBean().getName().getBytes());
+ return ManagementFactory.getRuntimeMXBean().getName().getBytes();
+ }
}
[11/39] kylin git commit: KYLIN-2423 Model should always include
PK/FK as dimensions
Posted by li...@apache.org.
KYLIN-2423 Model should always include PK/FK as dimensions
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/855301dc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/855301dc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/855301dc
Branch: refs/heads/master-hbase0.98
Commit: 855301dc7fc83ece4f03dd981f3184bb46a97105
Parents: 56c7aa5
Author: Li Yang <li...@apache.org>
Authored: Sat Feb 4 15:19:31 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Sat Feb 4 16:12:42 2017 +0800
----------------------------------------------------------------------
.../kylin/metadata/model/DataModelDesc.java | 76 ++++++++++++++++++++
.../kylin/metadata/model/JoinTableDesc.java | 7 +-
.../apache/kylin/metadata/model/JoinsTree.java | 11 +--
.../metadata/model/ModelDimensionDesc.java | 2 +
.../model_desc/ut_large_dimension_number.json | 2 +-
5 files changed, 90 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/855301dc/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index cc599a4..86eea55 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -20,15 +20,18 @@ package org.apache.kylin.metadata.model;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.apache.commons.lang3.ArrayUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ResourceStore;
import org.apache.kylin.common.persistence.RootPersistentEntity;
import org.apache.kylin.common.util.StringUtil;
import org.apache.kylin.metadata.MetadataConstants;
+import org.apache.kylin.metadata.model.JoinsTree.Chain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -294,6 +297,11 @@ public class DataModelDesc extends RootPersistentEntity {
initJoinsTree();
initDimensionsAndMetrics();
initPartitionDesc();
+
+ boolean reinit = validate();
+ if (reinit) { // model slightly changed by validate() and must init() again
+ init(config, tables);
+ }
}
private void initJoinTablesForUpgrade() {
@@ -443,6 +451,74 @@ public class DataModelDesc extends RootPersistentEntity {
joinsTree = new JoinsTree(rootFactTableRef, joins);
}
+ private boolean validate() {
+ Set<TblColRef> mcols = new HashSet<>();
+ for (String m : metrics) {
+ mcols.add(findColumn(m));
+ }
+
+ // validate no dup between dimensions/metrics
+ for (ModelDimensionDesc dim : dimensions) {
+ String table = dim.getTable();
+ for (String c : dim.getColumns()) {
+ TblColRef dcol = findColumn(table, c);
+ if (mcols.contains(dcol))
+ throw new IllegalStateException(dcol + " cannot be both dimension and metrics at the same time in " + this);
+ }
+ }
+
+ // validate PK/FK are in dimensions
+ boolean pkfkDimAmended = false;
+ for (Chain chain : joinsTree.tableChains.values()) {
+ pkfkDimAmended = validatePkFkDim(chain.join, mcols) || pkfkDimAmended;
+ }
+ return pkfkDimAmended;
+ }
+
+ private boolean validatePkFkDim(JoinDesc join, Set<TblColRef> mcols) {
+ if (join == null)
+ return false;
+
+ boolean pkfkDimAmended = false;
+
+ for (TblColRef c : join.getForeignKeyColumns()) {
+ if (!mcols.contains(c)) {
+ pkfkDimAmended = validatePkFkDim(c) || pkfkDimAmended;
+ }
+ }
+ for (TblColRef c : join.getPrimaryKeyColumns()) {
+ if (!mcols.contains(c)) {
+ pkfkDimAmended = validatePkFkDim(c) || pkfkDimAmended;
+ }
+ }
+ return pkfkDimAmended;
+ }
+
+ private boolean validatePkFkDim(TblColRef c) {
+ String t = c.getTableAlias();
+ ModelDimensionDesc dimDesc = null;
+ for (ModelDimensionDesc dim : dimensions) {
+ if (dim.getTable().equals(t)) {
+ dimDesc = dim;
+ break;
+ }
+ }
+
+ if (dimDesc == null) {
+ dimDesc = new ModelDimensionDesc();
+ dimDesc.setTable(t);
+ dimDesc.setColumns(new String[0]);
+ }
+
+ if (ArrayUtils.contains(dimDesc.getColumns(), c.getName()) == false) {
+ String[] newCols = ArrayUtils.add(dimDesc.getColumns(), c.getName());
+ dimDesc.setColumns(newCols);
+ return true;
+ }
+
+ return false;
+ }
+
/**
* Add error info and thrown exception out
*/
http://git-wip-us.apache.org/repos/asf/kylin/blob/855301dc/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
index 51e5787..9ca806e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
@@ -18,17 +18,18 @@
package org.apache.kylin.metadata.model;
+import java.io.Serializable;
+
import org.apache.kylin.metadata.model.DataModelDesc.TableKind;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
-import java.io.Serializable;
-
@JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
public class JoinTableDesc implements Serializable {
+ private static final long serialVersionUID = 1L;
@JsonProperty("table")
private String table;
http://git-wip-us.apache.org/repos/asf/kylin/blob/855301dc/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
index c132d0e..3ab9a46 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
@@ -18,8 +18,6 @@
package org.apache.kylin.metadata.model;
-import com.google.common.base.Preconditions;
-
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
@@ -27,8 +25,11 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-public class JoinsTree implements Serializable {
+import com.google.common.base.Preconditions;
+public class JoinsTree implements Serializable {
+ private static final long serialVersionUID = 1L;
+
final Map<String, Chain> tableChains = new LinkedHashMap<>();
public JoinsTree(TableRef rootTable, List<JoinDesc> joins) {
@@ -112,7 +113,9 @@ public class JoinsTree implements Serializable {
return chain.join;
}
- static class Chain implements java.io.Serializable {
+ static class Chain implements Serializable {
+ private static final long serialVersionUID = 1L;
+
TableRef table; // pk side
JoinDesc join;
Chain fkSide;
http://git-wip-us.apache.org/repos/asf/kylin/blob/855301dc/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
index d14a56b..c0ddbad 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
@@ -30,6 +30,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
public class ModelDimensionDesc implements Serializable {
+ private static final long serialVersionUID = 1L;
+
@JsonProperty("table")
private String table;
@JsonProperty("columns")
http://git-wip-us.apache.org/repos/asf/kylin/blob/855301dc/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json b/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
index 38b070e..9eeb774 100644
--- a/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
+++ b/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
@@ -11,7 +11,7 @@
"table" : "DEFAULT.WIDE_TABLE",
"columns" : ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "AA", "AB", "AC", "AD", "AE", "AF", "AG", "AH", "AI", "AJ", "AK", "AL", "AM", "AN", "AO", "AP", "AQ", "AR", "AS", "AT", "AU", "AV", "AW", "AX", "AY", "AZ"]
}],
- "metrics" : [ "A" ],
+ "metrics" : [ ],
"filter_condition" : "",
"partition_desc" : {
"partition_date_column" : "DEFAULT.WIDE_TABLE.A",
[23/39] kylin git commit: minor,update reload and unload table style
Posted by li...@apache.org.
minor,update reload and unload table style
Signed-off-by: zhongjian <ji...@163.com>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7a6dd1ca
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7a6dd1ca
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7a6dd1ca
Branch: refs/heads/master-hbase0.98
Commit: 7a6dd1ca3a35bb2a0223b7df0762f922c341d0af
Parents: 1f44468
Author: chenzhx <34...@qq.com>
Authored: Mon Feb 6 18:19:49 2017 +0800
Committer: zhongjian <ji...@163.com>
Committed: Mon Feb 6 18:54:47 2017 +0800
----------------------------------------------------------------------
webapp/app/css/AdminLTE.css | 26 ------
webapp/app/js/controllers/sourceMeta.js | 96 +++++++++++---------
.../js/directives/kylin_abn_tree_directive.js | 6 +-
.../app/partials/tables/source_table_tree.html | 4 -
webapp/app/partials/tables/table_detail.html | 3 +-
5 files changed, 57 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/7a6dd1ca/webapp/app/css/AdminLTE.css
----------------------------------------------------------------------
diff --git a/webapp/app/css/AdminLTE.css b/webapp/app/css/AdminLTE.css
index c7740d1..34e94e5 100644
--- a/webapp/app/css/AdminLTE.css
+++ b/webapp/app/css/AdminLTE.css
@@ -4801,29 +4801,3 @@ Gradient Background colors
}
}
-.abn-tree .abn-tree-row .tree-table-btn{
- width: 4%;
- float: right;
- border-radius: 5px;
- margin-right: 5px;
- padding: 3px 16px 2px 5px;
- color: #ffffff;
- position:static;
-}
-.abn-tree .abn-tree-row .tree-table-btn .tooltip{
- overflow:visible;
-}
-.abn-tree .abn-tree-row .btn-info:hover{
- background-color:#269abc;
-}
-.abn-tree > .abn-tree-row.active > .btn-info{
- background-color:#269abc;
- border-left-color:#269abc;
-}
-.abn-tree .abn-tree-row .btn-success:hover{
- background-color:#008d4c;
-}
-.abn-tree > .abn-tree-row.active > .btn-success{
- background-color:#008d4c;
- border-left-color:#008d4c;
-}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7a6dd1ca/webapp/app/js/controllers/sourceMeta.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
index 5421673..e2fdd66 100755
--- a/webapp/app/js/controllers/sourceMeta.js
+++ b/webapp/app/js/controllers/sourceMeta.js
@@ -132,9 +132,10 @@ KylinApp
});
};
- $scope.reloadTable = function (tableName,projectName){
+ $scope.reloadTable = function (tableName){
+ var delay = $q.defer();
loadingRequest.show();
- TableService.loadHiveTable({tableName: tableName, action: projectName}, {calculate: $scope.isCalculate}, function (result) {
+ TableService.loadHiveTable({tableName: tableName, action: $scope.projectModel.selectedProject}, {calculate: $scope.isCalculate}, function (result) {
var loadTableInfo = "";
angular.forEach(result['result.loaded'], function (table) {
loadTableInfo += "\n" + table;
@@ -153,7 +154,7 @@ KylinApp
SweetAlert.swal('Partial loaded!', 'The following table(s) have been successfully loaded: ' + loadTableInfo + "\n\n Failed to load following table(s):" + unloadedTableInfo, 'warning');
}
loadingRequest.hide();
- $scope.aceSrcTbLoaded(true);
+ delay.resolve("");
}, function (e) {
if (e.data && e.data.exception) {
var message = e.data.exception;
@@ -164,49 +165,58 @@ KylinApp
}
loadingRequest.hide();
})
+ return delay.promise;
}
- $scope.removeList = function (tableName,projectName) {
- if (tableName.trim() === "") {
- SweetAlert.swal('', 'Please input table(s) you want to unload.', 'info');
- return;
- }
- if (!projectName) {
- SweetAlert.swal('', 'Please choose your project first!.', 'info');
- return;
- }
- loadingRequest.show();
- TableService.unLoadHiveTable({tableName: tableName, action: projectName}, {}, function (result) {
- var removedTableInfo = "";
- angular.forEach(result['result.unload.success'], function (table) {
- removedTableInfo += "\n" + table;
- })
- var unRemovedTableInfo = "";
- angular.forEach(result['result.unload.fail'], function (table) {
- unRemovedTableInfo += "\n" + table;
- })
- if (result['result.unload.fail'].length != 0 && result['result.unload.success'].length == 0) {
- SweetAlert.swal('Failed!', 'Failed to unload following table(s): ' + unRemovedTableInfo, 'error');
- }
- if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length == 0) {
- SweetAlert.swal('Success!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo, 'success');
- }
- if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length != 0) {
- SweetAlert.swal('Partial unloaded!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo + "\n\n Failed to unload following table(s):" + unRemovedTableInfo, 'warning');
- }
- loadingRequest.hide();
- $scope.aceSrcTbLoaded(true);
- }, function (e) {
- if (e.data && e.data.exception) {
- var message = e.data.exception;
- var msg = !!(message) ? message : 'Failed to take action.';
- SweetAlert.swal('Oops...', msg, 'error');
- } else {
- SweetAlert.swal('Oops...', "Failed to take action.", 'error');
+ $scope.unloadTable = function (tableName) {
+ SweetAlert.swal({
+ title: "",
+ text: "Are you sure to unload this table?",
+ showCancelButton: true,
+ confirmButtonColor: '#DD6B55',
+ confirmButtonText: "Yes",
+ cancelButtonText: "No",
+ closeOnConfirm: true
+ }, function (isConfirm) {
+ if (isConfirm) {
+ if (!$scope.projectModel.selectedProject) {
+ SweetAlert.swal('', 'Please choose your project first!.', 'info');
+ return;
+ }
+ loadingRequest.show();
+ TableService.unLoadHiveTable({tableName: tableName, action: $scope.projectModel.selectedProject}, {}, function (result) {
+ var removedTableInfo = "";
+ angular.forEach(result['result.unload.success'], function (table) {
+ removedTableInfo += "\n" + table;
+ })
+ var unRemovedTableInfo = "";
+ angular.forEach(result['result.unload.fail'], function (table) {
+ unRemovedTableInfo += "\n" + table;
+ })
+ if (result['result.unload.fail'].length != 0 && result['result.unload.success'].length == 0) {
+ SweetAlert.swal('Failed!', 'Failed to unload following table(s): ' + unRemovedTableInfo, 'error');
+ }
+ if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length == 0) {
+ SweetAlert.swal('Success!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo, 'success');
+ }
+ if (result['result.unload.success'].length != 0 && result['result.unload.fail'].length != 0) {
+ SweetAlert.swal('Partial unloaded!', 'The following table(s) have been successfully unloaded: ' + removedTableInfo + "\n\n Failed to unload following table(s):" + unRemovedTableInfo, 'warning');
+ }
+ loadingRequest.hide();
+ $scope.aceSrcTbLoaded(true);
+ }, function (e) {
+ if (e.data && e.data.exception) {
+ var message = e.data.exception;
+ var msg = !!(message) ? message : 'Failed to take action.';
+ SweetAlert.swal('Oops...', msg, 'error');
+ } else {
+ SweetAlert.swal('Oops...', "Failed to take action.", 'error');
+ }
+ loadingRequest.hide();
+ })
}
- loadingRequest.hide();
})
}
@@ -392,7 +402,9 @@ KylinApp
}
$scope.cancel();
- scope.reloadTable ($scope.tableNames,projectName);
+ scope.reloadTable($scope.tableNames).then(function(){
+ scope.aceSrcTbLoaded(true);
+ });
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7a6dd1ca/webapp/app/js/directives/kylin_abn_tree_directive.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/directives/kylin_abn_tree_directive.js b/webapp/app/js/directives/kylin_abn_tree_directive.js
index 7545666..9466b6a 100644
--- a/webapp/app/js/directives/kylin_abn_tree_directive.js
+++ b/webapp/app/js/directives/kylin_abn_tree_directive.js
@@ -31,13 +31,9 @@
'$timeout', function($timeout) {
return {
restrict: 'E',
- template: "<ul class=\"nav nav-list nav-pills nav-stacked abn-tree\">\n <li data=\"{{row.branch.fullName}}\" ng-repeat=\"row in tree_rows | filter:{visible:true} track by row.branch.uid\" ng-animate=\"'abn-tree-animate'\" ng-class=\"'level-' + {{ row.level }} + (row.branch.selected ? ' active':'') + ' ' +row.classes.join(' ')\" class=\"abn-tree-row\" ><a ng-click=\"user_clicks_branch(row.branch)\" ng-dblclick=\"user_dbClicks_branch(row.branch)\" style=\"width:80%;float:left;\"><i ng-class=\"row.tree_icon\" class=\"indented tree-icon\" > </i><span class=\"indented tree-label\">{{ row.label }} </span></a> <a class=\"btn btn-xs btn-info tree-table-btn\" ng-if=\"row.branch.data.exd&&row.level==2&&userService.hasRole('ROLE_ADMIN')&&row.branch.data.source_type==0 \" tooltip=\"UnLoad Hive Table\" tooltip-placement=\"left\" ng-click=\"unloadTable({tableName:row.branch.label,projectName:projectName})\" ><i class=\"fa fa-remove\"></i></a> <a class=\"btn btn-xs btn-success tree-ta
ble-btn\" tooltip-placement=\"left\" tooltip=\"ReLoad Hive Table\" ng-if=\"row.level==2&&userService.hasRole('ROLE_ADMIN')&&row.branch.data.source_type==0\" ng-click=\"reloadTable({tableName:row.branch.label,projectName:projectName})\"><i class=\"fa fa-download\"></i></a> </li>\n</ul>",
+ template: "<ul class=\"nav nav-list nav-pills nav-stacked abn-tree\">\n <li ng-repeat=\"row in tree_rows | filter:{visible:true} track by row.branch.uid\" ng-animate=\"'abn-tree-animate'\" ng-class=\"'level-' + {{ row.level }} + (row.branch.selected ? ' active':'') + ' ' +row.classes.join(' ')\" class=\"abn-tree-row\"><a ng-click=\"user_clicks_branch(row.branch)\" ng-dblclick=\"user_dbClicks_branch(row.branch)\"><i ng-class=\"row.tree_icon\" class=\"indented tree-icon\"> </i><span class=\"indented tree-label\">{{ row.label }} </span></a></li>\n</ul>",
replace: true,
scope: {
- userService:'=',
- reloadTable:'&',
- unloadTable:'&',
- projectName:'@',
treeData: '=',
onSelect: '&',
onDblclick:'&',
http://git-wip-us.apache.org/repos/asf/kylin/blob/7a6dd1ca/webapp/app/partials/tables/source_table_tree.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
index eb1ad79..e8ec286 100755
--- a/webapp/app/partials/tables/source_table_tree.html
+++ b/webapp/app/partials/tables/source_table_tree.html
@@ -36,10 +36,6 @@
<!--tree-->
<div style="width:100%; height:{{window}}px; overflow:auto;">
<abn-tree
- user-service = "userService"
- reload-table = "reloadTable(tableName,projectName)"
- unload-table = "removeList(tableName,projectName)"
- project-name = "{{projectModel.selectedProject}}"
tree-data = "tableModel.selectedSrcDb"
tree-control = "my_tree"
icon-leaf = "fa fa-columns"
http://git-wip-us.apache.org/repos/asf/kylin/blob/7a6dd1ca/webapp/app/partials/tables/table_detail.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/table_detail.html b/webapp/app/partials/tables/table_detail.html
index f227826..7f299b4 100644
--- a/webapp/app/partials/tables/table_detail.html
+++ b/webapp/app/partials/tables/table_detail.html
@@ -19,7 +19,8 @@
<div ng-controller="SourceMetaCtrl" class="nav-tabs-custom">
<div class="col-xs-12" ng-show="tableModel.selectedSrcDb&&tableModel.selectedSrcTable.name">
<h3 class="text-info">Table Schema:{{ tableModel.selectedSrcTable.name}}</h3>
-
+ <a class="btn btn-primary pull-right" ng-if="userService.hasRole('ROLE_ADMIN')" ng-click="unloadTable(tableModel.selectedSrcTable.database+'.'+tableModel.selectedSrcTable.name)" style="margin-left:10px;" ><span class="fa fa-remove"></span> Unload Table</a>
+ <a class="btn btn-success pull-right" ng-if="tableModel.selectedSrcTable.source_type==0&&userService.hasRole('ROLE_ADMIN')" ng-click="reloadTable(tableModel.selectedSrcTable.database+'.'+tableModel.selectedSrcTable.name)" style="margin-left:10px;" ><span class="fa fa-download"></span> Reload Table</a>
<div class="tabbable nav-tabs-custom">
<ul class="nav nav-tabs">
<li class="active">
[27/39] kylin git commit: KYLIN-2434 use
kylin.source.hive.database-for-flat-table in spark cubing
Posted by li...@apache.org.
KYLIN-2434 use kylin.source.hive.database-for-flat-table in spark cubing
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f62465cc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f62465cc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f62465cc
Branch: refs/heads/master-hbase0.98
Commit: f62465cc5a5fe0f90142c0606c0c75e8c721bf0e
Parents: b32cc95
Author: lidongsjtu <li...@apache.org>
Authored: Tue Feb 7 17:31:07 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Feb 7 17:31:14 2017 +0800
----------------------------------------------------------------------
.../java/org/apache/kylin/engine/spark/SparkCubingByLayer.java | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/f62465cc/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index 8892a73..071806c 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -150,11 +150,12 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
setupClasspath(sc, confPath);
HadoopUtil.deletePath(sc.hadoopConfiguration(), new Path(outputPath));
+ final KylinConfig envConfig = KylinConfig.getInstanceFromEnv();
+
HiveContext sqlContext = new HiveContext(sc.sc());
- final DataFrame intermediateTable = sqlContext.table(hiveTable);
+ final DataFrame intermediateTable = sqlContext.table(envConfig.getHiveDatabaseForIntermediateTable() + "." + hiveTable);
System.setProperty(KylinConfig.KYLIN_CONF, confPath);
- final KylinConfig envConfig = KylinConfig.getInstanceFromEnv();
final CubeInstance cubeInstance = CubeManager.getInstance(envConfig).getCube(cubeName);
final CubeDesc cubeDesc = cubeInstance.getDescriptor();
final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
[05/39] kylin git commit: KYLIN-2418 drop unused useSandbox
Posted by li...@apache.org.
KYLIN-2418 drop unused useSandbox
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/546f88f3
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/546f88f3
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/546f88f3
Branch: refs/heads/master-hbase0.98
Commit: 546f88f38ca017773c17ccaf2cf4812b0c7c4666
Parents: 0582512
Author: Yang Li <li...@apache.org>
Authored: Mon Jan 30 09:07:29 2017 +0800
Committer: Yang Li <li...@apache.org>
Committed: Tue Jan 31 08:11:41 2017 +0800
----------------------------------------------------------------------
.../kylin/common/persistence/FileResourceStore.java | 4 ++++
.../org/apache/kylin/common/persistence/ResourceTool.java | 10 ++++++----
kylin-it/pom.xml | 6 ------
pom.xml | 8 --------
.../apache/kylin/storage/hbase/HBaseResourceStore.java | 5 +++++
5 files changed, 15 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/546f88f3/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
index dcd3f38..3e012f5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
@@ -160,4 +160,8 @@ public class FileResourceStore extends ResourceStore {
return new File(root, resPath);
}
+ @Override
+ public String toString() {
+ return root.getAbsolutePath();
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/546f88f3/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index b3aac09..6ba68ae 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -133,22 +133,24 @@ public class ResourceTool {
ResourceStore src = ResourceStore.getStore(srcConfig);
ResourceStore dst = ResourceStore.getStore(dstConfig);
+ logger.info("Copy from {} to {}", src, dst);
+
copyR(src, dst, path);
}
public static void copy(KylinConfig srcConfig, KylinConfig dstConfig, List<String> paths) throws IOException {
ResourceStore src = ResourceStore.getStore(srcConfig);
ResourceStore dst = ResourceStore.getStore(dstConfig);
+
+ logger.info("Copy from {} to {}", src, dst);
+
for (String path : paths) {
copyR(src, dst, path);
}
}
public static void copy(KylinConfig srcConfig, KylinConfig dstConfig) throws IOException {
-
- ResourceStore src = ResourceStore.getStore(srcConfig);
- ResourceStore dst = ResourceStore.getStore(dstConfig);
- copyR(src, dst, "/");
+ copy(srcConfig, dstConfig, "/");
}
public static void copyR(ResourceStore src, ResourceStore dst, String path) throws IOException {
http://git-wip-us.apache.org/repos/asf/kylin/blob/546f88f3/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index f88db9f..9662806 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -272,10 +272,6 @@
</excludes>
<systemProperties>
<property>
- <name>useSandbox</name>
- <value>true</value>
- </property>
- <property>
<name>log4j.configuration</name>
<value>file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</value>
</property>
@@ -298,7 +294,6 @@
<classpathScope>test</classpathScope>
<executable>java</executable>
<arguments>
- <argument>-DuseSandbox=true</argument>
<argument>-Dhdp.version=${hdp.version}</argument>
<argument>-DfastBuildMode=${fastBuildMode}</argument>
<argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
@@ -320,7 +315,6 @@
<classpathScope>test</classpathScope>
<executable>java</executable>
<arguments>
- <argument>-DuseSandbox=true</argument>
<argument>-Dhdp.version=${hdp.version}</argument>
<argument>-DfastBuildMode=${fastBuildMode}</argument>
<argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
http://git-wip-us.apache.org/repos/asf/kylin/blob/546f88f3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index bd1882e..bf33e07 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1159,10 +1159,6 @@
</excludes>
<systemProperties>
<property>
- <name>useSandbox</name>
- <value>false</value>
- </property>
- <property>
<name>buildCubeUsingProvidedData</name>
<value>false</value>
</property>
@@ -1263,10 +1259,6 @@
</excludes>
<systemProperties>
<property>
- <name>useSandbox</name>
- <value>false</value>
- </property>
- <property>
<name>buildCubeUsingProvidedData</name>
<value>false</value>
</property>
http://git-wip-us.apache.org/repos/asf/kylin/blob/546f88f3/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 501f1e4..74ab017 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -371,4 +371,9 @@ public class HBaseResourceStore extends ResourceStore {
return put;
}
+
+ @Override
+ public String toString() {
+ return getAllInOneTableName() + "@hbase";
+ }
}
[31/39] kylin git commit: minor, import correct Lists
Posted by li...@apache.org.
minor, import correct Lists
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0dcce15f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0dcce15f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0dcce15f
Branch: refs/heads/master-hbase0.98
Commit: 0dcce15f7fc01f6d8c388661025e5dab99ef05ad
Parents: 39afa51
Author: Hongbin Ma <ma...@apache.org>
Authored: Wed Feb 8 15:18:10 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Wed Feb 8 15:18:10 2017 +0800
----------------------------------------------------------------------
kylin-it/src/test/java/org/apache/kylin/query/H2Database.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/0dcce15f/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
index 8b60c49..78ed1b6 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
@@ -29,7 +29,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import com.clearspring.analytics.util.Lists;
+import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.MetadataManager;
[14/39] kylin git commit: KYLIN-2422 NumberDictionary support for
decimal with extra 0 after "."
Posted by li...@apache.org.
KYLIN-2422 NumberDictionary support for decimal with extra 0 after "."
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/24fa338e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/24fa338e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/24fa338e
Branch: refs/heads/master-hbase0.98
Commit: 24fa338e608030e3762ce5a17340fcf1d82029b1
Parents: 5da5393
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 4 14:16:11 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 4 19:37:59 2017 +0800
----------------------------------------------------------------------
.../org/apache/kylin/dict/NumberDictionary.java | 25 ++++++++++++++++++++
.../apache/kylin/dict/NumberDictionaryTest.java | 5 +++-
2 files changed, 29 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/24fa338e/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
index c55937d..de28440 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
@@ -53,6 +53,7 @@ public class NumberDictionary<T> extends TrieDictionary<T> {
return;
}
+
if (len > buf.length) {
throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Internal buffer is only " + buf.length + " bytes");
}
@@ -104,6 +105,30 @@ public class NumberDictionary<T> extends TrieDictionary<T> {
bufOffset = start;
bufLen = buf.length - start;
+
+ // remove 0 in tail after the decimal point
+ if (decimalPoint != end) {
+ if (negative == true) {
+ while (buf[bufOffset + bufLen - 2] == '9' && (bufOffset + bufLen - 2 > decimalPoint)) {
+ bufLen--;
+ }
+
+ if (bufOffset + bufLen - 2 == decimalPoint) {
+ bufLen--;
+ }
+
+ buf[bufOffset + bufLen - 1] = ';';
+ } else {
+ while (buf[bufOffset + bufLen - 1] == '0' && (bufOffset + bufLen - 1 > decimalPoint)) {
+ bufLen--;
+ }
+
+ if (bufOffset + bufLen - 1 == decimalPoint) {
+ bufLen--;
+ }
+
+ }
+ }
}
int decodeNumber(byte[] returnValue, int offset) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/24fa338e/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
index 1c04745..36eedf5 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
@@ -91,11 +91,14 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
checkCodec("-12345", "-9999999999999987654;");
checkCodec("-12345.123", "-9999999999999987654.876;");
checkCodec("0", "00000000000000000000");
- checkCodec("0.0", "00000000000000000000.0");
//test resolved jira-1800
checkCodec("-0.0045454354354354359999999999877218", "-9999999999999999999.9954545645645645640000000000122781;");
checkCodec("-0.009999999999877218", "-9999999999999999999.990000000000122781;");
checkCodec("12343434372493274.438403840384023840253554345345345345", "00012343434372493274.438403840384023840253554345345345345");
+ assertEquals("00000000000000000052.57", encodeNumber("52.5700"));
+ assertEquals("00000000000000000000", encodeNumber("0.00"));
+ assertEquals("00000000000000000000", encodeNumber("0.0"));
+ assertEquals("-9999999999999987654.876;", encodeNumber("-12345.12300"));
}
private void checkCodec(String number, String code) {
[21/39] kylin git commit: minor,
add filterPushdown setter in GTScanRequest
Posted by li...@apache.org.
minor, add filterPushdown setter in GTScanRequest
Signed-off-by: Hongbin Ma <ma...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ef0fc868
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ef0fc868
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ef0fc868
Branch: refs/heads/master-hbase0.98
Commit: ef0fc86876db488bf65353ec50e5f29411835b82
Parents: 4047e8d
Author: Roger Shi <ro...@hotmail.com>
Authored: Mon Feb 6 15:40:56 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Feb 6 15:46:26 2017 +0800
----------------------------------------------------------------------
.../src/main/java/org/apache/kylin/gridtable/GTScanRequest.java | 4 ++++
1 file changed, 4 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/ef0fc868/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index dc90ed6..c45f90c 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -253,6 +253,10 @@ public class GTScanRequest {
return filterPushDown;
}
+ public void setFilterPushDown(TupleFilter filter) {
+ filterPushDown = filter;
+ }
+
public ImmutableBitSet getDimensions() {
return this.getColumns().andNot(this.getAggrMetrics());
}
[16/39] kylin git commit: KYLIN-2424 Optimize the integration test's
performance
Posted by li...@apache.org.
KYLIN-2424 Optimize the integration test's performance
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5d83c80f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5d83c80f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5d83c80f
Branch: refs/heads/master-hbase0.98
Commit: 5d83c80fbc0d6c3db434368cc8c29d786eebcd94
Parents: 6f0bc1c
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 4 21:37:05 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 4 21:37:05 2017 +0800
----------------------------------------------------------------------
.../apache/kylin/metadata/model/ColumnDesc.java | 8 +++++++
.../spark/SparkBatchCubingJobBuilder2.java | 2 +-
.../table/DEFAULT.STREAMING_TABLE.json | 9 +++++---
.../localmeta/table/DEFAULT.TEST_ACCOUNT.json | 6 +++--
.../table/DEFAULT.TEST_CATEGORY_GROUPINGS.json | 9 +++++---
.../localmeta/table/DEFAULT.TEST_COUNTRY.json | 6 +++--
.../table/DEFAULT.TEST_KYLIN_FACT.json | 21 ++++++++++++------
.../localmeta/table/DEFAULT.TEST_ORDER.json | 6 +++--
.../localmeta/table/EDW.TEST_CAL_DT.json | 15 ++++++++-----
.../table/EDW.TEST_SELLER_TYPE_DIM.json | 6 +++--
.../table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json | 6 +++--
.../localmeta/table/EDW.TEST_SITES.json | 9 +++++---
.../java/org/apache/kylin/query/H2Database.java | 23 ++++++++++++++++++++
.../org/apache/kylin/query/KylinTestBase.java | 4 +++-
14 files changed, 97 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
index 7105ede..e5b51e4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
@@ -49,6 +49,10 @@ public class ColumnDesc implements Serializable {
@JsonInclude(JsonInclude.Include.NON_NULL)
private String dataGen;
+ @JsonProperty("index")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
+ private String index;
+
// parsed from data type
private DataType type;
private DataType upgradedType;
@@ -157,6 +161,10 @@ public class ColumnDesc implements Serializable {
return dataGen;
}
+ public String getIndex() {
+ return index;
+ }
+
public void init(TableDesc table) {
this.table = table;
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
index 76b73b6..327d215 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
@@ -77,7 +77,7 @@ public class SparkBatchCubingJobBuilder2 extends BatchCubingJobBuilder2 {
try {
return ClassUtil.findContainingJar(Class.forName(className));
} catch (ClassNotFoundException e) {
- logger.error("failed to locate jar for class " + className, e);
+ logger.warn("failed to locate jar for class " + className + ", ignore it", e);
}
return "";
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.STREAMING_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.STREAMING_TABLE.json b/examples/test_case_data/localmeta/table/DEFAULT.STREAMING_TABLE.json
index f28683f..d67cbe5 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.STREAMING_TABLE.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.STREAMING_TABLE.json
@@ -5,17 +5,20 @@
{
"id": "1",
"name": "minute_start",
- "datatype": "timestamp"
+ "datatype": "timestamp",
+ "index": "T"
},
{
"id": "2",
"name": "hour_start",
- "datatype": "timestamp"
+ "datatype": "timestamp",
+ "index": "T"
},
{
"id": "3",
"name": "day_start",
- "datatype": "date"
+ "datatype": "date",
+ "index": "T"
},
{
"id": "4",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.TEST_ACCOUNT.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.TEST_ACCOUNT.json b/examples/test_case_data/localmeta/table/DEFAULT.TEST_ACCOUNT.json
index 89e265b..5945713 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.TEST_ACCOUNT.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.TEST_ACCOUNT.json
@@ -8,7 +8,8 @@
"id" : "1",
"name" : "ACCOUNT_ID",
"datatype" : "bigint",
- "data_gen" : "ID|10000000"
+ "data_gen" : "ID|10000000",
+ "index": "T"
}, {
"id" : "2",
"name" : "ACCOUNT_BUYER_LEVEL",
@@ -23,7 +24,8 @@
"id" : "4",
"name" : "ACCOUNT_COUNTRY",
"datatype" : "string",
- "data_gen" : "CN|FR|GB|GE|JP|IT|RU|US"
+ "data_gen" : "CN|FR|GB|GE|JP|IT|RU|US",
+ "index": "T"
}, {
"id" : "5",
"name" : "ACCOUNT_CONTACT",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.TEST_CATEGORY_GROUPINGS.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.TEST_CATEGORY_GROUPINGS.json b/examples/test_case_data/localmeta/table/DEFAULT.TEST_CATEGORY_GROUPINGS.json
index d7ab580..adaf987 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.TEST_CATEGORY_GROUPINGS.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.TEST_CATEGORY_GROUPINGS.json
@@ -5,15 +5,18 @@
"columns" : [ {
"id" : "1",
"name" : "LEAF_CATEG_ID",
- "datatype" : "bigint"
+ "datatype" : "bigint",
+ "index": "T"
}, {
"id" : "2",
"name" : "LEAF_CATEG_NAME",
- "datatype" : "string"
+ "datatype" : "string",
+ "index": "T"
}, {
"id" : "3",
"name" : "SITE_ID",
- "datatype" : "int"
+ "datatype" : "int",
+ "index": "T"
}, {
"id" : "4",
"name" : "CATEG_BUSN_MGR",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.TEST_COUNTRY.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.TEST_COUNTRY.json b/examples/test_case_data/localmeta/table/DEFAULT.TEST_COUNTRY.json
index 202182b..870cb2d 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.TEST_COUNTRY.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.TEST_COUNTRY.json
@@ -5,7 +5,8 @@
"columns" : [ {
"id" : "1",
"name" : "COUNTRY",
- "datatype" : "string"
+ "datatype" : "string",
+ "index": "T"
}, {
"id" : "2",
"name" : "LATITUDE",
@@ -17,7 +18,8 @@
}, {
"id" : "4",
"name" : "NAME",
- "datatype" : "string"
+ "datatype" : "string",
+ "index": "T"
} ],
"database" : "DEFAULT",
"last_modified" : 0
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.TEST_KYLIN_FACT.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.TEST_KYLIN_FACT.json b/examples/test_case_data/localmeta/table/DEFAULT.TEST_KYLIN_FACT.json
index 2b33e66..e82d535 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.TEST_KYLIN_FACT.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.TEST_KYLIN_FACT.json
@@ -10,36 +10,43 @@
}, {
"id" : "2",
"name" : "ORDER_ID",
- "datatype" : "bigint"
+ "datatype" : "bigint",
+ "index": "T"
}, {
"id" : "3",
"name" : "CAL_DT",
"datatype" : "date",
- "data_gen" : "FK,order"
+ "data_gen" : "FK,order",
+ "index": "T"
}, {
"id" : "4",
"name" : "LSTG_FORMAT_NAME",
"datatype" : "string",
- "data_gen" : "FP-GTC|FP-non GTC|ABIN|Auction|Others"
+ "data_gen" : "FP-GTC|FP-non GTC|ABIN|Auction|Others",
+ "index": "T"
}, {
"id" : "5",
"name" : "LEAF_CATEG_ID",
"datatype" : "bigint",
- "data_gen" : "FK,null,nullstr=0"
+ "data_gen" : "FK,null,nullstr=0",
+ "index": "T"
}, {
"id" : "6",
"name" : "LSTG_SITE_ID",
- "datatype" : "int"
+ "datatype" : "int",
+ "index": "T"
}, {
"id" : "7",
"name" : "SLR_SEGMENT_CD",
"datatype" : "smallint",
- "data_gen" : "FK,pk=EDW.TEST_SELLER_TYPE_DIM_TABLE.SELLER_TYPE_CD"
+ "data_gen" : "FK,pk=EDW.TEST_SELLER_TYPE_DIM_TABLE.SELLER_TYPE_CD",
+ "index": "T"
}, {
"id" : "8",
"name" : "SELLER_ID",
"datatype" : "bigint",
- "data_gen" : "RAND||10000000|10001000"
+ "data_gen" : "RAND||10000000|10001000",
+ "index": "T"
}, {
"id" : "9",
"name" : "PRICE",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/DEFAULT.TEST_ORDER.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.TEST_ORDER.json b/examples/test_case_data/localmeta/table/DEFAULT.TEST_ORDER.json
index f157846..7232492 100644
--- a/examples/test_case_data/localmeta/table/DEFAULT.TEST_ORDER.json
+++ b/examples/test_case_data/localmeta/table/DEFAULT.TEST_ORDER.json
@@ -6,12 +6,14 @@
"id" : "1",
"name" : "ORDER_ID",
"datatype" : "bigint",
- "data_gen" : "ID"
+ "data_gen" : "ID",
+ "index": "T"
}, {
"id" : "2",
"name" : "BUYER_ID",
"datatype" : "bigint",
- "data_gen" : "RAND||10000500|10001500"
+ "data_gen" : "RAND||10000500|10001500",
+ "index": "T"
}, {
"id" : "3",
"name" : "TEST_DATE_ENC",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/EDW.TEST_CAL_DT.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/EDW.TEST_CAL_DT.json b/examples/test_case_data/localmeta/table/EDW.TEST_CAL_DT.json
index 02299d9..1230649 100644
--- a/examples/test_case_data/localmeta/table/EDW.TEST_CAL_DT.json
+++ b/examples/test_case_data/localmeta/table/EDW.TEST_CAL_DT.json
@@ -5,23 +5,28 @@
"columns" : [ {
"id" : "1",
"name" : "CAL_DT",
- "datatype" : "date"
+ "datatype" : "date",
+ "index": "T"
}, {
"id" : "2",
"name" : "YEAR_BEG_DT",
- "datatype" : "date"
+ "datatype" : "date",
+ "index": "T"
}, {
"id" : "3",
"name" : "QTR_BEG_DT",
- "datatype" : "date"
+ "datatype" : "date",
+ "index": "T"
}, {
"id" : "4",
"name" : "MONTH_BEG_DT",
- "datatype" : "date"
+ "datatype" : "date",
+ "index": "T"
}, {
"id" : "5",
"name" : "WEEK_BEG_DT",
- "datatype" : "date"
+ "datatype" : "date",
+ "index": "T"
}, {
"id" : "6",
"name" : "AGE_FOR_YEAR_ID",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM.json b/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM.json
index 2bfbb70..136ae57 100644
--- a/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM.json
+++ b/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM.json
@@ -4,7 +4,8 @@
"columns" : [ {
"id" : "1",
"name" : "SELLER_TYPE_CD",
- "datatype" : "smallint"
+ "datatype" : "smallint",
+ "index": "T"
}, {
"id" : "2",
"name" : "SELLER_TYPE_DESC",
@@ -16,7 +17,8 @@
}, {
"id" : "4",
"name" : "SELLER_GROUP_CD",
- "datatype" : "tinyint"
+ "datatype" : "tinyint",
+ "index": "T"
}, {
"id" : "5",
"name" : "SELLER_GROUP_DESC",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json b/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json
index 3c5749b..794810d 100644
--- a/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json
+++ b/examples/test_case_data/localmeta/table/EDW.TEST_SELLER_TYPE_DIM_TABLE.json
@@ -4,7 +4,8 @@
"columns" : [ {
"id" : "1",
"name" : "SELLER_TYPE_CD",
- "datatype" : "smallint"
+ "datatype" : "smallint",
+ "index": "T"
}, {
"id" : "2",
"name" : "SELLER_TYPE_DESC",
@@ -16,7 +17,8 @@
}, {
"id" : "4",
"name" : "SELLER_GROUP_CD",
- "datatype" : "tinyint"
+ "datatype" : "tinyint",
+ "index": "T"
}, {
"id" : "5",
"name" : "SELLER_GROUP_DESC",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/examples/test_case_data/localmeta/table/EDW.TEST_SITES.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/table/EDW.TEST_SITES.json b/examples/test_case_data/localmeta/table/EDW.TEST_SITES.json
index cc801af..4c2d727 100644
--- a/examples/test_case_data/localmeta/table/EDW.TEST_SITES.json
+++ b/examples/test_case_data/localmeta/table/EDW.TEST_SITES.json
@@ -5,11 +5,13 @@
"columns" : [ {
"id" : "1",
"name" : "SITE_ID",
- "datatype" : "int"
+ "datatype" : "int",
+ "index": "T"
}, {
"id" : "2",
"name" : "SITE_NAME",
- "datatype" : "string"
+ "datatype" : "string",
+ "index": "T"
}, {
"id" : "3",
"name" : "SITE_DOMAIN_CODE",
@@ -25,7 +27,8 @@
}, {
"id" : "6",
"name" : "SITE_CNTRY_ID",
- "datatype" : "int"
+ "datatype" : "int",
+ "index": "T"
}, {
"id" : "7",
"name" : "CRE_DATE",
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
index c449837..8b60c49 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
@@ -26,8 +26,10 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import com.clearspring.analytics.util.Lists;
import org.apache.commons.io.IOUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.MetadataManager;
@@ -103,6 +105,11 @@ public class H2Database {
String sql = generateCreateH2TableSql(tableDesc, cvsFilePath);
stmt.executeUpdate(sql);
+ List<String> createIndexStatements = generateCreateH2IndexSql(tableDesc);
+ for (String indexSql : createIndexStatements) {
+ stmt.executeUpdate(indexSql);
+ }
+
if (tempFile != null)
tempFile.delete();
}
@@ -136,6 +143,22 @@ public class H2Database {
return ddl.toString();
}
+ private List<String> generateCreateH2IndexSql(TableDesc tableDesc) {
+ List<String> result = Lists.newArrayList();
+ int x = 0;
+ for (ColumnDesc col : tableDesc.getColumns()) {
+ if ("T".equalsIgnoreCase(col.getIndex())) {
+ StringBuilder ddl = new StringBuilder();
+ ddl.append("CREATE INDEX IDX_" + tableDesc.getName() + "_" + x + " ON " + tableDesc.getIdentity() + "(" + col.getName() + ")");
+ ddl.append("\n");
+ result.add(ddl.toString());
+ x++;
+ }
+ }
+
+ return result;
+ }
+
private static String getH2DataType(String javaDataType) {
String hiveDataType = javaToH2DataTypeMapping.get(javaDataType.toLowerCase());
if (hiveDataType == null) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/5d83c80f/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index deb1104..1cc5c76 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -499,7 +499,9 @@ public class KylinTestBase {
// execute H2
printInfo("Query Result from H2 - " + queryName);
+ long currentTime = System.currentTimeMillis();
ITable h2Table = executeQuery(newH2Connection(), queryName, sql, needSort);
+ printInfo("H2 spent " + (System.currentTimeMillis() - currentTime) + " mili-seconds.");
try {
// compare the result
@@ -644,7 +646,7 @@ public class KylinTestBase {
cubeConnection = DriverManager.getConnection("jdbc:calcite:model=" + olapTmp.getAbsolutePath(), props);
//setup h2
- h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++), "sa", "");
+ h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072;DB_CLOSE_DELAY=-1", "sa", "");
// Load H2 Tables (inner join)
H2Database h2DB = new H2Database(h2Connection, config);
h2DB.loadAllTables();
[22/39] kylin git commit: KYLIN-2406 fix model match rules when
having hanging OLAPTablescan
Posted by li...@apache.org.
KYLIN-2406 fix model match rules when having hanging OLAPTablescan
Signed-off-by: lidongsjtu <li...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1f444681
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1f444681
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1f444681
Branch: refs/heads/master-hbase0.98
Commit: 1f444681d30ed5f31997c2182cce0f388fef0347
Parents: ef0fc86
Author: etherge <et...@163.com>
Authored: Sat Feb 4 11:31:58 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 6 17:06:55 2017 +0800
----------------------------------------------------------------------
.../main/java/org/apache/kylin/query/routing/ModelChooser.java | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/1f444681/query/src/main/java/org/apache/kylin/query/routing/ModelChooser.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/ModelChooser.java b/query/src/main/java/org/apache/kylin/query/routing/ModelChooser.java
index f979f7b..507b371 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/ModelChooser.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/ModelChooser.java
@@ -106,6 +106,9 @@ public class ModelChooser {
// one lookup table
String modelAlias = model.findFirstTable(firstTable.getTableIdentity()).getAlias();
matchUp = ImmutableMap.of(firstTable.getAlias(), modelAlias);
+ } else if (ctx.joins.size() != ctx.allTableScans.size() - 1) {
+ // has hanging tables
+ throw new NoRealizationFoundException("Please adjust the sequence of join tables and put subquery or temporary table after lookup tables. " + toErrorMsg(ctx));
} else {
// normal big joins
if (ctx.joinsTree == null) {
[04/39] kylin git commit: minor,
make DefaultSchedulerTest.testDiscard() more stable
Posted by li...@apache.org.
minor, make DefaultSchedulerTest.testDiscard() more stable
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0582512e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0582512e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0582512e
Branch: refs/heads/master-hbase0.98
Commit: 0582512ea06bcd1c662903ff69012ba1cc7cc33c
Parents: c3fff6d
Author: Li Yang <li...@apache.org>
Authored: Thu Jan 26 11:49:04 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Thu Jan 26 11:49:56 2017 +0800
----------------------------------------------------------------------
.../apache/kylin/job/SelfStopExecutable.java | 31 ++++++++++++++++----
.../impl/threadpool/DefaultSchedulerTest.java | 5 ++--
2 files changed, 27 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/0582512e/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java b/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
index b4f6a98..9a3eb48 100644
--- a/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
+++ b/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
@@ -26,21 +26,40 @@ import org.apache.kylin.job.execution.ExecuteResult;
*/
public class SelfStopExecutable extends BaseTestExecutable {
+ volatile boolean doingWork;
+
public SelfStopExecutable() {
super();
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+ doingWork = true;
try {
- Thread.sleep(5000);
- } catch (InterruptedException e) {
- }
- if (isDiscarded()) {
- return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped");
- } else {
+ for (int i = 0; i < 20; i++) {
+ sleepOneSecond();
+
+ if (isDiscarded())
+ return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped");
+ }
+
return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
+ } finally {
+ doingWork = false;
+ }
+ }
+
+ private void sleepOneSecond() {
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
}
}
+ public void waitForDoWork() {
+ while (doingWork) {
+ sleepOneSecond();
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/0582512e/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
index bcd6a59..2416311 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
@@ -97,7 +97,7 @@ public class DefaultSchedulerTest extends BaseSchedulerTest {
@Test
public void testDiscard() throws Exception {
DefaultChainedExecutable job = new DefaultChainedExecutable();
- BaseTestExecutable task1 = new SelfStopExecutable();
+ SelfStopExecutable task1 = new SelfStopExecutable();
job.addTask(task1);
jobService.addJob(job);
waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500);
@@ -105,8 +105,7 @@ public class DefaultSchedulerTest extends BaseSchedulerTest {
waitForJobFinish(job.getId());
Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState());
Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task1.getId()).getState());
- Thread.sleep(5000);
- System.out.println(job);
+ task1.waitForDoWork();
}
@SuppressWarnings("rawtypes")
[36/39] kylin git commit: KYLIN-2222 web ui uses rest api to decide
which dim encoding is valid for different typed columns
Posted by li...@apache.org.
KYLIN-2222 web ui uses rest api to decide which dim encoding is valid for different typed columns
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/722efb82
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/722efb82
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/722efb82
Branch: refs/heads/master-hbase0.98
Commit: 722efb82357e0ebcf7853a813272bd960044dd52
Parents: 570ab42
Author: Hongbin Ma <ma...@apache.org>
Authored: Wed Feb 8 21:41:41 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Wed Feb 8 21:41:57 2017 +0800
----------------------------------------------------------------------
.../rest/controller/EncodingController.java | 73 ++++++++++++++++++++
.../kylin/rest/service/EncodingService.java | 54 +++++++++++++++
2 files changed, 127 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/722efb82/server-base/src/main/java/org/apache/kylin/rest/controller/EncodingController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/EncodingController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/EncodingController.java
new file mode 100644
index 0000000..2f532e2
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/EncodingController.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.controller;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.kylin.metadata.datatype.DataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+import org.apache.kylin.rest.service.EncodingService;
+
+@Controller
+@RequestMapping(value = "/encodings")
+public class EncodingController extends BasicController {
+
+ private static final Logger logger = LoggerFactory.getLogger(EncodingController.class);
+
+ @Autowired
+ private EncodingService encodingService;
+
+ /**
+ * Get valid encodings for the datatype, if no datatype parameter, return all encodings.
+ *
+ * @return suggestion map
+ */
+ @RequestMapping(value = "valid_encodings", method = { RequestMethod.GET })
+ @ResponseBody
+ public Map<String, Object> getValidEncodings() {
+
+ Set<String> allDatatypes = Sets.newHashSet();
+ allDatatypes.addAll(DataType.DATETIME_FAMILY);
+ allDatatypes.addAll(DataType.INTEGER_FAMILY);
+ allDatatypes.addAll(DataType.NUMBER_FAMILY);
+ allDatatypes.addAll(DataType.STRING_FAMILY);
+
+ Map<String, List<String>> datatypeValidEncodings = Maps.newHashMap();
+ for (String dataTypeStr : allDatatypes) {
+ datatypeValidEncodings.put(dataTypeStr, encodingService.getValidEncodings(DataType.getType(dataTypeStr)));
+ }
+
+ Map<String, Object> ret = Maps.newHashMap();
+ ret.put("code", "000");
+ ret.put("data", datatypeValidEncodings);
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/722efb82/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
new file mode 100644
index 0000000..7d7d016
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.rest.service;
+
+import java.util.List;
+
+import org.apache.kylin.dimension.BooleanDimEnc;
+import org.apache.kylin.dimension.DateDimEnc;
+import org.apache.kylin.dimension.DictionaryDimEnc;
+import org.apache.kylin.dimension.FixedLenDimEnc;
+import org.apache.kylin.dimension.FixedLenHexDimEnc;
+import org.apache.kylin.dimension.IntegerDimEnc;
+import org.apache.kylin.dimension.TimeDimEnc;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.springframework.stereotype.Component;
+
+import com.google.common.collect.Lists;
+
+@Component("encodingService")
+public class EncodingService extends BasicService {
+
+ public List<String> getValidEncodings(DataType dataType) {
+ if (dataType.isIntegerFamily()) {
+ return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
+ } else if (dataType.isNumberFamily()) { //numbers include integers
+ return Lists.newArrayList(DictionaryDimEnc.ENCODING_NAME);
+ } else if (dataType.isDateTimeFamily()) {
+ return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME);
+ } else if (dataType.isStringFamily()) {
+ return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, FixedLenDimEnc.ENCODING_NAME, //
+ FixedLenHexDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
+ } else {
+ throw new IllegalArgumentException("can't provide valid encodings for datatype:" + dataType);
+ }
+ }
+
+}
[28/39] kylin git commit: KYLIN-2434 minor fix
Posted by li...@apache.org.
KYLIN-2434 minor fix
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/eee9ecbc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/eee9ecbc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/eee9ecbc
Branch: refs/heads/master-hbase0.98
Commit: eee9ecbcb031862ed316f630933bb320cf93a9d1
Parents: f62465c
Author: lidongsjtu <li...@apache.org>
Authored: Tue Feb 7 17:37:21 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Feb 7 17:37:21 2017 +0800
----------------------------------------------------------------------
.../java/org/apache/kylin/engine/spark/SparkCubingByLayer.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/eee9ecbc/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index 071806c..317d2bd 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -150,12 +150,12 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
setupClasspath(sc, confPath);
HadoopUtil.deletePath(sc.hadoopConfiguration(), new Path(outputPath));
+ System.setProperty(KylinConfig.KYLIN_CONF, confPath);
final KylinConfig envConfig = KylinConfig.getInstanceFromEnv();
HiveContext sqlContext = new HiveContext(sc.sc());
final DataFrame intermediateTable = sqlContext.table(envConfig.getHiveDatabaseForIntermediateTable() + "." + hiveTable);
- System.setProperty(KylinConfig.KYLIN_CONF, confPath);
final CubeInstance cubeInstance = CubeManager.getInstance(envConfig).getCube(cubeName);
final CubeDesc cubeDesc = cubeInstance.getDescriptor();
final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
[34/39] kylin git commit: minor, clean up kylin.sh a little
Posted by li...@apache.org.
minor, clean up kylin.sh a little
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/df3ecd32
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/df3ecd32
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/df3ecd32
Branch: refs/heads/master-hbase0.98
Commit: df3ecd324a6442fee52f146d930cc5f84dafd945
Parents: d135bdb
Author: Yang Li <li...@apache.org>
Authored: Wed Feb 8 20:42:02 2017 +0800
Committer: Yang Li <li...@apache.org>
Committed: Wed Feb 8 20:42:02 2017 +0800
----------------------------------------------------------------------
build/bin/kylin.sh | 12 ++++--------
1 file changed, 4 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/df3ecd32/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index 7813b79..cc571e5 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -60,8 +60,7 @@ then
PID=`cat $KYLIN_HOME/pid`
if ps -p $PID > /dev/null
then
- echo "Kylin is running, stop it first"
- exit 1
+ quit "Kylin is running, stop it first"
fi
fi
@@ -81,7 +80,7 @@ then
spring_profile=`bash ${dir}/get-properties.sh kylin.security.profile`
if [ -z "$spring_profile" ]
then
- quit 'please set kylin.security.profile in kylin.properties, options are: testing, ldap, saml.'
+ quit 'Please set kylin.security.profile in kylin.properties, options are: testing, ldap, saml.'
else
verbose "kylin.security.profile is set to $spring_profile"
fi
@@ -95,10 +94,7 @@ then
kylin_rest_address_arr=(${kylin_rest_address//;/ })
nc -z -w 5 ${kylin_rest_address_arr[0]} ${kylin_rest_address_arr[1]} 1>/dev/null 2>&1; nc_result=$?
if [ $nc_result -eq 0 ]; then
- echo "port ${kylin_rest_address} is not available, could not start Kylin"
- exit 1
- else
- echo "port ${kylin_rest_address} is available"
+ quit "Port ${kylin_rest_address} is not available, could not start Kylin."
fi
#debug if encounter NoClassDefError
@@ -155,7 +151,7 @@ then
elif [ "$1" = "diag" ]
then
- echo "kylin.sh diag no longer supported, use diag.sh instead"
+ echo "'kylin.sh diag' no longer supported, use diag.sh instead"
exit 0
# tool command
[37/39] kylin git commit: KYLIN-2307 Create a branch for master with
HBase 0.98 API
Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index f52fc3e..19e5db0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.ResourceStore;
@@ -232,9 +231,9 @@ public class ExtendCubeToHybridCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- Table aclHtable = null;
+ HTableInterface aclHtable = null;
try {
- aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+ aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
// cube acl
Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -254,6 +253,7 @@ public class ExtendCubeToHybridCLI {
aclHtable.put(put);
}
}
+ aclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(aclHtable);
}
[24/39] kylin git commit: KYLIN-1875 Kylin support SnowFlake schema
(alias name rule)
Posted by li...@apache.org.
KYLIN-1875 Kylin support SnowFlake schema (alias name rule)
Signed-off-by: zhongjian <ji...@163.com>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/8581f1d2
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/8581f1d2
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/8581f1d2
Branch: refs/heads/master-hbase0.98
Commit: 8581f1d203cee9fe3d834d6beb9f6bf7717f921a
Parents: 7a6dd1c
Author: luguosheng <55...@qq.com>
Authored: Tue Feb 7 16:24:23 2017 +0800
Committer: zhongjian <ji...@163.com>
Committed: Tue Feb 7 17:08:24 2017 +0800
----------------------------------------------------------------------
webapp/app/partials/modelDesigner/data_model.html | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/8581f1d2/webapp/app/partials/modelDesigner/data_model.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/modelDesigner/data_model.html b/webapp/app/partials/modelDesigner/data_model.html
index 4d72266..167c088 100644
--- a/webapp/app/partials/modelDesigner/data_model.html
+++ b/webapp/app/partials/modelDesigner/data_model.html
@@ -157,8 +157,8 @@
<label class="col-sm-3 control-label font-color-default"><b>Alias</b></label>
<div class="col-sm-6">
<input type="text" class="form-control " name="joinTable_alias" placeholder="Input Table Alias" ng-required="true"
- ng-model="newLookup.alias" ng-pattern="/^\w+$/">
- <small class="help-block red" ng-show="!lookup_form.joinTable_alias.$error.required&&lookup_form.joinTable_alias.$invalid && (lookup_form.joinTable_alias.$dirty||lookup_form.$submitted)"><i class="fa fa-exclamation-triangle"></i> Alias is invalid.</small>
+ ng-model="newLookup.alias" ng-pattern="/^[A-Z_\d]+$/">
+ <small class="help-block red" ng-show="!lookup_form.joinTable_alias.$error.required&&lookup_form.joinTable_alias.$invalid && (lookup_form.joinTable_alias.$dirty||lookup_form.$submitted)"><i class="fa fa-exclamation-triangle"></i> Table alias is invalid(A Combination of numbers, uppercase letters or underscores).</small>
</div>
</div>
</div>
[38/39] kylin git commit: KYLIN-2307 Create a branch for master with
HBase 0.98 API
Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 74ab017..a44de4f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -30,15 +30,14 @@ import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +68,7 @@ public class HBaseResourceStore extends ResourceStore {
final String tableNameBase;
final String hbaseUrl;
- Connection getConnection() throws IOException {
+ HConnection getConnection() throws IOException {
return HBaseConnection.get(hbaseUrl);
}
@@ -121,7 +120,7 @@ public class HBaseResourceStore extends ResourceStore {
byte[] endRow = Bytes.toBytes(lookForPrefix);
endRow[endRow.length - 1]++;
- Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+ HTableInterface table = getConnection().getTable(getAllInOneTableName());
Scan scan = new Scan(startRow, endRow);
if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -238,12 +237,13 @@ public class HBaseResourceStore extends ResourceStore {
IOUtils.copy(content, bout);
bout.close();
- Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+ HTableInterface table = getConnection().getTable(getAllInOneTableName());
try {
byte[] row = Bytes.toBytes(resPath);
Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
table.put(put);
+ table.flushCommits();
} finally {
IOUtils.closeQuietly(table);
}
@@ -251,7 +251,7 @@ public class HBaseResourceStore extends ResourceStore {
@Override
protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
- Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+ HTableInterface table = getConnection().getTable(getAllInOneTableName());
try {
byte[] row = Bytes.toBytes(resPath);
byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -264,6 +264,8 @@ public class HBaseResourceStore extends ResourceStore {
throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
}
+ table.flushCommits();
+
return newTS;
} finally {
IOUtils.closeQuietly(table);
@@ -272,7 +274,7 @@ public class HBaseResourceStore extends ResourceStore {
@Override
protected void deleteResourceImpl(String resPath) throws IOException {
- Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+ HTableInterface table = getConnection().getTable(getAllInOneTableName());
try {
boolean hdfsResourceExist = false;
Result result = internalGetFromHTable(table, resPath, true, false);
@@ -285,6 +287,7 @@ public class HBaseResourceStore extends ResourceStore {
Delete del = new Delete(Bytes.toBytes(resPath));
table.delete(del);
+ table.flushCommits();
if (hdfsResourceExist) { // remove hdfs cell value
Path redirectPath = bigCellHDFSPath(resPath);
@@ -305,7 +308,7 @@ public class HBaseResourceStore extends ResourceStore {
}
private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
- Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+ HTableInterface table = getConnection().getTable(getAllInOneTableName());
try {
return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
} finally {
@@ -314,7 +317,7 @@ public class HBaseResourceStore extends ResourceStore {
}
- private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+ private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
byte[] rowkey = Bytes.toBytes(path);
Get get = new Get(rowkey);
@@ -333,7 +336,7 @@ public class HBaseResourceStore extends ResourceStore {
return exists ? result : null;
}
- private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
+ private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
Path redirectPath = bigCellHDFSPath(resPath);
FileSystem fileSystem = HadoopUtil.getWorkingFileSystem();
@@ -358,7 +361,7 @@ public class HBaseResourceStore extends ResourceStore {
return redirectPath;
}
- private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
+ private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
if (content.length > kvSizeLimit) {
writeLargeCellToHdfs(resPath, content, table);
@@ -366,8 +369,8 @@ public class HBaseResourceStore extends ResourceStore {
}
Put put = new Put(row);
- put.addColumn(B_FAMILY, B_COLUMN, content);
- put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+ put.add(B_FAMILY, B_COLUMN, content);
+ put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
return put;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index f63d9c2..b141190 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,13 +26,12 @@ import java.util.NoSuchElementException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.BufferedMutator;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.kv.RowConstants;
@@ -87,13 +86,14 @@ public class SimpleHBaseStore implements IGTStore {
}
private class Writer implements IGTWriter {
- final BufferedMutator table;
+ final HTableInterface table;
final ByteBuffer rowkey = ByteBuffer.allocate(50);
final ByteBuffer value = ByteBuffer.allocate(50);
Writer() throws IOException {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- table = conn.getBufferedMutator(htableName);
+ HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ table = conn.getTable(htableName);
+ table.setAutoFlush(false, true);
}
@Override
@@ -113,24 +113,24 @@ public class SimpleHBaseStore implements IGTStore {
Put put = new Put(rowkey);
put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
- table.mutate(put);
+ table.put(put);
}
@Override
public void close() throws IOException {
- table.flush();
+ table.flushCommits();
table.close();
}
}
class Reader implements IGTScanner {
- final Table table;
+ final HTableInterface table;
final ResultScanner scanner;
int count = 0;
Reader() throws IOException {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
table = conn.getTable(htableName);
Scan scan = new Scan();
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index cad5a3f..df1817e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -26,9 +26,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.DataFormatException;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -52,10 +51,10 @@ import org.apache.kylin.storage.gtrecord.StorageResponseGTScatter;
import org.apache.kylin.storage.hbase.HBaseConnection;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -118,7 +117,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
// globally shared connection, does not require close
- final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+ final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -173,7 +172,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
final boolean[] abnormalFinish = new boolean[1];
try {
- Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
+ HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
final CubeVisitRequest request = builder.build();
final byte[] startKey = epRange.getFirst();
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index a52af90..3cefc5f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -24,12 +24,11 @@ import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.common.util.ImmutableBitSet;
import org.apache.kylin.common.util.ShardingHash;
@@ -155,8 +154,8 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
// primary key (also the 0th column block) is always selected
final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
// globally shared connection, does not require close
- Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
- final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
+ HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+ final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 810747f..21a0efb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -142,7 +142,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
if (shardLength == 0) {
return;
}
- byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
+ byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
}
@@ -179,7 +179,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
this.serviceStartTime = System.currentTimeMillis();
- region = (HRegion)env.getRegion();
+ region = env.getRegion();
region.startRegionOperation();
// if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index feb4842..2814ad6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -26,8 +26,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -80,8 +79,7 @@ public class CubeHTableUtil {
tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
- Admin admin = conn.getAdmin();
+ HBaseAdmin admin = new HBaseAdmin(conf);
try {
if (User.isHBaseSecurityEnabled(conf)) {
@@ -94,7 +92,7 @@ public class CubeHTableUtil {
tableDesc.addFamily(cf);
}
- if (admin.tableExists(TableName.valueOf(tableName))) {
+ if (admin.tableExists(tableName)) {
// admin.disableTable(tableName);
// admin.deleteTable(tableName);
throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -103,7 +101,7 @@ public class CubeHTableUtil {
DeployCoprocessorCLI.deployCoprocessor(tableDesc);
admin.createTable(tableDesc, splitKeys);
- Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
+ Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
logger.info("create hbase table " + tableName + " done.");
} finally {
IOUtils.closeQuietly(admin);
@@ -112,7 +110,8 @@ public class CubeHTableUtil {
}
public static void deleteHTable(TableName tableName) throws IOException {
- Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin admin = new HBaseAdmin(conf);
try {
if (admin.tableExists(tableName)) {
logger.info("disabling hbase table " + tableName);
@@ -127,7 +126,8 @@ public class CubeHTableUtil {
/** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
- Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin admin = new HBaseAdmin(conf);
try {
if (admin.tableExists(tableName)) {
logger.info("disabling hbase table " + tableName);
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index df3cf08..eacff9f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -25,13 +25,13 @@ import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.HadoopUtil;
import org.apache.kylin.common.util.HiveCmdBuilder;
import org.apache.kylin.job.exception.ExecuteException;
@@ -100,21 +100,19 @@ public class DeprecatedGCStep extends AbstractExecutable {
List<String> oldTables = getOldHTables();
if (oldTables != null && oldTables.size() > 0) {
String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
- Admin admin = null;
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin admin = null;
try {
-
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- admin = conn.getAdmin();
-
+ admin = new HBaseAdmin(conf);
for (String table : oldTables) {
- if (admin.tableExists(TableName.valueOf(table))) {
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
+ if (admin.tableExists(table)) {
+ HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
if (metadataUrlPrefix.equalsIgnoreCase(host)) {
- if (admin.isTableEnabled(TableName.valueOf(table))) {
- admin.disableTable(TableName.valueOf(table));
+ if (admin.isTableEnabled(table)) {
+ admin.disableTable(table);
}
- admin.deleteTable(TableName.valueOf(table));
+ admin.deleteTable(table);
logger.debug("Dropped HBase table " + table);
output.append("Dropped HBase table " + table + " \n");
} else {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index 6587d4e..d5b36df 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -23,8 +23,8 @@ import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.ImmutableBitSet;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.cuboid.Cuboid;
@@ -49,7 +49,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
private final List<KeyValueCreator> keyValueCreators;
private final int nColumns;
- private final Table hTable;
+ private final HTableInterface hTable;
private final CubeDesc cubeDesc;
private final CubeSegment cubeSegment;
private final Object[] measureValues;
@@ -58,7 +58,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
private AbstractRowKeyEncoder rowKeyEncoder;
private byte[] keybuf;
- public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
+ public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
this.keyValueCreators = Lists.newArrayList();
this.cubeSegment = segment;
this.cubeDesc = cubeSegment.getCubeDesc();
@@ -117,6 +117,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
long t = System.currentTimeMillis();
if (hTable != null) {
hTable.put(puts);
+ hTable.flushCommits();
}
logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
puts.clear();
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 2f7e164..5b2441c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@ import java.util.Collections;
import java.util.List;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,20 +69,19 @@ public class MergeGCStep extends AbstractExecutable {
List<String> oldTables = getOldHTables();
if (oldTables != null && oldTables.size() > 0) {
String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
- Admin admin = null;
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin admin = null;
try {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- admin = conn.getAdmin();
-
+ admin = new HBaseAdmin(conf);
for (String table : oldTables) {
- if (admin.tableExists(TableName.valueOf(table))) {
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
+ if (admin.tableExists(table)) {
+ HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
if (metadataUrlPrefix.equalsIgnoreCase(host)) {
- if (admin.isTableEnabled(TableName.valueOf(table))) {
- admin.disableTable(TableName.valueOf(table));
+ if (admin.isTableEnabled(table)) {
+ admin.disableTable(table);
}
- admin.deleteTable(TableName.valueOf(table));
+ admin.deleteTable(table);
logger.debug("Dropped htable: " + table);
output.append("HBase table " + table + " is dropped. \n");
} else {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 56f867a..a150607 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,11 +21,9 @@ package org.apache.kylin.storage.hbase.util;
import java.io.IOException;
import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -40,8 +38,8 @@ public class CleanHtableCLI extends AbstractApplication {
protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
private void clean() throws IOException {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
String name = descriptor.getNameAsString().toLowerCase();
@@ -52,7 +50,7 @@ public class CleanHtableCLI extends AbstractApplication {
System.out.println();
descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
- hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
+ hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
}
}
hbaseAdmin.close();
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 581de38..68c0a39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -26,19 +26,19 @@ import java.util.Map;
import java.util.Set;
import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.RawResource;
@@ -89,7 +89,7 @@ public class CubeMigrationCLI {
private static ResourceStore srcStore;
private static ResourceStore dstStore;
private static FileSystem hdfsFS;
- private static Admin hbaseAdmin;
+ private static HBaseAdmin hbaseAdmin;
public static final String ACL_INFO_FAMILY = "i";
private static final String ACL_TABLE_NAME = "_acl";
@@ -134,8 +134,8 @@ public class CubeMigrationCLI {
checkAndGetHbaseUrl();
- Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
- hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ hbaseAdmin = new HBaseAdmin(conf);
hdfsFS = HadoopUtil.getWorkingFileSystem();
@@ -233,7 +233,6 @@ public class CubeMigrationCLI {
operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
}
}
-
private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
String projectResPath = ProjectInstance.concatResourcePath(projectName);
if (!dstStore.exists(projectResPath))
@@ -327,8 +326,8 @@ public class CubeMigrationCLI {
switch (opt.type) {
case CHANGE_HTABLE_HOST: {
- TableName tableName = TableName.valueOf((String) opt.params[0]);
- HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+ String tableName = (String) opt.params[0];
+ HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(tableName);
desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
@@ -450,11 +449,11 @@ public class CubeMigrationCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- Table srcAclHtable = null;
- Table destAclHtable = null;
+ HTableInterface srcAclHtable = null;
+ HTableInterface destAclHtable = null;
try {
- srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+ srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
// cube acl
Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -474,6 +473,7 @@ public class CubeMigrationCLI {
destAclHtable.put(put);
}
}
+ destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(srcAclHtable);
IOUtils.closeQuietly(destAclHtable);
@@ -504,8 +504,8 @@ public class CubeMigrationCLI {
switch (opt.type) {
case CHANGE_HTABLE_HOST: {
- TableName tableName = TableName.valueOf((String) opt.params[0]);
- HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+ String tableName = (String) opt.params[0];
+ HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(tableName);
desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
@@ -539,12 +539,13 @@ public class CubeMigrationCLI {
case COPY_ACL: {
String cubeId = (String) opt.params[0];
String modelId = (String) opt.params[1];
- Table destAclHtable = null;
+ HTableInterface destAclHtable = null;
try {
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+ destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(destAclHtable);
}
@@ -561,7 +562,7 @@ public class CubeMigrationCLI {
}
}
- private static void updateMeta(KylinConfig config) {
+ private static void updateMeta(KylinConfig config){
String[] nodes = config.getRestServers();
for (String node : nodes) {
RestClient restClient = new RestClient(node);
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 20d0f7d..8bd4abf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.cube.CubeInstance;
@@ -61,7 +61,7 @@ public class CubeMigrationCheckCLI {
private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
private KylinConfig dstCfg;
- private Admin hbaseAdmin;
+ private HBaseAdmin hbaseAdmin;
private List<String> issueExistHTables;
private List<String> inconsistentHTables;
@@ -130,8 +130,9 @@ public class CubeMigrationCheckCLI {
this.dstCfg = kylinConfig;
this.ifFix = isFix;
- Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
- hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ hbaseAdmin = new HBaseAdmin(conf);
+
issueExistHTables = Lists.newArrayList();
inconsistentHTables = Lists.newArrayList();
}
@@ -188,10 +189,10 @@ public class CubeMigrationCheckCLI {
String[] sepNameList = segFullName.split(",");
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
- hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
+ hbaseAdmin.disableTable(sepNameList[0]);
desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
- hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
- hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
+ hbaseAdmin.modifyTable(sepNameList[0], desc);
+ hbaseAdmin.enableTable(sepNameList[0]);
}
} else {
logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index e72859d..c8410f9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -44,8 +44,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.KylinVersion;
@@ -82,8 +81,7 @@ public class DeployCoprocessorCLI {
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
FileSystem fileSystem = FileSystem.get(hconf);
- Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf);
String localCoprocessorJar;
if ("default".equals(args[0])) {
@@ -167,10 +165,10 @@ public class DeployCoprocessorCLI {
public static void deployCoprocessor(HTableDescriptor tableDesc) {
try {
initHTableCoprocessor(tableDesc);
- logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
+ logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
} catch (Exception ex) {
- logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
+ logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
logger.error("Will try creating the table without coprocessor.");
}
}
@@ -191,7 +189,7 @@ public class DeployCoprocessorCLI {
desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
}
- public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
+ public static boolean resetCoprocessor(String tableName, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
@@ -206,7 +204,7 @@ public class DeployCoprocessorCLI {
logger.info("reset coprocessor on " + tableName);
logger.info("Disable " + tableName);
- hbaseAdmin.disableTable(TableName.valueOf(tableName));
+ hbaseAdmin.disableTable(tableName);
while (desc.hasCoprocessor(CubeObserverClassOld2)) {
desc.removeCoprocessor(CubeObserverClassOld2);
@@ -232,15 +230,16 @@ public class DeployCoprocessorCLI {
desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
}
- hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+ hbaseAdmin.modifyTable(tableName, desc);
logger.info("Enable " + tableName);
- hbaseAdmin.enableTable(TableName.valueOf(tableName));
+ hbaseAdmin.enableTable(tableName);
return true;
}
- private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
+
+ private static List<String> resetCoprocessorOnHTables(final HBaseAdmin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
List<String> processedTables = Collections.synchronizedList(new ArrayList<String>());
ExecutorService coprocessorPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
CountDownLatch countDownLatch = new CountDownLatch(tableNames.size());
@@ -261,12 +260,12 @@ public class DeployCoprocessorCLI {
private static class ResetCoprocessorWorker implements Runnable {
private final CountDownLatch countDownLatch;
- private final Admin hbaseAdmin;
+ private final HBaseAdmin hbaseAdmin;
private final Path hdfsCoprocessorJar;
private final String tableName;
private final List<String> processedTables;
- public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
+ public ResetCoprocessorWorker(CountDownLatch countDownLatch, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
this.countDownLatch = countDownLatch;
this.hbaseAdmin = hbaseAdmin;
this.hdfsCoprocessorJar = hdfsCoprocessorJar;
@@ -387,7 +386,7 @@ public class DeployCoprocessorCLI {
return coprocessorDir;
}
- private static Set<String> getCoprocessorJarPaths(Admin hbaseAdmin, List<String> tableNames) throws IOException {
+ private static Set<String> getCoprocessorJarPaths(HBaseAdmin hbaseAdmin, List<String> tableNames) throws IOException {
HashSet<String> result = new HashSet<String>();
for (String tableName : tableNames) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 1cdb2f8..61c73d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.ResourceStore;
@@ -236,9 +235,9 @@ public class ExtendCubeToHybridCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- Table aclHtable = null;
+ HTableInterface aclHtable = null;
try {
- aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+ aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
// cube acl
Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -258,6 +257,7 @@ public class ExtendCubeToHybridCLI {
aclHtable.put(put);
}
}
+ aclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(aclHtable);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index dd5f8fa..86ba22f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public class GridTableHBaseBenchmark {
System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
- Connection conn = HBaseConnection.get(hbaseUrl);
+ HConnection conn = HBaseConnection.get(hbaseUrl);
createHTableIfNeeded(conn, TEST_TABLE);
prepareData(conn);
@@ -91,10 +91,10 @@ public class GridTableHBaseBenchmark {
}
- private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+ private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
Stats stats = new Stats("COLUMN_SCAN");
- Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+ HTableInterface table = conn.getTable(TEST_TABLE);
try {
stats.markStart();
@@ -122,20 +122,20 @@ public class GridTableHBaseBenchmark {
}
}
- private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
+ private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
}
- private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
+ private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
}
- private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
+ private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
}
- private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
- Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+ private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+ HTableInterface table = conn.getTable(TEST_TABLE);
try {
stats.markStart();
@@ -156,11 +156,11 @@ public class GridTableHBaseBenchmark {
}
}
- private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+ private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
- Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+ HTableInterface table = conn.getTable(TEST_TABLE);
try {
stats.markStart();
@@ -204,8 +204,8 @@ public class GridTableHBaseBenchmark {
}
}
- private static void prepareData(Connection conn) throws IOException {
- Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+ private static void prepareData(HConnection conn) throws IOException {
+ HTableInterface table = conn.getTable(TEST_TABLE);
try {
// check how many rows existing
@@ -258,8 +258,8 @@ public class GridTableHBaseBenchmark {
return bytes;
}
- private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
- Admin hbase = conn.getAdmin();
+ private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
+ HBaseAdmin hbase = new HBaseAdmin(conn);
try {
boolean tableExist = false;
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 940d64a..6749d6c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,11 +24,9 @@ import java.util.List;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -57,8 +55,8 @@ public class HBaseClean extends AbstractApplication {
private void cleanUp() {
try {
// get all kylin hbase tables
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -73,12 +71,12 @@ public class HBaseClean extends AbstractApplication {
// drop tables
for (String htableName : allTablesNeedToBeDropped) {
logger.info("Deleting HBase table " + htableName);
- if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
- if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
- hbaseAdmin.disableTable(TableName.valueOf(htableName));
+ if (hbaseAdmin.tableExists(htableName)) {
+ if (hbaseAdmin.isTableEnabled(htableName)) {
+ hbaseAdmin.disableTable(htableName);
}
- hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+ hbaseAdmin.deleteTable(htableName);
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 1daca0a..937b65f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,7 +23,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
-import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
@@ -32,15 +31,12 @@ import java.util.TreeSet;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kylin.common.util.Pair;
import org.slf4j.Logger;
@@ -62,31 +58,30 @@ public class HBaseRegionSizeCalculator {
/**
* Computes size of each region for table and given column families.
* */
- public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
+ public HBaseRegionSizeCalculator(HTable table) throws IOException {
+ this(table, new HBaseAdmin(table.getConfiguration()));
+ }
- Table table = null;
- Admin admin = null;
- try {
- table = hbaseConnection.getTable(TableName.valueOf(tableName));
- admin = hbaseConnection.getAdmin();
+ /** Constructor for unit testing */
+ HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
+ try {
if (!enabled(table.getConfiguration())) {
logger.info("Region size calculation disabled.");
return;
}
- logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
+ logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
// Get regions for table.
- RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
- List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
+ Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
- for (HRegionLocation hRegionLocation : regionLocationList) {
- tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
+ for (HRegionInfo regionInfo : tableRegionInfos) {
+ tableRegions.add(regionInfo.getRegionName());
}
- ClusterStatus clusterStatus = admin.getClusterStatus();
+ ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
Collection<ServerName> servers = clusterStatus.getServers();
final long megaByte = 1024L * 1024L;
@@ -110,7 +105,7 @@ public class HBaseRegionSizeCalculator {
}
}
} finally {
- IOUtils.closeQuietly(admin);
+ IOUtils.closeQuietly(hBaseAdmin);
}
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index a2f60d4..266f7e7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,10 +23,9 @@ import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.metadata.realization.IRealizationConstants;
import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -43,8 +42,8 @@ public class HBaseUsage {
Map<String, List<String>> envs = Maps.newHashMap();
// get all kylin hbase tables
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
for (HTableDescriptor desc : tableDescriptors) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164..1db60fb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -32,15 +32,15 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
@@ -58,11 +58,11 @@ public class HbaseStreamingInput {
private static final byte[] QN = "C".getBytes();
public static void createTable(String tableName) throws IOException {
- Connection conn = getConnection();
- Admin hadmin = conn.getAdmin();
+ HConnection conn = getConnection();
+ HBaseAdmin hadmin = new HBaseAdmin(conn);
try {
- boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
+ boolean tableExist = hadmin.tableExists(tableName);
if (tableExist) {
logger.info("HTable '" + tableName + "' already exists");
return;
@@ -120,8 +120,8 @@ public class HbaseStreamingInput {
e.printStackTrace();
}
- Connection conn = getConnection();
- Table table = conn.getTable(TableName.valueOf(tableName));
+ HConnection conn = getConnection();
+ HTableInterface table = conn.getTable(tableName);
byte[] key = new byte[8 + 4];//time + id
@@ -136,7 +136,7 @@ public class HbaseStreamingInput {
Bytes.putInt(key, 8, i);
Put put = new Put(key);
byte[] cell = randomBytes(CELL_SIZE);
- put.addColumn(CF, QN, cell);
+ put.add(CF, QN, cell);
buffer.add(put);
}
table.put(buffer);
@@ -172,8 +172,8 @@ public class HbaseStreamingInput {
}
Random r = new Random();
- Connection conn = getConnection();
- Table table = conn.getTable(TableName.valueOf(tableName));
+ HConnection conn = getConnection();
+ HTableInterface table = conn.getTable(tableName);
long leftBound = getFirstKeyTime(table);
long rightBound = System.currentTimeMillis();
@@ -208,7 +208,7 @@ public class HbaseStreamingInput {
}
}
- private static long getFirstKeyTime(Table table) throws IOException {
+ private static long getFirstKeyTime(HTableInterface table) throws IOException {
long startTime = 0;
Scan scan = new Scan();
@@ -226,8 +226,8 @@ public class HbaseStreamingInput {
}
- private static Connection getConnection() throws IOException {
- return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ private static HConnection getConnection() throws IOException {
+ return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
}
private static String formatTime(long time) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ea05ab2..ca1a060 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,11 +23,10 @@ import java.io.IOException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -51,8 +50,8 @@ public class HtableAlterMetadataCLI extends AbstractApplication {
String metadataValue;
private void alter() throws IOException {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(table.getTableName());
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index df4e912..8ff5b0f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,14 +30,10 @@ import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -56,9 +52,9 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+
// get all kylin hbase tables
- Admin hbaseAdmin = conn.getAdmin();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -77,13 +73,12 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
// drop tables
for (String htableName : allTablesNeedToBeDropped) {
logger.info("Deleting HBase table " + htableName);
- TableName tableName = TableName.valueOf(htableName);
- if (hbaseAdmin.tableExists(tableName)) {
- if (hbaseAdmin.isTableEnabled(tableName)) {
- hbaseAdmin.disableTable(tableName);
+ if (hbaseAdmin.tableExists(htableName)) {
+ if (hbaseAdmin.isTableEnabled(htableName)) {
+ hbaseAdmin.disableTable(htableName);
}
- hbaseAdmin.deleteTable(tableName);
+ hbaseAdmin.deleteTable(htableName);
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index bba6745..1ea8e8d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -22,13 +22,12 @@ import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.TokenUtil;
import org.apache.hadoop.security.UserGroupInformation;
@@ -60,12 +59,12 @@ public class PingHBaseCLI {
Scan scan = new Scan();
int limit = 20;
- Connection conn = null;
- Table table = null;
+ HConnection conn = null;
+ HTableInterface table = null;
ResultScanner scanner = null;
try {
- conn = ConnectionFactory.createConnection(hconf);
- table = conn.getTable(TableName.valueOf(hbaseTable));
+ conn = HConnectionManager.createConnection(hconf);
+ table = conn.getTable(hbaseTable);
scanner = table.getScanner(scan);
int count = 0;
for (Result r : scanner) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index db516bb..01edb1f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,12 +22,11 @@ import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -71,8 +70,8 @@ public class RowCounterCLI {
logger.info("My Scan " + scan.toString());
- Connection conn = ConnectionFactory.createConnection(conf);
- Table tableInterface = conn.getTable(TableName.valueOf(htableName));
+ HConnection conn = HConnectionManager.createConnection(conf);
+ HTableInterface tableInterface = conn.getTable(htableName);
Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
int counter = 0;
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index f6b65ab..23e7e10 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,9 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.CliCommandExecutor;
@@ -59,7 +57,6 @@ import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableManager;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,8 +77,7 @@ public class StorageCleanupJob extends AbstractApplication {
private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
// get all kylin hbase tables
- Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- Admin hbaseAdmin = conn.getAdmin();
+ HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -157,22 +153,22 @@ public class StorageCleanupJob extends AbstractApplication {
}
class DeleteHTableRunnable implements Callable {
- Admin hbaseAdmin;
+ HBaseAdmin hbaseAdmin;
String htableName;
- DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
+ DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
this.hbaseAdmin = hbaseAdmin;
this.htableName = htableName;
}
public Object call() throws Exception {
logger.info("Deleting HBase table " + htableName);
- if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
- if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
- hbaseAdmin.disableTable(TableName.valueOf(htableName));
+ if (hbaseAdmin.tableExists(htableName)) {
+ if (hbaseAdmin.isTableEnabled(htableName)) {
+ hbaseAdmin.disableTable(htableName);
}
- hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+ hbaseAdmin.deleteTable(htableName);
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8..e36f662 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.metadata.model.SegmentStatusEnum;
import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,15 +49,14 @@ public class UpdateHTableHostCLI {
private List<String> errorMsgs = Lists.newArrayList();
private List<String> htables;
- private Admin hbaseAdmin;
+ private HBaseAdmin hbaseAdmin;
private KylinConfig kylinConfig;
private String oldHostValue;
public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
this.htables = htables;
this.oldHostValue = oldHostValue;
- Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
- hbaseAdmin = conn.getAdmin();
+ this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
this.kylinConfig = KylinConfig.getInstanceFromEnv();
}
@@ -169,9 +166,9 @@ public class UpdateHTableHostCLI {
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
- hbaseAdmin.disableTable(TableName.valueOf(tableName));
- hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
- hbaseAdmin.enableTable(TableName.valueOf(tableName));
+ hbaseAdmin.disableTable(tableName);
+ hbaseAdmin.modifyTable(tableName, desc);
+ hbaseAdmin.enableTable(tableName);
updatedResources.add(tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/tool/pom.xml
----------------------------------------------------------------------
diff --git a/tool/pom.xml b/tool/pom.xml
index 91040d4..919a903 100644
--- a/tool/pom.xml
+++ b/tool/pom.xml
@@ -60,16 +60,6 @@
<artifactId>hbase-client</artifactId>
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-common</artifactId>
- <scope>provided</scope>
- </dependency>
<!-- Env & Test -->
<dependency>
http://git-wip-us.apache.org/repos/asf/kylin/blob/4e41c363/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index c0042f3..c8bff89 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.RawResource;
@@ -231,7 +231,6 @@ public class CubeMigrationCLI {
operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
}
}
-
private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
String projectResPath = ProjectInstance.concatResourcePath(projectName);
if (!dstStore.exists(projectResPath))
@@ -448,11 +447,11 @@ public class CubeMigrationCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- Table srcAclHtable = null;
- Table destAclHtable = null;
+ HTableInterface srcAclHtable = null;
+ HTableInterface destAclHtable = null;
try {
- srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+ srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
// cube acl
Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -472,6 +471,7 @@ public class CubeMigrationCLI {
destAclHtable.put(put);
}
}
+ destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(srcAclHtable);
IOUtils.closeQuietly(destAclHtable);
@@ -537,12 +537,13 @@ public class CubeMigrationCLI {
case COPY_ACL: {
String cubeId = (String) opt.params[0];
String modelId = (String) opt.params[1];
- Table destAclHtable = null;
+ HTableInterface destAclHtable = null;
try {
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+ destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(destAclHtable);
}
@@ -559,7 +560,7 @@ public class CubeMigrationCLI {
}
}
- private static void updateMeta(KylinConfig config) {
+ private static void updateMeta(KylinConfig config){
String[] nodes = config.getRestServers();
for (String node : nodes) {
RestClient restClient = new RestClient(node);
[13/39] kylin git commit: KYLIN-2421 fix unit test
Posted by li...@apache.org.
KYLIN-2421 fix unit test
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/56a3e6c8
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/56a3e6c8
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/56a3e6c8
Branch: refs/heads/master-hbase0.98
Commit: 56a3e6c8d0c39271ea95d83bbe0f3f8c7db8b41b
Parents: 24fa338
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 4 14:43:36 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 4 19:37:59 2017 +0800
----------------------------------------------------------------------
core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/56a3e6c8/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index 86ea1df..20ee43e 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -48,6 +48,7 @@ import org.apache.kylin.metadata.model.TblColRef;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -102,6 +103,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
this.cleanupTestMetadata();
}
+ @Ignore ("To enable spark in IT, the inner cube removed the percentile measure, so ignore this test")
@Test
public void testCiCube() {
CubeDescManager mgr = CubeDescManager.getInstance(getTestConfig());