Posted to commits@kylin.apache.org by lu...@apache.org on 2015/09/06 09:59:48 UTC

[24/50] [abbrv] incubator-kylin git commit: KYLIN-956 allow users to configure hbase compression algorithm in kylin.properties

KYLIN-956 allow users to configure hbase compression algorithm in kylin.properties


Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/260a850d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/260a850d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/260a850d

Branch: refs/heads/0.7
Commit: 260a850d44abd9c8651b1ec90b191931136bcfce
Parents: 92b111c
Author: Meng Liang <13...@139.com>
Authored: Wed Aug 26 17:06:16 2015 +0800
Committer: Luke Han <lu...@apache.org>
Committed: Sun Sep 6 14:37:58 2015 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfig.java    |   7 +
 conf/kylin.properties                           |   3 +
 .../job/deployment/HbaseConfigPrinterCLI.java   | 148 -------------------
 .../kylin/job/hadoop/hbase/CreateHTableJob.java |  38 ++++-
 .../kylin/job/tools/LZOSupportnessChecker.java  |  47 ------
 5 files changed, 44 insertions(+), 199 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/260a850d/common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/kylin/common/KylinConfig.java b/common/src/main/java/org/apache/kylin/common/KylinConfig.java
index d5eb97a..0809a1f 100644
--- a/common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -122,6 +122,8 @@ public class KylinConfig {
 
     public static final String VERSION = "${project.version}";
 
+    public static final String HTABLE_DEFAULT_COMPRESSION_CODEC = "kylin.hbase.default.compression.codec";
+
     // static cached instances
     private static KylinConfig ENV_INSTANCE = null;
 
@@ -449,6 +451,11 @@ public class KylinConfig {
         return Integer.parseInt(this.getOptional("kylin.hbase.client.keyvalue.maxsize", "10485760"));
     }
 
+    public String getHbaseDefaultCompressionCodec() {
+        return getOptional(HTABLE_DEFAULT_COMPRESSION_CODEC);
+
+    }
+
     private String getOptional(String prop) {
         final String property = System.getProperty(prop);
         return property != null ? property : kylinConfig.getString(prop);
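
getHbaseDefaultCompressionCodec() calls getOptional() without a default value, so it returns null when kylin.hbase.default.compression.codec is set neither as a system property nor in kylin.properties. A minimal caller-side sketch (not part of this commit; the "none" fallback is only an illustration) that avoids a NullPointerException on the later toLowerCase() call:

    // Read the configured codec and fall back to "none" when the property is unset.
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    String codec = config.getHbaseDefaultCompressionCodec();
    if (codec == null || codec.trim().isEmpty()) {
        codec = "none"; // any unrecognized value leaves the HTable uncompressed
    }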

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/260a850d/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/conf/kylin.properties b/conf/kylin.properties
index ad1f8fa..8c7c647 100644
--- a/conf/kylin.properties
+++ b/conf/kylin.properties
@@ -60,6 +60,9 @@ kylin.job.yarn.app.rest.check.interval.seconds=10
 # Hive database name for putting the intermediate flat tables
 kylin.job.hive.database.for.intermediatetable=default
 
+# Default compression codec for HTable; supported values: snappy, lzo, gzip, lz4
+kylin.hbase.default.compression.codec=snappy
+
 ## Config for Restful APP ##
 # database connection settings:
 ldap.server=
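
Because getOptional() above checks System.getProperty() first, the codec can also be overridden per JVM without touching kylin.properties. A small illustration (not part of this commit); note that "gz" and "gzip" both map to GZ compression in the switch added below:

    // Overrides the kylin.properties value for this JVM only.
    System.setProperty("kylin.hbase.default.compression.codec", "gz");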

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/260a850d/job/src/main/java/org/apache/kylin/job/deployment/HbaseConfigPrinterCLI.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/deployment/HbaseConfigPrinterCLI.java b/job/src/main/java/org/apache/kylin/job/deployment/HbaseConfigPrinterCLI.java
deleted file mode 100644
index 4596bb0..0000000
--- a/job/src/main/java/org/apache/kylin/job/deployment/HbaseConfigPrinterCLI.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.job.deployment;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.kylin.job.tools.LZOSupportnessChecker;
-
-/**
- * Created by honma on 9/30/14.
- * <p/>
- * This class is assumed to be run by
- * "hbase org.apache.hadoop.util.RunJar kylin-job-0.5.7-SNAPSHOT-job.jar org.apache.kylin.job.deployment.HadoopConfigPrinter "
- * in the shell, so that hbase and hadoop related environment variables will be
- * visible to this class.
- */
-public class HbaseConfigPrinterCLI {
-    public static void main(String[] args) throws IOException {
-        if (args.length != 1) {
-            System.out.println("Usage: hbase org.apache.hadoop.util.RunJar kylin-job-0.5.7-SNAPSHOT-job.jar org.apache.kylin.job.deployment.HadoopConfigPrinter targetFile");
-            System.exit(1);
-        }
-
-        printConfigs(args[0]);
-    }
-
-    private static void printConfigs(String targetFile) throws IOException {
-
-        File output = new File(targetFile);
-        if (output.exists() && output.isDirectory()) {
-            throw new IllegalStateException("The output file: " + targetFile + " is a directory");
-        }
-
-        StringBuilder sb = new StringBuilder();
-
-        sb.append("export KYLIN_LZO_SUPPORTED=" + ConfigLoader.LZO_INFO_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_LD_LIBRARY_PATH=" + ConfigLoader.LD_LIBRARY_PATH_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_HBASE_CLASSPATH=" + ConfigLoader.HBASE_CLASSPATH_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_HBASE_CONF_PATH=" + ConfigLoader.HBASE_CONF_FOLDER_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_ZOOKEEPER_QUORUM=" + ConfigLoader.ZOOKEEP_QUORUM_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_ZOOKEEPER_CLIENT_PORT=" + ConfigLoader.ZOOKEEPER_CLIENT_PORT_LOADER.loadValue() + "\n");
-        sb.append("export KYLIN_ZOOKEEPER_ZNODE_PARENT=" + ConfigLoader.ZOOKEEPER_ZNODE_PARENT_LOADER.loadValue() + "\n");
-
-        FileUtils.writeStringToFile(output, sb.toString());
-    }
-
-    @SuppressWarnings("unused")
-    private static void printAllEnv() {
-        for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
-            System.out.println("Key: " + entry.getKey());
-            System.out.println("Value: " + entry.getValue());
-            System.out.println();
-        }
-    }
-
-    enum ConfigLoader {
-
-        LZO_INFO_LOADER {
-            @Override
-            public String loadValue() {
-                return LZOSupportnessChecker.getSupportness() ? "true" : "false";
-            }
-        },
-
-        ZOOKEEP_QUORUM_LOADER {
-            @Override
-            public String loadValue() {
-                Configuration conf = HBaseConfiguration.create();
-                return conf.get(HConstants.ZOOKEEPER_QUORUM);
-            }
-        },
-
-        ZOOKEEPER_ZNODE_PARENT_LOADER {
-            @Override
-            public String loadValue() {
-                Configuration conf = HBaseConfiguration.create();
-                return conf.get(HConstants.ZOOKEEPER_ZNODE_PARENT);
-            }
-        },
-
-        ZOOKEEPER_CLIENT_PORT_LOADER {
-            @Override
-            public String loadValue() {
-                Configuration conf = HBaseConfiguration.create();
-                return conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
-
-            }
-        },
-
-        LD_LIBRARY_PATH_LOADER {
-            @Override
-            public String loadValue() {
-                return System.getenv("LD_LIBRARY_PATH");
-            }
-        },
-
-        HBASE_CLASSPATH_LOADER {
-            @Override
-            public String loadValue() {
-                return System.getenv("CLASSPATH");
-            }
-        },
-
-        HBASE_CONF_FOLDER_LOADER {
-            @Override
-            public String loadValue() {
-                String output = HBASE_CLASSPATH_LOADER.loadValue();
-                String[] paths = output.split(":");
-                StringBuilder sb = new StringBuilder();
-
-                for (String path : paths) {
-                    path = path.trim();
-                    File f = new File(path);
-                    if (StringUtils.containsIgnoreCase(path, "conf") && f.exists() && f.isDirectory() && f.getName().equalsIgnoreCase("conf")) {
-                        sb.append(":" + path);
-                    }
-                }
-                return sb.toString();
-            }
-        };
-
-        public abstract String loadValue();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/260a850d/job/src/main/java/org/apache/kylin/job/hadoop/hbase/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/hbase/CreateHTableJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/hbase/CreateHTableJob.java
index ffeaabf..f114b5b 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/hbase/CreateHTableJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/hbase/CreateHTableJob.java
@@ -48,7 +48,6 @@ import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.cube.model.HBaseColumnFamilyDesc;
 import org.apache.kylin.job.hadoop.AbstractHadoopJob;
 import org.apache.kylin.job.tools.DeployCoprocessorCLI;
-import org.apache.kylin.job.tools.LZOSupportnessChecker;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -93,12 +92,43 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 HColumnDescriptor cf = new HColumnDescriptor(cfDesc.getName());
                 cf.setMaxVersions(1);
 
-                if (LZOSupportnessChecker.getSupportness()) {
+                KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+                String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase();
+
+                switch (hbaseDefaultCC) {
+                case "snappy": {
+                    logger.info("hbase will use snappy to compress data");
+                    cf.setCompressionType(Algorithm.SNAPPY);
+                    break;
+                }
+                case "lzo": {
                     logger.info("hbase will use lzo to compress data");
                     cf.setCompressionType(Algorithm.LZO);
-                } else {
-                    logger.info("hbase will not use lzo to compress data");
+                    break;
+                }
+                case "gz":
+                case "gzip": {
+                    logger.info("hbase will use gzip to compress data");
+                    cf.setCompressionType(Algorithm.GZ);
+                    break;
+                }
+                case "lz4": {
+                    logger.info("hbase will use lz4 to compress data");
+                    cf.setCompressionType(Algorithm.LZ4);
+                    break;
                 }
+                default: {
+                    logger.info("hbase will not use any compression codec to compress data");
+
+                }
+                }
+
+                //if (LZOSupportnessChecker.getSupportness()) {
+                //     logger.info("hbase will use lzo to compress data");
+                //     cf.setCompressionType(Algorithm.LZO);
+                // } else {
+                //     logger.info("hbase will not use lzo to compress data");
+                // }
 
                 cf.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
                 cf.setInMemory(false);
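
The deleted LZOSupportnessChecker (further below) used HBase's org.apache.hadoop.hbase.util.CompressionTest to verify that LZO actually works before enabling it. A sketch of the same pre-flight check generalized to the configured codec (not part of this commit; the class and method names are hypothetical):

    import java.io.File;
    import org.apache.hadoop.hbase.util.CompressionTest;

    // Hypothetical helper mirroring the removed LZOSupportnessChecker, but for any codec name.
    public final class CompressionCodecChecker {
        public static boolean isSupported(String codecName) {
            try {
                // CompressionTest round-trips a small file with the given codec and throws on failure.
                File temp = File.createTempFile("codec-test", ".tmp");
                CompressionTest.main(new String[] { "file://" + temp.getAbsolutePath(), codecName });
                return true;
            } catch (Exception e) {
                return false;
            }
        }
    }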

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/260a850d/job/src/main/java/org/apache/kylin/job/tools/LZOSupportnessChecker.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/tools/LZOSupportnessChecker.java b/job/src/main/java/org/apache/kylin/job/tools/LZOSupportnessChecker.java
deleted file mode 100644
index aa43b1d..0000000
--- a/job/src/main/java/org/apache/kylin/job/tools/LZOSupportnessChecker.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.job.tools;
-
-import java.io.File;
-
-import org.apache.hadoop.hbase.util.CompressionTest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Created by honma on 10/21/14.
- */
-public class LZOSupportnessChecker {
-    private static final Logger log = LoggerFactory.getLogger(LZOSupportnessChecker.class);
-
-    public static boolean getSupportness() {
-        try {
-            File temp = File.createTempFile("test", ".tmp");
-            CompressionTest.main(new String[] { "file://" + temp.getAbsolutePath(), "lzo" });
-        } catch (Exception e) {
-            log.info("LZO compression test encounters " + e.toString());
-            return false;
-        }
-        return true;
-    }
-
-    public static void main(String[] args) throws Exception {
-        System.out.println("LZO supported by current env? " + getSupportness());
-    }
-}