Posted to commits@kylin.apache.org by li...@apache.org on 2015/01/23 09:32:03 UTC

[16/50] [abbrv] incubator-kylin git commit: [KYLIN-562] better logging

[KYLIN-562] better logging


Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/46c9a13d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/46c9a13d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/46c9a13d

Branch: refs/heads/inverted-index
Commit: 46c9a13d5faf08c4a2b10c9acc50742c9195cc38
Parents: a843cb9
Author: Li, Yang <ya...@ebay.com>
Authored: Thu Jan 22 17:24:44 2015 +0800
Committer: Li, Yang <ya...@ebay.com>
Committed: Thu Jan 22 17:24:44 2015 +0800

----------------------------------------------------------------------
 .../job/hadoop/hbase/CreateHTableJob.java       | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/46c9a13d/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java b/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
index 1384c03..b60b5c7 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
@@ -58,7 +58,7 @@ import com.kylinolap.job.tools.LZOSupportnessChecker;
 
 public class CreateHTableJob extends AbstractHadoopJob {
 
-    protected static final Logger log = LoggerFactory.getLogger(CreateHTableJob.class);
+    protected static final Logger logger = LoggerFactory.getLogger(CreateHTableJob.class);
 
     @Override
     public int run(String[] args) throws Exception {
@@ -97,10 +97,10 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 cf.setMaxVersions(1);
 
                 if (LZOSupportnessChecker.getSupportness()) {
-                    log.info("hbase will use lzo to compress data");
+                    logger.info("hbase will use lzo to compress data");
                     cf.setCompressionType(Algorithm.LZO);
                 } else {
-                    log.info("hbase will not use lzo to compress data");
+                    logger.info("hbase will not use lzo to compress data");
                 }
 
                 cf.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
@@ -119,21 +119,21 @@ public class CreateHTableJob extends AbstractHadoopJob {
 
             try {
                 initHTableCoprocessor(tableDesc);
-                log.info("hbase table " + tableName + " deployed with coprocessor.");
+                logger.info("hbase table " + tableName + " deployed with coprocessor.");
 
             } catch (Exception ex) {
-                log.error("Error deploying coprocessor on " + tableName, ex);
-                log.error("Will try creating the table without coprocessor.");
+                logger.error("Error deploying coprocessor on " + tableName, ex);
+                logger.error("Will try creating the table without coprocessor.");
             }
 
             admin.createTable(tableDesc, splitKeys);
-            log.info("create hbase table " + tableName + " done.");
+            logger.info("create hbase table " + tableName + " done.");
 
             return 0;
         } catch (Exception e) {
             printUsage(options);
             e.printStackTrace(System.err);
-            log.error(e.getLocalizedMessage(), e);
+            logger.error(e.getLocalizedMessage(), e);
             return 2;
         } finally {
             admin.close();
@@ -175,8 +175,8 @@ public class CreateHTableJob extends AbstractHadoopJob {
             IOUtils.closeStream(reader);
         }
         
-        System.out.println((rowkeyList.size() + 1) + " regions");
-        System.out.println(rowkeyList.size() + " splits");
+        logger.info((rowkeyList.size() + 1) + " regions");
+        logger.info(rowkeyList.size() + " splits");
         for (byte[] split : rowkeyList) {
             System.out.println(StringUtils.byteToHexString(split));
         }