Posted to commits@kylin.apache.org by li...@apache.org on 2016/04/29 10:04:48 UTC

[1/3] kylin git commit: minor, refine error message when refreshing a cube without partition col

Repository: kylin
Updated Branches:
  refs/heads/master c3cf042c4 -> 07e3c25fd


minor, refine error message when refreshing a cube without partition col


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/07e3c25f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/07e3c25f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/07e3c25f

Branch: refs/heads/master
Commit: 07e3c25fd11381dbbb999b4d5550863871d681ea
Parents: e1287f4
Author: lidongsjtu <li...@apache.org>
Authored: Fri Apr 29 16:03:04 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Fri Apr 29 16:03:11 2016 +0800

----------------------------------------------------------------------
 webapp/app/partials/jobs/job_refresh.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/07e3c25f/webapp/app/partials/jobs/job_refresh.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/jobs/job_refresh.html b/webapp/app/partials/jobs/job_refresh.html
index e3f452f..83e1d01 100644
--- a/webapp/app/partials/jobs/job_refresh.html
+++ b/webapp/app/partials/jobs/job_refresh.html
@@ -84,7 +84,7 @@
         <div ng-if="!metaModel.model.partition_desc.partition_date_column" class="row">
             <div class="col-md-2"></div>
             <div class="col-md-8">
-                <span>No partition date column defined.</span>
+                <span>No partition date column defined. If you want to rebuild the cube, please click "Build".</span>
             </div>
             <div class="col-md-2"></div>
         </div>


[3/3] kylin git commit: minor, ZipFileUtils: retain modified time of zip entries

Posted by li...@apache.org.
minor, ZipFileUtils: retain modified time of zip entries


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0754a795
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0754a795
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0754a795

Branch: refs/heads/master
Commit: 0754a7953163399a842f4e07192a7885f1242b9b
Parents: c3cf042
Author: lidongsjtu <li...@apache.org>
Authored: Fri Apr 29 12:10:11 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Fri Apr 29 16:03:11 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/common/util/ZipFileUtils.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0754a795/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
index b5ce829..d82a880 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
@@ -44,6 +44,7 @@ public class ZipFileUtils {
                 compressDirectoryToZipfile(rootDir, sourceDir + normDir(sourceFile.getName()), out);
             } else {
                 ZipEntry entry = new ZipEntry(normDir(StringUtils.isEmpty(rootDir) ? sourceDir : sourceDir.replace(rootDir, "")) + sourceFile.getName());
+                entry.setTime(sourceFile.lastModified());
                 out.putNextEntry(entry);
 
                 FileInputStream in = new FileInputStream(sourceDir + sourceFile.getName());
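
For context, a minimal JDK-only sketch (illustration only, not Kylin code) of what the added line buys: without ZipEntry.setTime(), each entry records the time the archive was written, so the source file's original modification time is lost.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Minimal sketch: zip a single file and keep its modification time on the entry.
// The file names are placeholders for illustration.
public class ZipWithMtime {
    public static void main(String[] args) throws IOException {
        File source = new File("kylin.log");
        try (FileInputStream in = new FileInputStream(source);
             ZipOutputStream out = new ZipOutputStream(new FileOutputStream("kylin.log.zip"))) {
            ZipEntry entry = new ZipEntry(source.getName());
            entry.setTime(source.lastModified()); // the line this commit adds in ZipFileUtils
            out.putNextEntry(entry);
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) > 0) {
                out.write(buf, 0, n);
            }
            out.closeEntry();
        }
    }
}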


[2/3] kylin git commit: KYLIN-1614 Backend impl of Job diagnosis dump

Posted by li...@apache.org.
KYLIN-1614 Backend impl of Job diagnosis dump


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e1287f4f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e1287f4f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e1287f4f

Branch: refs/heads/master
Commit: e1287f4f8f2c38131941fcb2082b46dd5f2ee50f
Parents: 0754a79
Author: lidongsjtu <li...@apache.org>
Authored: Fri Apr 29 14:39:38 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Fri Apr 29 16:03:11 2016 +0800

----------------------------------------------------------------------
 .../rest/controller/DiagnosisController.java    | 120 ++++++++++++
 .../kylin/rest/service/DiagnosisService.java    |  27 ++-
 .../kylin/tool/AbstractInfoExtractor.java       | 119 ++++++++++++
 .../apache/kylin/tool/CubeMetaExtractor.java    |  38 +---
 .../org/apache/kylin/tool/DiagnosisInfoCLI.java | 149 ++-------------
 .../apache/kylin/tool/HBaseUsageExtractor.java  |  41 +----
 .../apache/kylin/tool/JobDiagnosisInfoCLI.java  | 182 ++++++++++++++++++
 .../org/apache/kylin/tool/JobInfoExtractor.java | 184 -------------------
 .../apache/kylin/tool/KylinLogExtractor.java    |  78 ++++++++
 .../java/org/apache/kylin/tool/ToolUtil.java    |  54 ++++++
 10 files changed, 609 insertions(+), 383 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/server/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java b/server/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
new file mode 100644
index 0000000..b9da9b2
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.kylin.rest.controller;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.metadata.badquery.BadQueryEntry;
+import org.apache.kylin.metadata.badquery.BadQueryHistory;
+import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.service.DiagnosisService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import com.google.common.collect.Lists;
+
+@Controller
+@RequestMapping(value = "/diag")
+public class DiagnosisController {
+
+    private static final Logger logger = LoggerFactory.getLogger(DiagnosisController.class);
+
+    @Autowired
+    private DiagnosisService dgService;
+
+    /**
+     * Get bad query history
+     */
+    @RequestMapping(value = "/{project}/sql", method = { RequestMethod.GET })
+    @ResponseBody
+    public List<BadQueryEntry> getBadQuerySql(@PathVariable String project) {
+
+        List<BadQueryEntry> badEntry = Lists.newArrayList();
+        try {
+            BadQueryHistory badQueryHistory = dgService.getProjectBadQueryHistory(project);
+            badEntry.addAll(badQueryHistory.getEntries());
+        } catch (IOException e) {
+            throw new InternalErrorException(e + " Caused by: " + e.getMessage(), e);
+        }
+
+        return badEntry;
+    }
+
+    /**
+     * Get diagnosis information for project
+     */
+    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET })
+    @ResponseBody
+    public void dumpProjectDiagnosisInfo(@PathVariable String project, final HttpServletRequest request, final HttpServletResponse response) {
+        String filePath;
+        try {
+            filePath = dgService.dumpProjectDiagnosisInfo(project);
+        } catch (IOException e) {
+            throw new InternalErrorException(e + " Caused by: " + e.getMessage(), e);
+        }
+
+        setDownloadResponse(filePath, response);
+    }
+
+    /**
+     * Get diagnosis information for job
+     */
+    @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET })
+    @ResponseBody
+    public void dumpJobDiagnosisInfo(@PathVariable String jobId, final HttpServletRequest request, final HttpServletResponse response) {
+        String filePath;
+        try {
+            filePath = dgService.dumpJobDiagnosisInfo(jobId);
+        } catch (IOException e) {
+            throw new InternalErrorException(e + " Caused by: " + e.getMessage(), e);
+        }
+
+        setDownloadResponse(filePath, response);
+    }
+
+    private void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
+        File file = new File(downloadFile);
+        try (InputStream fileInputStream = new FileInputStream(file); OutputStream output = response.getOutputStream();) {
+            response.reset();
+            response.setContentType("application/octet-stream");
+            response.setContentLength((int) (file.length()));
+            response.setHeader("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"");
+            IOUtils.copyLarge(fileInputStream, output);
+            output.flush();
+        } catch (IOException e) {
+            throw new InternalErrorException(e + " Caused by: " + e.getMessage(), e);
+        }
+    }
+}
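
For illustration, a hedged JDK-only client for the new job download endpoint. The base URL, credentials and job ID below are assumptions (Kylin conventionally serves its REST API under /kylin/api with basic authentication); none of them are defined by this commit.

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Base64;

// Hypothetical client for GET /diag/job/{jobId}/download; host, port, base path,
// credentials and job ID are placeholder values, not taken from this commit.
public class DownloadJobDiagnosis {
    public static void main(String[] args) throws Exception {
        String jobId = "your-job-uuid";
        URL url = new URL("http://localhost:7070/kylin/api/diag/job/" + jobId + "/download");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        String auth = Base64.getEncoder().encodeToString("ADMIN:KYLIN".getBytes("UTF-8"));
        conn.setRequestProperty("Authorization", "Basic " + auth); // assumes basic auth with default credentials
        try (InputStream in = conn.getInputStream();
             OutputStream out = new FileOutputStream("job_diagnosis.zip")) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) > 0) {
                out.write(buf, 0, n);
            }
        } finally {
            conn.disconnect();
        }
    }
}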

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/server/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java b/server/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
index c497323..a354159 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
@@ -18,28 +18,49 @@
 
 package org.apache.kylin.rest.service;
 
+import java.io.File;
 import java.io.IOException;
-import java.util.UUID;
 
 import org.apache.kylin.metadata.badquery.BadQueryHistory;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.tool.DiagnosisInfoCLI;
+import org.apache.kylin.tool.JobDiagnosisInfoCLI;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
+import com.google.common.io.Files;
+
 @Component("diagnosisService")
 public class DiagnosisService extends BasicService {
 
+    private static final Logger logger = LoggerFactory.getLogger(DiagnosisService.class);
+
+    private File getDumpDir() {
+        return Files.createTempDir();
+    }
+
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
     public BadQueryHistory getProjectBadQueryHistory(String project) throws IOException {
         return getBadQueryHistoryManager().getBadQueriesForProject(project);
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
-    public String dumpDiagnosisInfo(String project) throws IOException {
-        String[] args = { "-project", project, "-destDir", System.getProperty("java.io.tmpdir") + UUID.randomUUID(), "-compress", "true" };
+    public String dumpProjectDiagnosisInfo(String project) throws IOException {
+        String[] args = { "-project", project, "-destDir", getDumpDir().getAbsolutePath() };
+        logger.info("DiagnosisInfoCLI args: " + args);
         DiagnosisInfoCLI diagnosisInfoCli = new DiagnosisInfoCLI();
         diagnosisInfoCli.execute(args);
         return diagnosisInfoCli.getExportDest();
     }
+
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
+    public String dumpJobDiagnosisInfo(String jobId) throws IOException {
+        String[] args = { "-jobId", jobId, "-destDir", getDumpDir().getAbsolutePath() };
+        logger.info("JobDiagnosisInfoCLI args: " + args);
+        JobDiagnosisInfoCLI jobInfoExtractor = new JobDiagnosisInfoCLI();
+        jobInfoExtractor.execute(args);
+        return jobInfoExtractor.getExportDest();
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
new file mode 100644
index 0000000..64e1db8
--- /dev/null
+++ b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.kylin.tool;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.common.util.ZipFileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractInfoExtractor extends AbstractApplication {
+    private static final Logger logger = LoggerFactory.getLogger(AbstractInfoExtractor.class);
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true).withDescription("specify the dest dir to save the related information").create("destDir");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false).withDescription("specify whether to compress the output with zip. Default true.").create("compress");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_QUIET = OptionBuilder.withArgName("quiet").hasArg().isRequired(false).withDescription("specify whether to print final result").create("quiet");
+
+
+    private static final String DEFAULT_PACKAGE_PREFIX = "dump";
+
+    protected final Options options;
+
+    protected String packagePrefix;
+    protected File exportDir;
+
+    public AbstractInfoExtractor() {
+        options = new Options();
+        options.addOption(OPTION_DEST);
+        options.addOption(OPTION_COMPRESS);
+        options.addOption(OPTION_QUIET);
+
+        packagePrefix = DEFAULT_PACKAGE_PREFIX;
+    }
+
+    @Override
+    protected Options getOptions() {
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        String exportDest = optionsHelper.getOptionValue(options.getOption("destDir"));
+        boolean compress = optionsHelper.hasOption(OPTION_COMPRESS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
+        boolean quiet = optionsHelper.hasOption(OPTION_QUIET) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_QUIET)) : false;
+
+        if (StringUtils.isEmpty(exportDest)) {
+            throw new RuntimeException("destDir is not set, exit directly without extracting");
+        }
+        if (!exportDest.endsWith("/")) {
+            exportDest = exportDest + "/";
+        }
+
+        // create new folder to contain the output
+        String packageName = packagePrefix + "_" + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
+        if (new File(exportDest).exists()) {
+            exportDest = exportDest + packageName + "/";
+        }
+        exportDir = new File(exportDest);
+
+        executeExtract(optionsHelper, exportDir);
+
+        // compress to zip package
+        if (compress) {
+            File tempZipFile = File.createTempFile(packagePrefix + "_", ".zip");
+            ZipFileUtils.compressZipFile(exportDir.getAbsolutePath(), tempZipFile.getAbsolutePath());
+            FileUtils.cleanDirectory(exportDir);
+
+            File zipFile = new File(exportDir, packageName + ".zip");
+            FileUtils.moveFile(tempZipFile, zipFile);
+            exportDest = zipFile.getAbsolutePath();
+            exportDir = new File(exportDest);
+        }
+
+        if (!quiet) {
+            StringBuffer output = new StringBuffer();
+            output.append("\n========================================");
+            output.append("\nDump " + packagePrefix + " package locates at: \n" + exportDir.getAbsolutePath());
+            output.append("\n========================================");
+            logger.info(output.toString());
+        }
+    }
+
+    protected abstract void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception;
+
+    public String getExportDest() {
+        return exportDir.getAbsolutePath();
+    }
+}
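
The base class fixes a template: parse -destDir/-compress/-quiet, hand a prepared export directory to executeExtract(), then optionally zip the result and report where it landed. A hedged sketch of a hypothetical subclass (class name and output file invented for illustration) shows how concrete extractors such as CubeMetaExtractor or KylinLogExtractor plug in:

package org.apache.kylin.tool;

import java.io.File;

import org.apache.commons.io.FileUtils;
import org.apache.kylin.common.util.OptionsHelper;

// Hypothetical subclass, not part of this commit.
public class HelloInfoExtractor extends AbstractInfoExtractor {

    public HelloInfoExtractor() {
        super();
        // prefix used for the generated package name, e.g. hello_2016_04_29_16_03_11
        packagePrefix = "hello";
    }

    @Override
    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
        // Only the extraction itself lives here; option parsing, directory naming
        // and optional zipping are handled by AbstractInfoExtractor.execute().
        FileUtils.writeStringToFile(new File(exportDir, "hello.txt"), "extracted content");
    }

    public static void main(String[] args) {
        HelloInfoExtractor extractor = new HelloInfoExtractor();
        extractor.execute(new String[] { "-destDir", "/tmp/hello_dump", "-compress", "false" });
        System.out.println("Output at: " + extractor.getExportDest());
    }
}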

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
index 6b56201..73eec0d 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
@@ -25,12 +25,10 @@ import java.util.List;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.ResourceTool;
-import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeDescManager;
 import org.apache.kylin.cube.CubeInstance;
@@ -66,7 +64,7 @@ import com.google.common.collect.Lists;
  * extract cube related info for debugging/distributing purpose
  * TODO: deal with II case
  */
-public class CubeMetaExtractor extends AbstractApplication {
+public class CubeMetaExtractor extends AbstractInfoExtractor {
 
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
 
@@ -84,10 +82,6 @@ public class CubeMetaExtractor extends AbstractApplication {
     @SuppressWarnings("static-access")
     private static final Option OPTION_INCLUDE_SEGMENT_DETAILS = OptionBuilder.withArgName("includeSegmentDetails").hasArg().isRequired(false).withDescription("set this to true if want to extract segment details too, such as dict, tablesnapshot. Default false").create("includeSegmentDetails");
 
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(false).withDescription("specify the dest dir to save the related metadata").create("destDir");
-
-    private Options options = null;
     private KylinConfig kylinConfig;
     private MetadataManager metadataManager;
     private ProjectManager projectManager;
@@ -109,7 +103,9 @@ public class CubeMetaExtractor extends AbstractApplication {
     List<CubeInstance> cubesToTrimAndSave = Lists.newArrayList();//these cubes needs to be saved skipping segments
 
     public CubeMetaExtractor() {
-        options = new Options();
+        super();
+
+        packagePrefix = "cubemeta";
 
         OptionGroup realizationOrProject = new OptionGroup();
         realizationOrProject.addOption(OPTION_CUBE);
@@ -121,34 +117,14 @@ public class CubeMetaExtractor extends AbstractApplication {
         options.addOption(OPTION_INCLUDE_SEGMENTS);
         options.addOption(OPTION_INCLUDE_JOB);
         options.addOption(OPTION_INCLUDE_SEGMENT_DETAILS);
-        options.addOption(OPTION_DEST);
-
-    }
-
-    @Override
-    protected Options getOptions() {
-        return options;
     }
 
     @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
         includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : false;
         includeSegmentDetails = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENT_DETAILS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENT_DETAILS)) : false;
 
-        String dest = null;
-        if (optionsHelper.hasOption(OPTION_DEST)) {
-            dest = optionsHelper.getOptionValue(OPTION_DEST);
-        }
-
-        if (StringUtils.isEmpty(dest)) {
-            throw new RuntimeException("destDir is not set, exit directly without extracting");
-        }
-
-        if (!dest.endsWith("/")) {
-            dest = dest + "/";
-        }
-
         kylinConfig = KylinConfig.getInstanceFromEnv();
         metadataManager = MetadataManager.getInstance(kylinConfig);
         projectManager = ProjectManager.getInstance(kylinConfig);
@@ -193,9 +169,7 @@ public class CubeMetaExtractor extends AbstractApplication {
             }
         }
 
-        executeExtraction(dest);
-
-        logger.info("Extracted metadata files located at: " + new File(dest).getAbsolutePath());
+        executeExtraction(exportDir.getAbsolutePath());
     }
 
     private void executeExtraction(String dest) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
index a403ee2..f12f1da 100644
--- a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
@@ -20,46 +20,21 @@ package org.apache.kylin.tool;
 
 import java.io.File;
 import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinVersion;
-import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.common.util.ZipFileUtils;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
-public class DiagnosisInfoCLI extends AbstractApplication {
+public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(DiagnosisInfoCLI.class);
 
-    private static final int DEFAULT_LOG_PERIOD = 3;
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false).withDescription("specify how many days of kylin logs to extract. Default 3.").create("logPeriod");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false).withDescription("specify whether to compress the output with zip. Default true.").create("compress");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true).withDescription("specify the dest dir to save the related metadata").create("destDir");
-
     @SuppressWarnings("static-access")
     private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
 
@@ -72,23 +47,18 @@ public class DiagnosisInfoCLI extends AbstractApplication {
     @SuppressWarnings("static-access")
     private static final Option OPTION_INCLUDE_LINUX = OptionBuilder.withArgName("includeLinux").hasArg().isRequired(false).withDescription("Specify whether to include os and linux kernel info to extract. Default true.").create("includeLinux");
 
-    private CubeMetaExtractor cubeMetaExtractor;
-    private HBaseUsageExtractor hBaseUsageExtractor;
     private KylinConfig kylinConfig;
-    private Options options;
-    private String exportDest;
 
     public DiagnosisInfoCLI() {
-        cubeMetaExtractor = new CubeMetaExtractor();
-        hBaseUsageExtractor = new HBaseUsageExtractor();
+        super();
+
+        packagePrefix = "diagnosis";
         kylinConfig = KylinConfig.getInstanceFromEnv();
 
-        options = new Options();
-        options.addOption(OPTION_LOG_PERIOD);
-        options.addOption(OPTION_COMPRESS);
-        options.addOption(OPTION_DEST);
         options.addOption(OPTION_PROJECT);
         options.addOption(OPTION_INCLUDE_CONF);
+        options.addOption(OPTION_INCLUDE_HBASE);
+        options.addOption(OPTION_INCLUDE_LINUX);
     }
 
     public static void main(String args[]) {
@@ -97,42 +67,21 @@ public class DiagnosisInfoCLI extends AbstractApplication {
     }
 
     @Override
-    protected Options getOptions() {
-        return options;
-    }
-
-    @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws IOException {
         final String project = optionsHelper.getOptionValue(options.getOption("project"));
-        exportDest = optionsHelper.getOptionValue(options.getOption("destDir"));
-
-        if (StringUtils.isEmpty(exportDest)) {
-            throw new RuntimeException("destDir is not set, exit directly without extracting");
-        }
-        if (!exportDest.endsWith("/")) {
-            exportDest = exportDest + "/";
-        }
-
-        // create new folder to contain the output
-        String packageName = "diagnosis_" + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
-        if (new File(exportDest).exists()) {
-            exportDest = exportDest + packageName + "/";
-        }
-        File exportDir = new File(exportDest);
-
-        // export cube metadata
-        String[] cubeMetaArgs = { "-destDir", exportDest + "metadata", "-project", project };
-        cubeMetaExtractor.execute(cubeMetaArgs);
-
-        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
-        boolean compress = optionsHelper.hasOption(OPTION_COMPRESS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
         boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
         boolean includeHBase = optionsHelper.hasOption(OPTION_INCLUDE_HBASE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_HBASE)) : true;
         boolean includeLinux = optionsHelper.hasOption(OPTION_INCLUDE_LINUX) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_LINUX)) : true;
 
+        // export cube metadata
+        String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project", project, "-compress", "false", "-quiet", "false" };
+        CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
+        cubeMetaExtractor.execute(cubeMetaArgs);
+
         // export HBase
         if (includeHBase) {
-            String[] hbaseArgs = { "-destDir", exportDest + "hbase", "-project", project };
+            String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project", project, "-compress", "false", "-quiet", "false" };
+            HBaseUsageExtractor hBaseUsageExtractor = new HBaseUsageExtractor();
             hBaseUsageExtractor.execute(hbaseArgs);
         }
 
@@ -140,7 +89,7 @@ public class DiagnosisInfoCLI extends AbstractApplication {
         if (includeConf) {
             logger.info("Start to extract kylin conf files.");
             try {
-                FileUtils.copyDirectoryToDirectory(new File(getConfFolder()), exportDir);
+                FileUtils.copyDirectoryToDirectory(new File(ToolUtil.getConfFolder()), exportDir);
             } catch (Exception e) {
                 logger.warn("Error in export conf.", e);
             }
@@ -186,7 +135,7 @@ public class DiagnosisInfoCLI extends AbstractApplication {
             FileUtils.writeStringToFile(new File(basicDir, "lsb_release"), output);
             output = KylinVersion.getKylinClientInformation();
             FileUtils.writeStringToFile(new File(basicDir, "client"), output);
-            output = getHBaseMetaStoreId();
+            output = ToolUtil.getHBaseMetaStoreId();
             FileUtils.writeStringToFile(new File(basicDir, "client"), output, true);
 
         } catch (Exception e) {
@@ -194,68 +143,8 @@ public class DiagnosisInfoCLI extends AbstractApplication {
         }
 
         // export logs
-        if (logPeriod > 0) {
-            logger.info("Start to extract kylin logs in {} days", logPeriod);
-
-            final File kylinLogDir = new File(KylinConfig.getKylinHome(), "logs");
-            final File exportLogsDir = new File(exportDir, "logs");
-            final ArrayList<File> logFiles = Lists.newArrayList();
-            final long logThresholdTime = System.currentTimeMillis() - logPeriod * 24 * 3600 * 1000;
-
-            FileUtils.forceMkdir(exportLogsDir);
-            for (File logFile : kylinLogDir.listFiles()) {
-                if (logFile.lastModified() > logThresholdTime) {
-                    logFiles.add(logFile);
-                }
-            }
-
-            for (File logFile : logFiles) {
-                logger.info("Log file:" + logFile.getAbsolutePath());
-                if (logFile.exists()) {
-                    FileUtils.copyFileToDirectory(logFile, exportLogsDir);
-                }
-            }
-        }
-
-        // compress to zip package
-        if (compress) {
-            File tempZipFile = File.createTempFile("diagnosis_", ".zip");
-            ZipFileUtils.compressZipFile(exportDir.getAbsolutePath(), tempZipFile.getAbsolutePath());
-            FileUtils.cleanDirectory(exportDir);
-
-            File zipFile = new File(exportDir, packageName + ".zip");
-            FileUtils.moveFile(tempZipFile, zipFile);
-            exportDest = zipFile.getAbsolutePath();
-            exportDir = new File(exportDest);
-        }
-
-        StringBuffer output = new StringBuffer();
-        output.append("\n========================================");
-        output.append("\nDiagnosis package locates at: \n" + exportDir.getAbsolutePath());
-        output.append("\n========================================");
-        logger.info(output.toString());
-    }
-
-    public String getExportDest() {
-        return exportDest;
-    }
-
-    private String getConfFolder() {
-        String path = System.getProperty(KylinConfig.KYLIN_CONF);
-        if (StringUtils.isNotEmpty(path)) {
-            return path;
-        }
-        path = KylinConfig.getKylinHome();
-        if (StringUtils.isNotEmpty(path)) {
-            return path + File.separator + "conf";
-        }
-        return null;
-    }
-
-    private String getHBaseMetaStoreId() throws IOException {
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration()));
-        String metaStoreName = kylinConfig.getMetadataUrlPrefix();
-        HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(metaStoreName));
-        return "MetaStore UUID: " + desc.getValue(HBaseConnection.HTABLE_UUID_TAG);
+        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false", "-quiet", "false" };
+        KylinLogExtractor logExtractor = new KylinLogExtractor();
+        logExtractor.execute(logsArgs);
     }
 }
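
With the option handling moved into AbstractInfoExtractor, the CLI can also be driven programmatically, much as DiagnosisService now does. A hedged sketch; the project name, destination directory and flag values are placeholders, not taken from this commit:

import org.apache.kylin.tool.DiagnosisInfoCLI;

// Hypothetical driver for a project diagnosis dump; requires a configured Kylin environment.
public class DumpProjectDiagnosis {
    public static void main(String[] args) {
        DiagnosisInfoCLI cli = new DiagnosisInfoCLI();
        cli.execute(new String[] {
                "-project", "learn_kylin",   // example project name
                "-destDir", "/tmp/diag",     // example output directory
                "-includeHBase", "false"     // e.g. skip HBase extraction when run off-cluster
        });
        System.out.println("Diagnosis package at: " + cli.getExportDest());
    }
}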

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
index 8d69805..0dbe87e 100644
--- a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
@@ -26,14 +26,12 @@ import java.util.List;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeInstance;
@@ -50,15 +48,13 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
 
-public class HBaseUsageExtractor extends AbstractApplication {
+public class HBaseUsageExtractor extends AbstractInfoExtractor {
 
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
     @SuppressWarnings("static-access")
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
     @SuppressWarnings("static-access")
     private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(false).withDescription("specify the dest dir to save the related metadata").create("destDir");
 
     private List<String> htables = Lists.newArrayList();
     private Configuration conf;
@@ -66,10 +62,9 @@ public class HBaseUsageExtractor extends AbstractApplication {
     private RealizationRegistry realizationRegistry;
     private KylinConfig kylinConfig;
     private ProjectManager projectManager;
-    private Options options = null;
 
     public HBaseUsageExtractor() {
-        options = new Options();
+        super();
 
         OptionGroup realizationOrProject = new OptionGroup();
         realizationOrProject.addOption(OPTION_CUBE);
@@ -77,8 +72,6 @@ public class HBaseUsageExtractor extends AbstractApplication {
         realizationOrProject.setRequired(true);
 
         options.addOptionGroup(realizationOrProject);
-        options.addOption(OPTION_DEST);
-
         conf = HBaseConfiguration.create();
     }
 
@@ -87,11 +80,6 @@ public class HBaseUsageExtractor extends AbstractApplication {
         extractor.execute(args);
     }
 
-    @Override
-    protected Options getOptions() {
-        return options;
-    }
-
     private String getHBaseMasterUrl() throws IOException, KeeperException {
         String host = conf.get("hbase.master.info.bindAddress");
         if (host.equals("0.0.0.0")) {
@@ -103,20 +91,7 @@ public class HBaseUsageExtractor extends AbstractApplication {
     }
 
     @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
-        String dest = null;
-        if (optionsHelper.hasOption(OPTION_DEST)) {
-            dest = optionsHelper.getOptionValue(OPTION_DEST);
-        }
-
-        if (org.apache.commons.lang3.StringUtils.isEmpty(dest)) {
-            throw new RuntimeException("destDir is not set, exit directly without extracting");
-        }
-
-        if (!dest.endsWith("/")) {
-            dest = dest + "/";
-        }
-
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         kylinConfig = KylinConfig.getInstanceFromEnv();
         cubeManager = CubeManager.getInstance(kylinConfig);
         realizationRegistry = RealizationRegistry.getInstance(kylinConfig);
@@ -142,13 +117,11 @@ public class HBaseUsageExtractor extends AbstractApplication {
             }
         }
 
-        extractCommonInfo(dest);
-        extractHTables(dest);
-
-        logger.info("Extracted metadata files located at: " + new File(dest).getAbsolutePath());
+        extractCommonInfo(exportDir);
+        extractHTables(exportDir);
     }
 
-    private void extractHTables(String dest) throws IOException {
+    private void extractHTables(File dest) throws IOException {
         logger.info("These htables are going to be extracted:");
         for (String htable : htables) {
             logger.info(htable + "(required)");
@@ -168,7 +141,7 @@ public class HBaseUsageExtractor extends AbstractApplication {
         }
     }
 
-    private void extractCommonInfo(String dest) throws IOException {
+    private void extractCommonInfo(File dest) throws IOException {
         logger.info("The hbase master info/conf are going to be extracted...");
 
         // hbase master page

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
new file mode 100644
index 0000000..63b71d2
--- /dev/null
+++ b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.tool;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.ResourceTool;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.job.constant.ExecutableConstants;
+import org.apache.kylin.job.dao.ExecutableDao;
+import org.apache.kylin.job.dao.ExecutablePO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
+    private static final Logger logger = LoggerFactory.getLogger(JobDiagnosisInfoCLI.class);
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true).withDescription("specify the Job ID to extract information. ").create("jobId");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg().isRequired(false).withDescription("set this to true if want to extract related cube info too. Default true").create("includeCube");
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg().isRequired(false).withDescription("set this to true if want to extract related yarn logs too. Default true").create("includeYarnLogs");
+
+    private KylinConfig kylinConfig;
+    private ExecutableDao executableDao;
+
+    List<String> requiredResources = Lists.newArrayList();
+    List<String> yarnLogsResources = Lists.newArrayList();
+
+    public JobDiagnosisInfoCLI() {
+        super();
+
+        packagePrefix = "job";
+
+        options.addOption(OPTION_JOB_ID);
+        options.addOption(OPTION_INCLUDE_CUBE);
+        options.addOption(OPTION_INCLUDE_YARN_LOGS);
+
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        executableDao = ExecutableDao.getInstance(kylinConfig);
+    }
+
+    @Override
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
+        String jobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
+        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
+        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
+
+        ExecutablePO executablePO = executableDao.getJob(jobId);
+        addRequired(ExecutableDao.pathOfJob(jobId));
+        addRequired(ExecutableDao.pathOfJobOutput(jobId));
+        for (ExecutablePO task : executablePO.getTasks()) {
+            addRequired(ExecutableDao.pathOfJob(task.getUuid()));
+            addRequired(ExecutableDao.pathOfJobOutput(task.getUuid()));
+            if (includeYarnLogs) {
+                yarnLogsResources.add(task.getUuid());
+            }
+        }
+
+        extractResources(exportDir);
+
+        if (includeCube) {
+            String cubeName = executablePO.getParams().get("cubeName");
+            if (!StringUtils.isEmpty(cubeName)) {
+                File metaDir = new File(exportDir, "cube");
+                FileUtils.forceMkdir(metaDir);
+                String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", new File(metaDir, cubeName).getAbsolutePath(), "-includeJobs", "false", "-compress", "false", "-quiet", "false" };
+
+                logger.info("Start to extract related cube: " + StringUtils.join(cubeMetaArgs));
+                CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
+                cubeMetaExtractor.execute(cubeMetaArgs);
+            }
+        }
+
+        if (includeYarnLogs) {
+            logger.info("Start to related yarn job logs: " + jobId);
+            File yarnLogDir = new File(exportDir, "yarn");
+            FileUtils.forceMkdir(yarnLogDir);
+            for (String taskId : yarnLogsResources) {
+                extractYarnLog(taskId, new File(yarnLogDir, jobId), true);
+            }
+        }
+
+        // export kylin logs
+        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false", "-quiet", "false" };
+        KylinLogExtractor logExtractor = new KylinLogExtractor();
+        logExtractor.execute(logsArgs);
+    }
+
+    private void extractResources(File destDir) {
+        logger.info("The resource paths going to be extracted:");
+        for (String s : requiredResources) {
+            logger.info(s + "(required)");
+        }
+
+        try {
+            ResourceStore src = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
+            ResourceStore dst = ResourceStore.getStore(KylinConfig.createInstanceFromUri(destDir.getAbsolutePath()));
+
+            for (String path : requiredResources) {
+                ResourceTool.copyR(src, dst, path);
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Failed to extract job resources. ", e);
+        }
+    }
+
+    private void extractYarnLog(String taskId, File destDir, boolean onlyFail) throws Exception {
+        final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
+        FileUtils.forceMkdir(destDir);
+        if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
+            String applicationId = jobInfo.get(ExecutableConstants.MR_JOB_ID).replace("job", "application");
+            if (!onlyFail || !isYarnAppSucc(applicationId)) {
+                File destFile = new File(destDir, applicationId + ".log");
+                String yarnCmd = "yarn logs -applicationId " + applicationId + " > " + destFile.getAbsolutePath();
+                logger.debug(yarnCmd);
+                kylinConfig.getCliCommandExecutor().execute(yarnCmd);
+            }
+        }
+    }
+
+    private boolean isYarnAppSucc(String applicationId) throws IOException {
+        final String yarnCmd = "yarn application -status " + applicationId;
+        final String cmdOutput = kylinConfig.getCliCommandExecutor().execute(yarnCmd).getSecond();
+        final Map<String, String> params = Maps.newHashMap();
+        final String[] cmdOutputLines = cmdOutput.split("\n");
+        for (String cmdOutputLine : cmdOutputLines) {
+            String[] pair = cmdOutputLine.split(":");
+            params.put(pair[0].trim(), pair[1].trim());
+        }
+        for (Map.Entry<String, String> e : params.entrySet()) {
+            logger.info(e.getKey() + ":" + e.getValue());
+        }
+        if (params.containsKey("Final-State") && params.get("Final-State").equals("SUCCEEDED")) {
+            return true;
+        }
+
+        return false;
+    }
+
+    private void addRequired(String record) {
+        logger.info("adding required resource {}", record);
+        requiredResources.add(record);
+    }
+
+    public static void main(String args[]) {
+        JobDiagnosisInfoCLI extractor = new JobDiagnosisInfoCLI();
+        extractor.execute(args);
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/JobInfoExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/JobInfoExtractor.java
deleted file mode 100644
index 11abc38..0000000
--- a/tool/src/main/java/org/apache/kylin/tool/JobInfoExtractor.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.tool;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.common.persistence.ResourceTool;
-import org.apache.kylin.common.util.AbstractApplication;
-import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.job.constant.ExecutableConstants;
-import org.apache.kylin.job.dao.ExecutableDao;
-import org.apache.kylin.job.dao.ExecutablePO;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-public class JobInfoExtractor extends AbstractApplication {
-    private static final Logger logger = LoggerFactory.getLogger(JobInfoExtractor.class);
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true).withDescription("specify the Job ID to extract information. ").create("jobId");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true).withDescription("specify the dest dir to save the related information").create("destDir");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg().isRequired(false).withDescription("set this to true if want to extract related cube info too. Default true").create("includeCube");
-
-    @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg().isRequired(false).withDescription("set this to true if want to extract related yarn logs too. Default true").create("includeYarnLogs");
-
-    private Options options;
-
-    private KylinConfig kylinConfig;
-    private CubeMetaExtractor cubeMetaExtractor;
-
-    private ExecutableDao executableDao;
-
-    List<String> requiredResources = Lists.newArrayList();
-    List<String> yarnLogsResources = Lists.newArrayList();
-
-    public JobInfoExtractor() {
-        cubeMetaExtractor = new CubeMetaExtractor();
-
-        options = new Options();
-        options.addOption(OPTION_JOB_ID);
-        options.addOption(OPTION_DEST);
-        options.addOption(OPTION_INCLUDE_CUBE);
-        options.addOption(OPTION_INCLUDE_YARN_LOGS);
-
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        executableDao = ExecutableDao.getInstance(kylinConfig);
-    }
-
-    @Override
-    protected Options getOptions() {
-        return options;
-    }
-
-    @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
-        String jobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
-        String dest = optionsHelper.getOptionValue(OPTION_DEST);
-        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
-        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
-
-        if (StringUtils.isEmpty(dest)) {
-            throw new RuntimeException("destDir is not set, exit directly without extracting");
-        }
-
-        if (!dest.endsWith("/")) {
-            dest = dest + "/";
-        }
-
-        ExecutablePO executablePO = executableDao.getJob(jobId);
-        addRequired(ExecutableDao.pathOfJob(jobId));
-        addRequired(ExecutableDao.pathOfJobOutput(jobId));
-        for (ExecutablePO task : executablePO.getTasks()) {
-            addRequired(ExecutableDao.pathOfJob(task.getUuid()));
-            addRequired(ExecutableDao.pathOfJobOutput(task.getUuid()));
-            if (includeYarnLogs) {
-                yarnLogsResources.add(task.getUuid());
-            }
-        }
-        executeExtraction(dest);
-
-        if (includeCube) {
-            String cubeName = executablePO.getParams().get("cubename");
-            String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", dest + "cube_" + cubeName + "/", "-includeJobs", "false" };
-            logger.info("Start to extract related cube: " + StringUtils.join(cubeMetaArgs));
-            cubeMetaExtractor.execute(cubeMetaArgs);
-        }
-
-        if (includeYarnLogs) {
-            logger.info("Start to related yarn job logs: " + jobId);
-            for (String taskId : yarnLogsResources) {
-                extractYarnLog(taskId, dest + "yarn_" + jobId + "/", true);
-            }
-        }
-
-        logger.info("Extracted kylin jobs located at: " + new File(dest).getAbsolutePath());
-    }
-
-    private void executeExtraction(String dest) {
-        logger.info("The resource paths going to be extracted:");
-        for (String s : requiredResources) {
-            logger.info(s + "(required)");
-        }
-
-        try {
-            ResourceStore src = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
-            ResourceStore dst = ResourceStore.getStore(KylinConfig.createInstanceFromUri(dest));
-
-            for (String path : requiredResources) {
-                ResourceTool.copyR(src, dst, path);
-            }
-
-        } catch (IOException e) {
-            throw new RuntimeException("IOException", e);
-        }
-    }
-
-    private void extractYarnLog(String taskId, String dest, boolean onlySucc) throws Exception {
-        final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
-        if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
-            String applicationId = jobInfo.get(ExecutableConstants.MR_JOB_ID).replace("job", "application");
-            if (!onlySucc || isYarnAppSucc(applicationId)) {
-                File destFile = new File(dest + applicationId + ".log");
-
-                String yarnCmd = "yarn logs -applicationId " + applicationId + " > " + destFile.getAbsolutePath();
-                logger.info(yarnCmd);
-                kylinConfig.getCliCommandExecutor().execute(yarnCmd);
-            }
-        }
-    }
-
-    private boolean isYarnAppSucc(String applicationId) throws IOException {
-        final String yarnCmd = "yarn application -status " + applicationId;
-        final String cmdOutput = kylinConfig.getCliCommandExecutor().execute(yarnCmd).getSecond();
-        final String[] cmdOutputLines = cmdOutput.split("\n");
-        for (String cmdOutputLine : cmdOutputLines) {
-            if (cmdOutputLine.equals("Final-State : SUCCEEDED")) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private void addRequired(String record) {
-        logger.info("adding required resource {}", record);
-        requiredResources.add(record);
-    }
-
-    public static void main(String args[]) {
-        JobInfoExtractor extractor = new JobInfoExtractor();
-        extractor.execute(args);
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
new file mode 100644
index 0000000..18a915d
--- /dev/null
+++ b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.kylin.tool;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+public class KylinLogExtractor extends AbstractInfoExtractor {
+    private static final Logger logger = LoggerFactory.getLogger(KylinLogExtractor.class);
+
+    private static final int DEFAULT_LOG_PERIOD = 3;
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false).withDescription("specify how many days of kylin logs to extract. Default " + DEFAULT_LOG_PERIOD + ".").create("logPeriod");
+
+    public KylinLogExtractor() {
+        super();
+
+        packagePrefix = "logs";
+        options.addOption(OPTION_LOG_PERIOD);
+    }
+
+    @Override
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
+        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
+
+        if (logPeriod < 1) {
+            logger.warn("No logs to extract.");
+            return;
+        }
+
+        logger.info("Start to extract kylin logs in {} days", logPeriod);
+
+        final File kylinLogDir = new File(KylinConfig.getKylinHome(), "logs");
+        final ArrayList<File> logFiles = Lists.newArrayList();
+        final long logThresholdTime = System.currentTimeMillis() - logPeriod * 24 * 3600 * 1000;
+
+        for (File logFile : kylinLogDir.listFiles()) {
+            if (logFile.lastModified() > logThresholdTime) {
+                logFiles.add(logFile);
+            }
+        }
+
+        for (File logFile : logFiles) {
+            logger.info("Log file:" + logFile.getAbsolutePath());
+            if (logFile.exists()) {
+                FileUtils.copyFileToDirectory(logFile, exportDir);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e1287f4f/tool/src/main/java/org/apache/kylin/tool/ToolUtil.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ToolUtil.java b/tool/src/main/java/org/apache/kylin/tool/ToolUtil.java
new file mode 100644
index 0000000..021f171
--- /dev/null
+++ b/tool/src/main/java/org/apache/kylin/tool/ToolUtil.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.kylin.tool;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.storage.hbase.HBaseConnection;
+
+public class ToolUtil {
+    public static String getConfFolder() {
+        final String CONF = "conf";
+        String path = System.getProperty(KylinConfig.KYLIN_CONF);
+        if (StringUtils.isNotEmpty(path)) {
+            return path;
+        }
+        path = KylinConfig.getKylinHome();
+        if (StringUtils.isNotEmpty(path)) {
+            return path + File.separator + CONF;
+        }
+        return null;
+    }
+
+    public static String getHBaseMetaStoreId() throws IOException {
+        final HBaseAdmin hbaseAdmin = new HBaseAdmin(HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration()));
+        final String metaStoreName = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
+        final HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(metaStoreName));
+        return "MetaStore UUID: " + desc.getValue(HBaseConnection.HTABLE_UUID_TAG);
+    }
+}