Posted to commits@kylin.apache.org by lu...@apache.org on 2015/07/21 10:05:35 UTC

[29/50] [abbrv] incubator-kylin git commit: KYLIN-792, add performance module

KYLIN-792, add performance module


Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/a935c7ba
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/a935c7ba
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/a935c7ba

Branch: refs/heads/master
Commit: a935c7bae805b2e93736e8f7cb93dc0f0e6f467d
Parents: cbcfd59
Author: jiazhong <ji...@ebay.com>
Authored: Wed May 27 23:01:10 2015 +0800
Committer: jiazhong <ji...@ebay.com>
Committed: Thu Jul 2 19:51:40 2015 +0800

----------------------------------------------------------------------
 conf/kylin.properties                           |   12 +
 .../test_case_data/sandbox/kylin.properties     |   14 +
 monitor/pom.xml                                 |  169 +
 .../apache/kylin/monitor/ApiRequestParser.java  |  241 +
 .../java/org/apache/kylin/monitor/Client.java   |   63 +
 .../org/apache/kylin/monitor/ConfigUtils.java   |  220 +
 .../org/apache/kylin/monitor/DebugClient.java   |   62 +
 .../org/apache/kylin/monitor/FileUtils.java     |  116 +
 .../apache/kylin/monitor/HiveJdbcClient.java    |  223 +
 .../kylin/monitor/MonitorMetaManager.java       |  217 +
 .../org/apache/kylin/monitor/QueryParser.java   |  249 +
 monitor/src/main/resources/log4j.properties     |   31 +
 .../org/apache/kylin/monitor/ParseLogTest.java  |   77 +
 pom.xml                                         |    1 +
 script/prepare.sh                               |    2 +
 server/pom.xml                                  |    5 +
 .../rest/controller/PerformanceController.java  |  125 +
 .../kylin/rest/filter/KylinApiFilter.java       |  121 +
 .../apache/kylin/rest/service/CubeService.java  |    4 +
 .../kylin/rest/service/PerformService.java      |  123 +
 server/src/main/resources/kylinSecurity.xml     |    1 +
 server/src/main/webapp/WEB-INF/web.xml          |  122 +-
 webapp/app/css/AdminLTE.css                     | 4782 ++++++++++--------
 webapp/app/index.html                           |   16 +-
 webapp/app/js/app.js                            |    2 +-
 webapp/app/js/controllers/dashboard.js          |  237 +
 webapp/app/js/services/dashboard.js             |   32 +
 webapp/app/js/services/users.js                 |   13 +-
 webapp/app/less/app.less                        |   17 +
 webapp/app/partials/admin/admin.html            |    3 +
 webapp/app/partials/dashboard.html              |  228 +
 webapp/app/partials/header.html                 |    2 +-
 webapp/app/routes.json                          |    7 +
 webapp/bower.json                               |   10 +-
 webapp/grunt.json                               |    2 +
 35 files changed, 5391 insertions(+), 2158 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/conf/kylin.properties b/conf/kylin.properties
index cf9756e..b1ebccc 100644
--- a/conf/kylin.properties
+++ b/conf/kylin.properties
@@ -101,3 +101,15 @@ deploy.env=DEV
 
 ###########################config info for sandbox#######################
 kylin.sandbox=true
+
+
+###########################config info for kylin monitor#######################
+# hive jdbc url
+kylin.hive.jdbc.connection.url=
+
+#extra directories to scan for query logs, comma-separated; $KYLIN_HOME/tomcat/logs/ is always read as well
+ext.log.base.dir = /tmp/kylin_log1,/tmp/kylin_log2
+
+#an external hive table is created over the query parse-result csv file
+#defaults to kylin_query_log if not configured here
+query.log.parse.result.table = kylin_query_log
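
For reference, a rough sketch of how these three settings are consumed (MonitorConfigDemo is illustrative and not part of this commit; the real loading lives in ConfigUtils below, which resolves the properties file via KYLIN_CONF or $KYLIN_HOME/conf):

    import java.io.FileInputStream;
    import java.util.Properties;

    public class MonitorConfigDemo {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            //path is illustrative; ConfigUtils resolves the real location
            props.load(new FileInputStream("conf/kylin.properties"));
            String jdbcUrl = props.getProperty("kylin.hive.jdbc.connection.url");
            //comma-separated extra log dirs, read in addition to $KYLIN_HOME/tomcat/logs/
            String[] extLogDirs = props.getProperty("ext.log.base.dir", "").split(",");
            String table = props.getProperty("query.log.parse.result.table", "kylin_query_log");
            System.out.println(jdbcUrl + " | " + extLogDirs.length + " ext dirs | " + table);
        }
    }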

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index efaef5c..8f73f64 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -102,3 +102,17 @@ deploy.env=DEV
 ###########################config info for sandbox#######################
 kylin.sandbox=true
 
+
+###########################config info for kylin monitor#######################
+# hive jdbc url
+kylin.hive.jdbc.connection.url= jdbc:hive2://sandbox:10000
+
+#extra directories to scan for query logs, comma-separated; $KYLIN_HOME/tomcat/logs/ is always read as well
+ext.log.base.dir = /tmp/kylin_log1,/tmp/kylin_log2
+
+#an external hive table is created over the query parse-result csv file
+#defaults to kylin_query_log if not configured here
+query.log.parse.result.table = kylin_query_log
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/pom.xml
----------------------------------------------------------------------
diff --git a/monitor/pom.xml b/monitor/pom.xml
new file mode 100644
index 0000000..a6fc87c
--- /dev/null
+++ b/monitor/pom.xml
@@ -0,0 +1,169 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.kylin</groupId>
+        <artifactId>kylin</artifactId>
+        <version>0.7.2-incubating-SNAPSHOT</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>kylin-monitor</artifactId>
+    <packaging>jar</packaging>
+    <name>Kylin:Monitor</name>
+    <url>http://maven.apache.org</url>
+
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <hadoop.version>2.6.0</hadoop.version>
+        <hive.version>0.14.0</hive.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>1.2.17</version>
+        </dependency>
+        <dependency>
+            <groupId>net.sf.opencsv</groupId>
+            <artifactId>opencsv</artifactId>
+            <version>2.3</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-auth</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <!--hbase dependency-->
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-hadoop2-compat</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-server</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <!--hbase dependency-->
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-hadoop2-compat</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-server</artifactId>
+            <version>${hbase-hadoop2.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <!-- Hive dependencies -->
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <version>${hive.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <mainClass>org.apache.kylin.monitor.Client</mainClass>
+                        </manifest>
+                    </archive>
+
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                    <appendAssemblyId>false</appendAssemblyId>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/ApiRequestParser.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/ApiRequestParser.java b/monitor/src/main/java/org/apache/kylin/monitor/ApiRequestParser.java
new file mode 100644
index 0000000..0321549
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/ApiRequestParser.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.monitor;
+
+import au.com.bytecode.opencsv.CSVWriter;
+import org.apache.commons.io.filefilter.RegexFileFilter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.log4j.Logger;
+
+import java.io.*;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * @author jiazhong
+ */
+public class ApiRequestParser {
+
+    final static Logger logger = Logger.getLogger(ApiRequestParser.class);
+    final static Charset ENCODING = StandardCharsets.UTF_8;
+    static String REQUEST_PARSE_RESULT_PATH = null;
+    final static String REQUEST_LOG_FILE_PATTERN = "kylin_request.log.(\\d{4}-\\d{2}-\\d{2})$";
+    final static String REQUEST_LOG_PARSE_RESULT_FILENAME = "kylin_request_log.csv";
+    static String DEPLOY_ENV;
+
+
+    final static String[] KYLIN_REQUEST_CSV_HEADER = {"REQUESTER", "REQ_TIME", "REQ_DATE", "URI", "METHOD", "QUERY_STRING", "PAYLOAD", "RESP_STATUS", "TARGET", "ACTION", "DEPLOY_ENV"};
+
+    private ConfigUtils monitorConfig;
+
+    public ApiRequestParser() {
+        monitorConfig = ConfigUtils.getInstance();
+        try {
+            monitorConfig.loadMonitorParam();
+            DEPLOY_ENV = monitorConfig.getDeployEnv();
+        } catch (IOException e) {
+            logger.error("Failed to load monitor config:", e);
+        }
+    }
+
+    public void start() throws IOException, ParseException {
+        ApiRequestParser.REQUEST_PARSE_RESULT_PATH = ConfigUtils.getInstance().getRequestLogParseResultDir() + REQUEST_LOG_PARSE_RESULT_FILENAME;
+        this.parseRequestInit();
+
+        //get the api request log files that have already been parsed
+        String[] hasReadFiles = MonitorMetaManager.getReadApiReqLogFileList();
+
+        List<File> files = this.getRequestLogFiles();
+        for (File file : files) {
+            if (!Arrays.asList(hasReadFiles).contains(file.getName())) {
+                this.parseRequestLog(file.getPath(), ApiRequestParser.REQUEST_PARSE_RESULT_PATH);
+                MonitorMetaManager.markApiReqLogFileAsRead(file.getName());
+            }
+        }
+    }
+
+    public void parseRequestInit() throws IOException {
+        logger.info("parse api request initializing...");
+        FileSystem fs = null;
+        try {
+            Configuration conf = new Configuration();
+            fs = FileSystem.get(conf);
+            org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(ApiRequestParser.REQUEST_PARSE_RESULT_PATH);
+            if (!fs.exists(path)) {
+                fs.create(path);
+
+                //need to close before getting the FileSystem again
+                fs.close();
+                this.writeResultToHdfs(ApiRequestParser.REQUEST_PARSE_RESULT_PATH, ApiRequestParser.KYLIN_REQUEST_CSV_HEADER);
+            }
+        } catch (Exception e) {
+            if (fs != null) {
+                fs.close();
+            }
+            logger.error("Failed to init:", e);
+        }
+    }
+
+    //parse the api request log and write the result as a csv file on hdfs
+    public void parseRequestLog(String filePath, String dPath) throws ParseException, IOException {
+
+        logger.info("Start parsing kylin api request file " + filePath + " !");
+
+        //writer config init
+        FileSystem fs = this.getHdfsFileSystem();
+        org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+        OutputStreamWriter writer = new OutputStreamWriter(fs.append(resultStorePath));
+        CSVWriter cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);
+
+        Pattern p_available = Pattern.compile("/kylin/api/(cubes|user)+.*");
+        Pattern p_request = Pattern.compile("^.*\\[.*KylinApiFilter.logRequest.*\\].*REQUEST:.*REQUESTER=(.*);REQ_TIME=(\\w+ (\\d{4}-\\d{2}-\\d{2}).*);URI=(.*);METHOD=(.*);QUERY_STRING=(.*);PAYLOAD=(.*);RESP_STATUS=(.*);$");
+        Pattern p_uri = Pattern.compile("/kylin/api/(\\w+)(/.*/)*(.*)$");
+        Matcher m_available = p_available.matcher("");
+        Matcher m_request = p_request.matcher("");
+        Matcher m_uri = p_uri.matcher("");
+
+        Path path = Paths.get(filePath);
+        BufferedReader reader = null;
+        try {
+            reader = Files.newBufferedReader(path, ENCODING);
+            String line = null;
+            while ((line = reader.readLine()) != null) {
+                //reset the input
+                m_available.reset(line);
+                m_request.reset(line);
+
+                //filter unnecessary info
+                if (m_available.find()) {
+                    //filter GET info
+                    if (m_request.find() && !m_request.group(5).equals("GET")) {
+
+                        List<String> groups = new ArrayList<String>();
+                        for (int i = 1; i <= m_request.groupCount(); i++) {
+                            groups.add(m_request.group(i));
+                        }
+
+                        String uri = m_request.group(4);
+                        m_uri.reset(uri);
+                        if (m_uri.find()) {
+
+                            //add target
+                            groups.add(m_uri.group(1));
+
+                            //add action
+                            if (m_uri.group(1).equals("cubes")) {
+                                switch (m_request.group(5)) {
+                                    case "DELETE":
+                                        groups.add("drop");
+                                        break;
+                                    case "POST":
+                                        groups.add("save");
+                                        break;
+                                    default:
+                                        //add parse action
+                                        groups.add(m_uri.group(3));
+                                        break;
+                                }
+                            }
+
+                        }
+                        groups.add(DEPLOY_ENV);
+                        String[] recordArray = groups.toArray(new String[groups.size()]);
+                        //write to hdfs
+                        cwriter.writeNext(recordArray);
+                    }
+                }
+
+
+            }
+        } catch (IOException ex) {
+            logger.error("Failed to parse " + filePath, ex);
+        } finally {
+            if (reader != null) {
+                reader.close();
+            }
+            //closing the csv writer also closes the wrapped stream writer
+            cwriter.close();
+            fs.close();
+        }
+
+        logger.info("Finish parsing file " + filePath + " !");
+    }
+
+    public void writeResultToHdfs(String dPath, String[] record) throws IOException {
+        OutputStreamWriter writer = null;
+        CSVWriter cwriter = null;
+        FileSystem fs = null;
+        try {
+
+            fs = this.getHdfsFileSystem();
+            org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+            writer = new OutputStreamWriter(fs.append(resultStorePath));
+            cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);
+            cwriter.writeNext(record);
+
+        } catch (IOException e) {
+            logger.error("Failed to write to " + dPath, e);
+        } finally {
+            if (cwriter != null) {
+                //closing the csv writer also closes the wrapped stream writer
+                cwriter.close();
+            }
+            if (fs != null) {
+                fs.close();
+            }
+        }
+    }
+
+    public List<File> getRequestLogFiles() {
+        List<File> logFiles = new ArrayList<File>();
+
+        List<String> request_log_dir_list = monitorConfig.getLogBaseDir();
+        FileFilter filter = new RegexFileFilter(REQUEST_LOG_FILE_PATTERN);
+
+        for (String path : request_log_dir_list) {
+            logger.info("fetch api request log file from path:" + path);
+            File request_log_dir = new File(path);
+            File[] request_log_files = request_log_dir.listFiles(filter);
+            if (request_log_files == null) {
+                logger.warn("no api request log file found under path " + path);
+                continue;
+            }
+            Collections.addAll(logFiles, request_log_files);
+        }
+
+        return logFiles;
+    }
+
+    public FileSystem getHdfsFileSystem() throws IOException {
+        Configuration conf = new Configuration();
+//        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
+        FileSystem fs = null;
+        try {
+            fs = FileSystem.get(conf);
+        } catch (IOException e) {
+            logger.error("Failed to get hdfs FileSystem", e);
+            throw e;
+        }
+        return fs;
+    }
+
+}
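
The p_request pattern above is easiest to read against a concrete line. A minimal sketch; the sample log line below is hypothetical, shaped to match the REQUEST output that KylinApiFilter (later in this commit) appears to emit:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RequestPatternDemo {
        public static void main(String[] args) {
            Pattern p = Pattern.compile("^.*\\[.*KylinApiFilter.logRequest.*\\].*REQUEST:.*REQUESTER=(.*);REQ_TIME=(\\w+ (\\d{4}-\\d{2}-\\d{2}).*);URI=(.*);METHOD=(.*);QUERY_STRING=(.*);PAYLOAD=(.*);RESP_STATUS=(.*);$");
            //hypothetical log line
            String line = "[http-bio-7070-exec-1] DEBUG [org.apache.kylin.rest.filter.KylinApiFilter.logRequest()] - REQUEST: REQUESTER=ADMIN;REQ_TIME=Wed 2015-05-27 23:01:10;URI=/kylin/api/cubes;METHOD=POST;QUERY_STRING=null;PAYLOAD={};RESP_STATUS=200;";
            Matcher m = p.matcher(line);
            if (m.find()) {
                //group 1=REQUESTER, 3=request date, 4=URI, 5=METHOD, 8=RESP_STATUS
                System.out.println(m.group(1) + " " + m.group(3) + " " + m.group(5) + " " + m.group(4));
            }
        }
    }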

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/Client.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/Client.java b/monitor/src/main/java/org/apache/kylin/monitor/Client.java
new file mode 100644
index 0000000..eadce54
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/Client.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+
+
+/**
+ * Created by jiazhong on 2015/5/7.
+ */
+public class Client {
+
+
+    static {
+        //set monitor log path
+        String KYLIN_HOME = ConfigUtils.getKylinHome();
+        String CATALINA_HOME = null;
+        if (!StringUtils.isEmpty(KYLIN_HOME)) {
+            CATALINA_HOME = KYLIN_HOME + File.separator + "tomcat" + File.separator;
+            System.out.println("will use " + CATALINA_HOME + "logs to store monitor logs");
+        } else {
+            CATALINA_HOME = "";
+            System.out.println("will use the default path to store monitor logs");
+        }
+        //log4j config will use this
+        System.setProperty("CATALINA_HOME",CATALINA_HOME);
+    }
+    final static Logger logger = Logger.getLogger(Client.class);
+
+
+
+    public static void main(String[] args) {
+        logger.info("monitor client start parsing...");
+        QueryParser queryParser = new QueryParser();
+        HiveJdbcClient jdbcClient = new HiveJdbcClient();
+        try {
+            queryParser.start();
+            jdbcClient.start();
+        } catch (Exception e) {
+            logger.info("Exception",e);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/ConfigUtils.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/ConfigUtils.java b/monitor/src/main/java/org/apache/kylin/monitor/ConfigUtils.java
new file mode 100644
index 0000000..f38b701
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/ConfigUtils.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import java.io.*;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * Created by jiazhong on 2015/4/28.
+ */
+public class ConfigUtils {
+
+    final static Logger logger = Logger.getLogger(ConfigUtils.class);
+
+    private static ConfigUtils ourInstance = new ConfigUtils();
+
+    private Properties monitorConfig = new Properties();
+
+    public static ConfigUtils getInstance() {
+        return ourInstance;
+    }
+
+    private ConfigUtils() {
+    }
+
+    public static final String KYLIN_EXT_LOG_BASE_DIR = "ext.log.base.dir";
+    public static final String KYLIN_METADATA_URL = "kylin.metadata.url";
+
+    public static final String KYLIN_HOME = "KYLIN_HOME";
+    public static final String KYLIN_CONF = "KYLIN_CONF";
+    public static final String KYLIN_LOG_CONF_HOME = "KYLIN_LOG_CONF_HOME";
+    public static final String CATALINA_HOME = "CATALINA_HOME";
+
+    public static final String KYLIN_HDFS_WORKING_DIR = "kylin.hdfs.working.dir";
+
+    public static final String KYLIN_MONITOR_CONF_PROP_FILE = "kylin.properties";
+    public static final String QUERY_LOG_PARSE_RESULT_TABLE = "query.log.parse.result.table";
+    public static final String DEFAULT_QUERY_LOG_PARSE_RESULT_TABLE = "kylin_query_log";
+
+    public static final String DEPLOY_ENV = "deploy.env";
+
+
+    public static final String HIVE_JDBC_CON_URL = "kylin.hive.jdbc.connection.url";
+
+
+    public void loadMonitorParam() throws IOException {
+        Properties props = new Properties();
+        InputStream resourceStream = getKylinPropertiesAsInputStream();
+        props.load(resourceStream);
+        this.monitorConfig = props;
+    }
+
+
+    public static InputStream getKylinPropertiesAsInputStream() {
+        File propFile = getKylinMonitorProperties();
+        if (propFile == null || !propFile.exists()) {
+            logger.error("fail to locate kylin.properties");
+            throw new RuntimeException("fail to locate kylin.properties");
+        }
+        try {
+            return new FileInputStream(propFile);
+        } catch (FileNotFoundException e) {
+            logger.error("this should not happen");
+            throw new RuntimeException(e);
+        }
+
+    }
+
+
+    private static File getKylinMonitorProperties() {
+        String kylinConfHome = System.getProperty(KYLIN_CONF);
+        if (!StringUtils.isEmpty(kylinConfHome)) {
+            logger.info("load kylin.properties file from " + kylinConfHome + ". (from KYLIN_CONF System.property)");
+            return getKylinPropertiesFile(kylinConfHome);
+        }
+
+        logger.warn("KYLIN_CONF property was not set, will seek KYLIN_HOME env variable");
+
+        String kylinHome = getKylinHome();
+        if (StringUtils.isEmpty(kylinHome))
+            throw new RuntimeException("Didn't find KYLIN_CONF or KYLIN_HOME, please set one of them");
+
+        String path = kylinHome + File.separator + "conf";
+        logger.info("load kylin.properties file from " + kylinHome+". (from KYLIN_HOME System.env)");
+        return getKylinPropertiesFile(path);
+
+    }
+
+    public static String getKylinHome() {
+        String kylinHome = System.getenv(KYLIN_HOME);
+        if (StringUtils.isEmpty(kylinHome)) {
+            logger.warn("KYLIN_HOME was not set");
+            return kylinHome;
+        }
+        logger.info("KYLIN_HOME is :"+kylinHome);
+        return kylinHome;
+    }
+
+    private static File getKylinPropertiesFile(String path) {
+        if (path == null) {
+            return null;
+        }
+        return new File(path, KYLIN_MONITOR_CONF_PROP_FILE);
+    }
+
+
+    /*
+     * get the directories to scan for log files
+     */
+    public List<String> getLogBaseDir() {
+        List<String> logDirList = new ArrayList<String>();
+
+        String kylinLogConfHome = System.getProperty(KYLIN_LOG_CONF_HOME);
+        if (!StringUtils.isEmpty(kylinLogConfHome)) {
+            logger.info("Use KYLIN_LOG_CONF_HOME=" + KYLIN_LOG_CONF_HOME);
+            logDirList.add(kylinLogConfHome);
+        }
+
+        String kylinExtLogBaseDir = getExtLogBaseDir();
+        if (!StringUtils.isEmpty(kylinExtLogBaseDir)) {
+            String[] extPaths = kylinExtLogBaseDir.split(",");
+            for(String path:extPaths){
+                if(!StringUtils.isEmpty(path)){
+                    logger.info("Use ext log dir=" + path);
+                    logDirList.add(path.trim());
+                }
+            }
+        }
+
+        String kylinHome = getKylinHome();
+        if (!StringUtils.isEmpty(kylinHome)) {
+            String path = kylinHome + File.separator + "tomcat" + File.separator + "logs";
+            logDirList.add(path);
+        } else if (logDirList.isEmpty()) {
+            throw new RuntimeException("Didn't find KYLIN_CONF or KYLIN_HOME or KYLIN_EXT_LOG_BASE_DIR, please set one of them");
+        }
+
+        return logDirList;
+    }
+
+
+    public  String getMetadataUrl(){
+        return this.monitorConfig.getProperty(KYLIN_METADATA_URL);
+    }
+
+    public String getExtLogBaseDir() {
+        return this.monitorConfig.getProperty(KYLIN_EXT_LOG_BASE_DIR);
+    }
+
+    public String getKylinHdfsWorkingDir() {
+        return this.monitorConfig.getProperty(KYLIN_HDFS_WORKING_DIR);
+    }
+    public String getQueryLogParseResultDir() {
+        return this.getKylinHdfsWorkingDir()+"/performance/query/";
+    }
+
+    public String getQueryLogResultTable() {
+        String query_log_parse_result_table = this.monitorConfig.getProperty(QUERY_LOG_PARSE_RESULT_TABLE);
+        if(!StringUtils.isEmpty(query_log_parse_result_table)){
+            return query_log_parse_result_table;
+        }else{
+            return DEFAULT_QUERY_LOG_PARSE_RESULT_TABLE;
+        }
+    }
+
+    public String getRequestLogParseResultDir() {
+        return this.getKylinHdfsWorkingDir()+"/performance/request/";
+    }
+
+    public String getHiveJdbcConUrl() {
+        return this.monitorConfig.getProperty(HIVE_JDBC_CON_URL);
+    }
+
+    public String getLogParseResultMetaDir() {
+        return this.getKylinHdfsWorkingDir()+"/performance/metadata/";
+    }
+
+    public String getDeployEnv(){
+        return this.monitorConfig.getProperty(DEPLOY_ENV);
+    }
+
+
+    public static void addClasspath(String path) throws Exception {
+        File file = new File(path);
+
+        if (file.exists()) {
+            URLClassLoader urlClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
+            Class<URLClassLoader> urlClass = URLClassLoader.class;
+            Method method = urlClass.getDeclaredMethod("addURL", new Class[] { URL.class });
+            method.setAccessible(true);
+            method.invoke(urlClassLoader, new Object[] { file.toURI().toURL() });
+        }
+    }
+}
\ No newline at end of file
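
A short usage sketch for the lookup order implemented above (the path is hypothetical; KYLIN_CONF is consulted first, then $KYLIN_HOME/conf):

    import org.apache.kylin.monitor.ConfigUtils;

    public class ConfigLookupDemo {
        public static void main(String[] args) throws Exception {
            //hypothetical location; without this property, $KYLIN_HOME/conf/kylin.properties is used
            System.setProperty("KYLIN_CONF", "/etc/kylin");
            ConfigUtils config = ConfigUtils.getInstance();
            config.loadMonitorParam();
            System.out.println(config.getHiveJdbcConUrl());
            //ext.log.base.dir entries plus $KYLIN_HOME/tomcat/logs when KYLIN_HOME is set
            System.out.println(config.getLogBaseDir());
        }
    }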

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/DebugClient.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/DebugClient.java b/monitor/src/main/java/org/apache/kylin/monitor/DebugClient.java
new file mode 100644
index 0000000..a7230e6
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/DebugClient.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.apache.log4j.Logger;
+import java.io.File;
+
+
+/**
+ * Created by jiazhong on 2015/5/7
+ */
+public class DebugClient {
+
+    static{
+        //set catalina.home temp
+        System.setProperty(ConfigUtils.CATALINA_HOME, "../server/");
+    }
+
+    final static Logger logger = Logger.getLogger(DebugClient.class);
+
+
+    public static void main(String[] args) throws Exception {
+
+        // test_case_data/sandbox/ contains HDP 2.2 site xmls which is dev sandbox
+        ConfigUtils.addClasspath(new File("../examples/test_case_data/sandbox").getAbsolutePath());
+
+        //set the log base dir; logs are also read from $KYLIN_HOME/tomcat/logs and from [ext.log.base.dir] in kylin.properties
+        System.setProperty(ConfigUtils.KYLIN_LOG_CONF_HOME, "../server/logs");
+
+        //set where to find kylin.properties; if not set, it is read from $KYLIN_HOME/conf/
+        System.setProperty(ConfigUtils.KYLIN_CONF, "../examples/test_case_data/sandbox");
+
+
+        QueryParser queryParser = new QueryParser();
+        HiveJdbcClient jdbcClient = new HiveJdbcClient();
+
+        try {
+            queryParser.start();
+            jdbcClient.start();
+        } catch (Exception e) {
+            logger.info("Exception ",e);
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/FileUtils.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/FileUtils.java b/monitor/src/main/java/org/apache/kylin/monitor/FileUtils.java
new file mode 100644
index 0000000..555409b
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/FileUtils.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import au.com.bytecode.opencsv.CSVWriter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.Logger;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+
+/**
+ * Created by jiazhong on 2015/6/18.
+ */
+public class FileUtils {
+
+    final static Logger logger = Logger.getLogger(FileUtils.class);
+
+    public static boolean pathCheck(String filePath) throws IOException {
+        logger.info("checking file:"+filePath);
+        FileSystem fs = null;
+        try {
+            Configuration conf = new Configuration();
+            fs = FileSystem.get(conf);
+            org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(filePath);
+            if (!fs.exists(path)) {
+                fs.create(path);
+                fs.close();
+                return false;
+            }
+        } catch (Exception e) {
+            if (fs != null) {
+                fs.close();
+            }
+            logger.error("Failed to check path:", e);
+        }
+        return true;
+    }
+
+
+    /*
+    * truncate the hdfs file by re-creating it
+    */
+    public static void clearHdfsFile(String dPath) throws IOException {
+        OutputStreamWriter writer = null;
+        FileSystem fs = null;
+        try {
+            fs = getHdfsFileSystem();
+            org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+            writer = new OutputStreamWriter(fs.create(resultStorePath, true));
+
+        } catch (Exception e) {
+            logger.error("Exception", e);
+        } finally {
+            if (writer != null) {
+                writer.close();
+            }
+            if (fs != null) {
+                fs.close();
+            }
+        }
+    }
+
+    /*
+    * write parse result to hdfs
+    */
+    public static void appendResultToHdfs(String dPath, String[] record) throws IOException {
+        OutputStreamWriter writer = null;
+        CSVWriter cwriter = null;
+        FileSystem fs = null;
+        try {
+            fs = getHdfsFileSystem();
+            org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+            writer = new OutputStreamWriter(fs.append(resultStorePath));
+            cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);
+
+            cwriter.writeNext(record);
+
+        } catch (Exception e) {
+            logger.error("Exception", e);
+        } finally {
+            if (cwriter != null) {
+                //closing the csv writer also closes the wrapped stream writer
+                cwriter.close();
+            }
+            if (fs != null) {
+                fs.close();
+            }
+        }
+    }
+
+    /*
+     * get hdfs fileSystem
+     */
+    public static FileSystem getHdfsFileSystem() throws IOException {
+        Configuration conf = new Configuration();
+//        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
+        FileSystem fs = null;
+        try {
+            fs = FileSystem.newInstance(conf);
+        } catch (IOException e) {
+            logger.error("Failed to get hdfs FileSystem", e);
+            throw e;
+        }
+        return fs;
+    }
+
+}
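
Typical call sequence for these helpers (the csv path is hypothetical, and HDFS append support is assumed to be enabled on the cluster):

    import org.apache.kylin.monitor.FileUtils;

    public class FileUtilsDemo {
        public static void main(String[] args) throws Exception {
            //hypothetical result file under the kylin hdfs working dir
            String csv = "/kylin/performance/metadata/demo.csv";
            FileUtils.pathCheck(csv);        //creates the file on first run
            FileUtils.clearHdfsFile(csv);    //truncates it before a rewrite
            FileUtils.appendResultToHdfs(csv, new String[]{"2015-05-27", "42"});
            //rows come out pipe-delimited, e.g. 2015-05-27|42
        }
    }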

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/HiveJdbcClient.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/HiveJdbcClient.java b/monitor/src/main/java/org/apache/kylin/monitor/HiveJdbcClient.java
new file mode 100644
index 0000000..71a3a68
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/HiveJdbcClient.java
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.apache.log4j.Logger;
+import org.datanucleus.util.StringUtils;
+
+import java.io.IOException;
+import java.sql.*;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+
+/**
+ * Created by jiazhong on 2015/6/17.
+ */
+public class HiveJdbcClient {
+
+    static String SQL_GENERATE_QUERY_LOG_TABLE = "CREATE EXTERNAL TABLE IF NOT EXISTS [QUERY_LOG_TABLE_NAME] (REQUEST_TIME STRING,REQUEST_DATE DATE, QUERY_SQL STRING,QUERY_USER STRING,IS_SUCCESS STRING,QUERY_LATENCY DECIMAL(19,4),QUERY_PROJECT STRING,REALIZATION_NAMES STRING,CUBOID_IDS STRING,TOTAL_SCAN_COUNT INT,RESULT_ROW_COUNT INT,ACCEPT_PARTIAL STRING,IS_PARTIAL_RESULT STRING,HIT_CACHE STRING,MESSAGE STRING,DEPLOY_ENV STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\\n' LOCATION '[QUERY_LOG_PARSE_RESULT_DIR]' TBLPROPERTIES (\"SKIP.HEADER.LINE.COUNT\"=\"1\")";
+
+    static String SQL_TOTAL_QUERY_USER = "SELECT  COUNT(DISTINCT QUERY_USER) FROM [QUERY_LOG_TABLE_NAME]";
+
+    static String SQL_AVG_DAY_QUERY = "SELECT AVG(A.COUNT_QUERY) FROM (SELECT COUNT(*) COUNT_QUERY,REQUEST_DATE FROM  [QUERY_LOG_TABLE_NAME] GROUP BY REQUEST_DATE) A";
+
+    static String SQL_LAST_30_DAILY_QUERY_COUNT = "SELECT REQUEST_DATE, COUNT(*) FROM  [QUERY_LOG_TABLE_NAME]  WHERE  REQUEST_DATE>=[START_DATE] AND REQUEST_DATE<[END_DATE]  GROUP BY REQUEST_DATE";
+
+
+    //last 30 days
+    static String SQL_90_PERCENTILE_LAST_30_DAY = "SELECT PERCENTILE_APPROX(LOG.QUERY_LATENCY,0.9) FROM (SELECT QUERY_LATENCY FROM [QUERY_LOG_TABLE_NAME]  WHERE IS_SUCCESS='true' AND REQUEST_DATE>=[START_DATE] AND REQUEST_DATE<[END_DATE]) LOG";
+
+    //0.9,0.95 [each day] percentile in last 30 days
+    static String SQL_EACH_DAY_PERCENTILE = "SELECT REQUEST_DATE, PERCENTILE_APPROX(QUERY_LATENCY,ARRAY(0.9,0.95)),COUNT(*) FROM  [QUERY_LOG_TABLE_NAME]  WHERE IS_SUCCESS='true' AND REQUEST_DATE>=[START_DATE] AND REQUEST_DATE<[END_DATE]  GROUP BY REQUEST_DATE";
+
+    //0.9,0.95 [project] percentile in last 30 days
+    static String SQL_DAY_PERCENTILE_BY_PROJECT = "SELECT QUERY_PROJECT, PERCENTILE_APPROX(QUERY_LATENCY,ARRAY(0.9,0.95)) FROM  [QUERY_LOG_TABLE_NAME]  WHERE IS_SUCCESS='true' AND  REQUEST_DATE>=[START_DATE] AND REQUEST_DATE<[END_DATE]  GROUP BY QUERY_PROJECT";
+
+
+
+    static String QUERY_LOG_TABLE_NAME = "KYLIN_QUERY_LOG";
+
+    final static Logger logger = Logger.getLogger(HiveJdbcClient.class);
+
+
+    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
+    private static ConfigUtils monitorConfig = ConfigUtils.getInstance();
+
+    static {
+        try {
+            Class.forName(driverName);
+            monitorConfig.loadMonitorParam();
+            QUERY_LOG_TABLE_NAME = monitorConfig.getQueryLogResultTable();
+            if (StringUtils.isEmpty(QUERY_LOG_TABLE_NAME)) {
+                logger.error("table name not defined ,please set param [query.log.parse.result.table] in kylin.properties");
+            }
+
+        } catch (Exception e) {
+            logger.error("Failed to init hive jdbc client:", e);
+        }
+    }
+
+
+    /*
+     * will create external hive table for [query] log parse result csv file on hdfs
+     * and will generate metric data in csv file on hdfs for web dashboard
+     */
+    public void start() throws SQLException, IOException {
+
+        String CON_URL = monitorConfig.getHiveJdbcConUrl();
+
+        Connection con = DriverManager.getConnection(CON_URL, "", "");
+        Statement stmt = con.createStatement();
+        ResultSet res = null;
+
+        SQL_GENERATE_QUERY_LOG_TABLE = generateQueryLogSql();
+        logger.info("Running Sql (Create Table):" + SQL_GENERATE_QUERY_LOG_TABLE);
+        stmt.execute(SQL_GENERATE_QUERY_LOG_TABLE);
+
+        SQL_TOTAL_QUERY_USER = generateUserCountSql();
+        logger.info("Running Sql (Total User):" + SQL_TOTAL_QUERY_USER);
+        res = stmt.executeQuery(SQL_TOTAL_QUERY_USER);
+
+        String total_query_user_path = monitorConfig.getLogParseResultMetaDir() + "total_query_user.csv";
+        FileUtils.pathCheck(total_query_user_path);
+        FileUtils.clearHdfsFile(total_query_user_path);
+        while (res.next()) {
+            FileUtils.appendResultToHdfs(total_query_user_path, new String[]{res.getString(1)});
+            logger.info("Total User:" + res.getString(1));
+        }
+
+        SQL_AVG_DAY_QUERY = generateAvgDayQuery();
+        logger.info("Running Sql (Avg Day Query):" + SQL_AVG_DAY_QUERY);
+        res = stmt.executeQuery(SQL_AVG_DAY_QUERY);
+
+        String avg_day_query_path = monitorConfig.getLogParseResultMetaDir() + "avg_day_query.csv";
+        FileUtils.pathCheck(avg_day_query_path);
+        FileUtils.clearHdfsFile(avg_day_query_path);
+        while (res.next()) {
+            FileUtils.appendResultToHdfs(avg_day_query_path, new String[]{res.getString(1)});
+            logger.info("avg day query:" + res.getString(1));
+        }
+
+
+        SQL_LAST_30_DAILY_QUERY_COUNT = generateLast30DailyQueryCount();
+        logger.info("Running Sql (Daily Query Count):" + SQL_LAST_30_DAILY_QUERY_COUNT);
+        res = stmt.executeQuery(SQL_LAST_30_DAILY_QUERY_COUNT);
+
+        String last_30_daily_query_count_path = monitorConfig.getLogParseResultMetaDir() + "last_30_daily_query_count.csv";
+        FileUtils.pathCheck(last_30_daily_query_count_path);
+        FileUtils.clearHdfsFile(last_30_daily_query_count_path);
+        while (res.next()) {
+            FileUtils.appendResultToHdfs(last_30_daily_query_count_path, new String[]{res.getString(1),res.getString(2)});
+            logger.info("last 30 daily query count:" + res.getString(1)+","+res.getString(2));
+        }
+
+
+        //90 percentile latency for all queries in the last 30 days
+        SQL_90_PERCENTILE_LAST_30_DAY = generateNinetyPercentileSql();
+        logger.info("Running Sql (last 30 days, 90 percentile query latency):" + SQL_90_PERCENTILE_LAST_30_DAY);
+        res = stmt.executeQuery(SQL_90_PERCENTILE_LAST_30_DAY);
+
+        String last_30_day_90_percentile_latency = monitorConfig.getLogParseResultMetaDir() + "last_30_day_90_percentile_latency.csv";
+        FileUtils.pathCheck(last_30_day_90_percentile_latency);
+        FileUtils.clearHdfsFile(last_30_day_90_percentile_latency);
+        while (res.next()) {
+            FileUtils.appendResultToHdfs(last_30_day_90_percentile_latency, new String[]{res.getString(1)});
+            logger.info("last 30 day 90 percentile latency:" + res.getString(1));
+        }
+
+        //per-project 90 and 95 percentile latency for all queries in the last 30 days
+        SQL_DAY_PERCENTILE_BY_PROJECT = generateProjectPercentileSql();
+        logger.info("Running Sql (last 30 days, 90/95 percentile query latency by project):" + SQL_DAY_PERCENTILE_BY_PROJECT);
+        res = stmt.executeQuery(SQL_DAY_PERCENTILE_BY_PROJECT);
+
+        String last_30_day_project_percentile_latency_path = monitorConfig.getLogParseResultMetaDir() + "project_90_95_percentile_latency.csv";
+        FileUtils.pathCheck(last_30_day_project_percentile_latency_path);
+        FileUtils.clearHdfsFile(last_30_day_project_percentile_latency_path);
+        while (res.next() && res.getMetaData().getColumnCount() == 2) {
+            FileUtils.appendResultToHdfs(last_30_day_project_percentile_latency_path, new String[]{res.getString(1), res.getString(2)});
+            logger.info(res.getString(1) + "," + res.getString(2));
+        }
+
+        //0.9 and 0.95 percentile latency for each day in the last 30 days
+        SQL_EACH_DAY_PERCENTILE = generateEachDayPercentileSql();
+        logger.info("Running sql (0.9,0.95 latency):" + SQL_EACH_DAY_PERCENTILE);
+        String each_day_percentile_file = monitorConfig.getLogParseResultMetaDir() + "each_day_90_95_percentile_latency.csv";
+        FileUtils.pathCheck(each_day_percentile_file);
+        FileUtils.clearHdfsFile(each_day_percentile_file);
+
+        res = stmt.executeQuery(SQL_EACH_DAY_PERCENTILE);
+        while (res.next() && res.getMetaData().getColumnCount() == 3) {
+            FileUtils.appendResultToHdfs(each_day_percentile_file, new String[]{res.getString(1), res.getString(2),res.getString(3)});
+            logger.info(res.getString(1) + "," + res.getString(2)+ "," + res.getString(3));
+        }
+
+        //release jdbc resources
+        if (res != null) {
+            res.close();
+        }
+        stmt.close();
+        con.close();
+    }
+
+    public String generateQueryLogSql() {
+        String query_log_parse_result_dir = monitorConfig.getQueryLogParseResultDir();
+        String query_log_table_name = monitorConfig.getQueryLogResultTable();
+        return SQL_GENERATE_QUERY_LOG_TABLE.replace("[QUERY_LOG_PARSE_RESULT_DIR]", query_log_parse_result_dir).replace("[QUERY_LOG_TABLE_NAME]", query_log_table_name);
+    }
+
+    public String generateUserCountSql() {
+        return SQL_TOTAL_QUERY_USER.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+    }
+
+    public String generateAvgDayQuery() {
+        return SQL_AVG_DAY_QUERY.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+    }
+
+    public String generateLast30DailyQueryCount() {
+        SQL_LAST_30_DAILY_QUERY_COUNT = SQL_LAST_30_DAILY_QUERY_COUNT.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+        return monthStatisticSqlConvert(SQL_LAST_30_DAILY_QUERY_COUNT);
+    }
+
+
+
+    //last 30 days
+    public String generateNinetyPercentileSql() {
+        SQL_90_PERCENTILE_LAST_30_DAY = SQL_90_PERCENTILE_LAST_30_DAY.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+        return monthStatisticSqlConvert(SQL_90_PERCENTILE_LAST_30_DAY);
+    }
+
+    //last 30 days, 90/95 percentile by project
+    public String generateProjectPercentileSql() {
+        SQL_DAY_PERCENTILE_BY_PROJECT = SQL_DAY_PERCENTILE_BY_PROJECT.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+        return monthStatisticSqlConvert(SQL_DAY_PERCENTILE_BY_PROJECT);
+    }
+
+    public String generateEachDayPercentileSql() {
+        SQL_EACH_DAY_PERCENTILE = SQL_EACH_DAY_PERCENTILE.replace("[QUERY_LOG_TABLE_NAME]", QUERY_LOG_TABLE_NAME);
+        return monthStatisticSqlConvert(SQL_EACH_DAY_PERCENTILE);
+    }
+
+    public String monthStatisticSqlConvert(String sql) {
+
+        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
+        Calendar cal = Calendar.getInstance();
+        cal.add(Calendar.DATE, -1);
+        String endDate = format.format(cal.getTime());
+        cal.add(Calendar.DATE, -30);
+        String startDate = format.format(cal.getTime());
+        return sql.replace("[START_DATE]", "'" + startDate + "'").replace("[END_DATE]", "'" + endDate + "'");
+    }
+
+
+}
\ No newline at end of file
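
To make the placeholder substitution concrete: with the default table name, a run on 2015-07-03 would have monthStatisticSqlConvert rewrite SQL_LAST_30_DAILY_QUERY_COUNT into roughly

    SELECT REQUEST_DATE, COUNT(*) FROM kylin_query_log
    WHERE REQUEST_DATE>='2015-06-02' AND REQUEST_DATE<'2015-07-02'
    GROUP BY REQUEST_DATE

i.e. a 30-day window with an exclusive end date of yesterday, so neither today nor yesterday (both potentially incomplete) is counted.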

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/MonitorMetaManager.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/MonitorMetaManager.java b/monitor/src/main/java/org/apache/kylin/monitor/MonitorMetaManager.java
new file mode 100644
index 0000000..c01cf66
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/MonitorMetaManager.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Created by jiazhong on 2015/5/25.
+ */
+public class MonitorMetaManager {
+
+    private static ConfigUtils monitorConfig = ConfigUtils.getInstance();
+
+    static String TABLE_NAME = "kylin_metadata";
+    final static String COLUMN_FAMILY = "f";
+    final static String COLUMN = "c";
+    final static String ROW_KEY_QUERY_READ_FILES = "/performance/query_log_files_already_read";
+    final static String ROW_KEY_QUERY_READING_FILE = "/performance/query_log_file_reading";
+    //must differ from ROW_KEY_QUERY_READING_FILE so the line mark does not overwrite the file mark
+    final static String ROW_KEY_QUERY_READING_FILE_LINE = "/performance/query_log_file_reading_line";
+
+
+    final static String ROW_KEY_API_REQ_LOG_READ_FILES = "/performance/api_req_log_files_already_read";
+
+
+    final static Logger logger = Logger.getLogger(MonitorMetaManager.class);
+
+    static Configuration conf = null;
+
+    static {
+        try {
+            monitorConfig.loadMonitorParam();
+        } catch (IOException e) {
+            logger.error("Failed to load monitor config:", e);
+        }
+        conf = HBaseConfiguration.create();
+    }
+
+
+    /*
+     * meta data initialize
+     * @unused
+     */
+    public static void init() throws Exception {
+        MonitorMetaManager.TABLE_NAME = MonitorMetaManager.getMetadataUrlPrefix();
+        logger.info("Monitor Metadata Table :"+MonitorMetaManager.TABLE_NAME);
+        logger.info("init monitor metadata,create table if not exist");
+        MonitorMetaManager.creatTable(TABLE_NAME, new String[]{COLUMN_FAMILY});
+    }
+
+    public static String getMetadataUrlPrefix() {
+        String hbaseMetadataUrl = monitorConfig.getMetadataUrl();
+        String defaultPrefix = "kylin_metadata";
+        int cut = hbaseMetadataUrl.indexOf('@');
+        String tmp = cut < 0 ? defaultPrefix : hbaseMetadataUrl.substring(0, cut);
+        return tmp;
+    }
+
+
+    /*
+     * mark query file as read after parsing
+     */
+    public static void markQueryFileAsRead(String filename) throws IOException {
+        String read_query_log_file = MonitorMetaManager.getReadQueryLogFiles();
+        if(StringUtils.isEmpty(read_query_log_file)){
+            read_query_log_file =  filename;
+        }else{
+            read_query_log_file = read_query_log_file.concat(",").concat(filename);
+        }
+        MonitorMetaManager.updateData(TABLE_NAME, ROW_KEY_QUERY_READ_FILES, COLUMN_FAMILY,COLUMN, read_query_log_file);
+    }
+
+    /*
+     * mark reading file for tracking
+     */
+    public static void markQueryReadingFile(String query_reading_file) throws IOException {
+        MonitorMetaManager.updateData(TABLE_NAME, ROW_KEY_QUERY_READING_FILE, COLUMN_FAMILY,COLUMN, query_reading_file);
+    }
+
+    /*
+     * mark reading line for tracking
+     */
+    public static void markQueryReadingLine(String line_num) throws IOException {
+        MonitorMetaManager.updateData(TABLE_NAME, ROW_KEY_QUERY_READING_FILE_LINE, COLUMN_FAMILY,COLUMN, line_num);
+    }
+
+
+    /*
+     * get the list of query log file names that have already been read
+     */
+    public static String[] getReadQueryLogFileList() throws IOException {
+        String fileList = MonitorMetaManager.getReadQueryLogFiles();
+        return fileList.split(",");
+    }
+
+    /*
+     * get the query log files that have already been read
+     */
+    public static String getReadQueryLogFiles() throws IOException {
+        return getListWithRowkey(TABLE_NAME, ROW_KEY_QUERY_READ_FILES);
+    }
+
+
+    /*
+     * get the already-read file list stored under the given rowkey
+    */
+    public static String getListWithRowkey(String table, String rowkey) throws IOException {
+        Result result = getResultByRowKey(table, rowkey);
+        String fileList = null;
+        if (result.list() != null) {
+            for (KeyValue kv : result.list()) {
+                fileList = Bytes.toString(kv.getValue());
+            }
+
+        }
+        fileList = fileList==null?"":fileList;
+        return fileList;
+    }
+
+
+    /*
+     * mark api req log file as read after parsing
+     */
+    public static void markApiReqLogFileAsRead(String filename) throws IOException {
+        String readApiReqLogFiles = MonitorMetaManager.getReadApiReqLogFiles();
+        if (StringUtils.isEmpty(readApiReqLogFiles)) {
+            readApiReqLogFiles = filename;
+        } else {
+            readApiReqLogFiles = readApiReqLogFiles.concat(",").concat(filename);
+        }
+        MonitorMetaManager.updateData(TABLE_NAME, ROW_KEY_API_REQ_LOG_READ_FILES, COLUMN_FAMILY, COLUMN, readApiReqLogFiles);
+    }
+
+
+    /*
+     * get the list of api request log file names that have already been read
+     */
+    public static String[] getReadApiReqLogFileList() throws IOException {
+        String fileList = MonitorMetaManager.getReadApiReqLogFiles();
+        return fileList.split(",");
+    }
+
+    /*
+     * get the api request log files that have already been read, as a comma-separated string
+     */
+    public static String getReadApiReqLogFiles() throws IOException {
+        return getListWithRowkey(TABLE_NAME, ROW_KEY_API_REQ_LOG_READ_FILES);
+    }
+
+
+    /*
+     * create the metadata table in hbase if it does not exist
+     */
+    public static void createTable(String tableName, String[] families) throws Exception {
+        HBaseAdmin admin = new HBaseAdmin(conf);
+        try {
+            HTableDescriptor desc = new HTableDescriptor(tableName);
+            for (String family : families) {
+                desc.addFamily(new HColumnDescriptor(family));
+            }
+            if (admin.tableExists(tableName)) {
+                logger.info("table " + tableName + " already exists");
+            } else {
+                admin.createTable(desc);
+                logger.info("table " + tableName + " created successfully");
+            }
+        } finally {
+            admin.close();
+        }
+    }
+
+    /*
+     * update a cell in hbase
+     */
+    public static void updateData(String tableName, String rowKey, String family, String column, String value) throws IOException {
+        HTable table = new HTable(conf, Bytes.toBytes(tableName));
+        Put put = new Put(rowKey.getBytes());
+        put.add(family.getBytes(), column.getBytes(), value.getBytes());
+        try {
+            table.put(put);
+        } catch (IOException e) {
+            logger.error("failed to update rowkey " + rowKey + " in table " + tableName, e);
+            throw e;
+        } finally {
+            table.close();
+        }
+        logger.info("updated rowkey " + rowKey + " in table " + tableName);
+    }
+
+    /*
+     * get result by rowkey
+     */
+    public static Result getResultByRowKey(String tableName, String rowKey) throws IOException {
+        HTable table = new HTable(conf, Bytes.toBytes(tableName));
+        try {
+            Get get = new Get(Bytes.toBytes(rowKey));
+            return table.get(get);
+        } finally {
+            table.close();
+        }
+    }
+
+
+}

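A minimal sketch of how a log parser is expected to drive this bookkeeping (assuming init() has run once at startup; the file name below is illustrative):

    String[] alreadyRead = MonitorMetaManager.getReadQueryLogFileList();
    String logFile = "kylin_query.log.2015-05-27";        // sample name matching QueryParser's file pattern
    if (!java.util.Arrays.asList(alreadyRead).contains(logFile)) {
        MonitorMetaManager.markQueryReadingFile(logFile); // track the file in progress
        // ... parse the file line by line ...
        MonitorMetaManager.markQueryFileAsRead(logFile);  // so the next run skips it
    }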
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/java/org/apache/kylin/monitor/QueryParser.java
----------------------------------------------------------------------
diff --git a/monitor/src/main/java/org/apache/kylin/monitor/QueryParser.java b/monitor/src/main/java/org/apache/kylin/monitor/QueryParser.java
new file mode 100644
index 0000000..fc8a403
--- /dev/null
+++ b/monitor/src/main/java/org/apache/kylin/monitor/QueryParser.java
@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.monitor;
+
+import au.com.bytecode.opencsv.CSVWriter;
+import org.apache.commons.io.filefilter.RegexFileFilter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.log4j.Logger;
+
+import java.io.*;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * @author jiazhong
+ */
+public class QueryParser {
+
+    final static Logger logger = Logger.getLogger(QueryParser.class);
+    final static Charset ENCODING = StandardCharsets.UTF_8;
+    final static String QUERY_LOG_FILE_PATTERN = "kylin_query.log.(\\d{4}-\\d{2}-\\d{2})$";
+    final static String QUERY_LOG_PARSE_RESULT_FILENAME = "kylin_query_log.csv";
+    static String QUERY_PARSE_RESULT_PATH = null;
+    static String DEPLOY_ENV;
+
+    final static String[] KYLIN_QUERY_CSV_HEADER = {"REQ_TIME","REQ_DATE", "SQL", "USER", "IS_SUCCESS", "LATENCY", "PROJECT", "REALIZATION NAMES", "CUBOID IDS", "TOTAL SCAN COUNT", "RESULT ROW COUNT", "ACCEPT PARTIAL", "IS PARTIAL RESULT", "HIT CACHE", "MESSAGE","DEPLOY_ENV"};
+
+    private ConfigUtils monitorConfig;
+
+    public QueryParser() {
+        monitorConfig = ConfigUtils.getInstance();
+        try {
+            monitorConfig.loadMonitorParam();
+            DEPLOY_ENV = monitorConfig.getDeployEnv();
+        } catch (IOException e) {
+            logger.error("Failed to load monitor config", e);
+        }
+    }
+
+    /*
+     * parse the kylin query log files and append the results to a csv file on hdfs;
+     * files that have been read are marked in the metadata and will not be read again
+     */
+    public void start() throws IOException, ParseException {
+        QueryParser.QUERY_PARSE_RESULT_PATH = ConfigUtils.getInstance().getQueryLogParseResultDir() + QUERY_LOG_PARSE_RESULT_FILENAME;
+        this.parseQueryInit();
+
+        //get query log files that have already been read
+        String[] hasReadFiles = MonitorMetaManager.getReadQueryLogFileList();
+
+        //get all candidate query log files
+        List<File> files = this.getQueryLogFiles();
+
+        for (File file : files) {
+            if (!Arrays.asList(hasReadFiles).contains(file.getName())) {
+                this.parseQueryLog(file.getPath(), QueryParser.QUERY_PARSE_RESULT_PATH);
+                MonitorMetaManager.markQueryFileAsRead(file.getName());
+            }
+        }
+    }
+
+    public void parseQueryInit() throws IOException {
+        logger.info("parse query initializing...");
+        FileSystem fs = null;
+        try {
+            Configuration conf = new Configuration();
+            fs = FileSystem.get(conf);
+            org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(QueryParser.QUERY_PARSE_RESULT_PATH);
+            if (!fs.exists(path)) {
+                fs.create(path);
+                fs.close(); //need to close before getting the FileSystem again
+                this.writeResultToHdfs(QueryParser.QUERY_PARSE_RESULT_PATH, QueryParser.KYLIN_QUERY_CSV_HEADER);
+            }
+        } catch (IOException e) {
+            if (fs != null) {
+                fs.close();
+            }
+            logger.error("Failed to init query parse result file:", e);
+        }
+    }
+
+    //parse a query log file and append the result as csv to hdfs
+    public void parseQueryLog(String filePath, String dPath) throws ParseException, IOException {
+
+        logger.info("Start parsing file " + filePath + " !");
+
+        //writer config init
+        FileSystem fs = this.getHdfsFileSystem();
+        org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+        OutputStreamWriter writer = new OutputStreamWriter(fs.append(resultStorePath));
+        CSVWriter cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);
+
+        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
+        Pattern p_query_start = Pattern.compile("^\\[.*\\]:\\[(.*),.*\\]\\[.*\\]\\[.*QueryService.logQuery.*\\].*");
+        Pattern p_query_end = Pattern.compile("^Message:(.*)$");
+        Pattern p_query_body = Pattern.compile("^\\[.*\\]:\\[((\\d{4}-\\d{2}-\\d{2}).*)\\]\\[.*\\]\\[.*\\].*\n^=+\\[QUERY\\]=+\n^SQL:(.*)\n^User:(.*)\n^Success:(.*)\n^Duration:(.*)\n^Project:(.*)\n^(Realization Names|Cube Names): \\[(.*)\\]\n^Cuboid Ids: \\[(.*)\\]\n^Total scan count:(.*)\n^Result row count:(.*)\n^Accept Partial:(.*)\n(^Is Partial Result:(.*)\n)?^Hit Cache:(.*)\n^Message:(.*)", Pattern.MULTILINE);
+        Matcher m_query_start = p_query_start.matcher("");
+        Matcher m_query_end = p_query_end.matcher("");
+        Matcher m_query_body = p_query_body.matcher("");
+
+        boolean query_start = false;
+        StringBuffer query_body = new StringBuffer("");
+        Path path = Paths.get(filePath);
+        BufferedReader reader = null;
+        try {
+            reader = Files.newBufferedReader(path, ENCODING);
+            String line = null;
+            while ((line = reader.readLine()) != null) {
+                m_query_start.reset(line); //reset the input
+                m_query_end.reset(line);
+
+                // set start flag, clear the StringBuffer
+                if (m_query_start.find()) {
+                    query_start = true;
+                    query_body = new StringBuffer("");
+                }
+                if (query_start) {
+                    query_body.append(line + "\n");
+                }
+                if (m_query_end.find()) {
+                    query_start = false;
+                    m_query_body.reset(query_body);
+                    logger.info("parsing query...");
+                    logger.info(query_body);
+//                    skip group(8) and group(14)
+                    if (m_query_body.find()) {
+                        ArrayList<String> groups = new ArrayList<String>();
+                        int grp_count = m_query_body.groupCount();
+                        for (int i = 1; i <= grp_count; i++) {
+                            if (i != 8 && i != 14) {
+                                String grp_item = m_query_body.group(i);
+                                grp_item = grp_item == null ? "" : grp_item.trim();
+                                groups.add(grp_item);
+                            }
+                        }
+
+                        long start_time = format.parse(groups.get(0)).getTime() - (int) (Double.parseDouble(groups.get(5)) * 1000);
+                        groups.set(0, format.format(new Date(start_time)));
+                        groups.add(DEPLOY_ENV);
+                        String[] recordArray = groups.toArray(new String[groups.size()]);
+//                        write to hdfs
+                        cwriter.writeNext(recordArray);
+
+                    }
+
+                }
+
+            }
+        } catch (IOException ex) {
+            logger.error("Failed to write to hdfs:", ex);
+        } finally {
+            if (reader != null) {
+                reader.close();
+            }
+            cwriter.close();
+            writer.close();
+            fs.close();
+        }
+
+        logger.info("Finish parsing file " + filePath + " !");
+
+    }
+
+    /*
+     * write one parse result record to hdfs
+     */
+    public void writeResultToHdfs(String dPath, String[] record) throws IOException {
+        OutputStreamWriter writer = null;
+        CSVWriter cwriter = null;
+        FileSystem fs = null;
+        try {
+            fs = this.getHdfsFileSystem();
+            org.apache.hadoop.fs.Path resultStorePath = new org.apache.hadoop.fs.Path(dPath);
+            writer = new OutputStreamWriter(fs.append(resultStorePath));
+            cwriter = new CSVWriter(writer, '|', CSVWriter.NO_QUOTE_CHARACTER);
+            cwriter.writeNext(record);
+        } catch (IOException e) {
+            logger.error("Failed to write result to hdfs:", e);
+        } finally {
+            if (cwriter != null) {
+                cwriter.close();
+            }
+            if (writer != null) {
+                writer.close();
+            }
+            if (fs != null) {
+                fs.close();
+            }
+        }
+    }
+
+
+    /*
+     * get all query log files
+     */
+    public List<File> getQueryLogFiles() {
+        List<File> logFiles = new ArrayList<File>();
+
+        List<String> query_log_dir_list = monitorConfig.getLogBaseDir();
+        FileFilter filter = new RegexFileFilter(QUERY_LOG_FILE_PATTERN);
+
+        for (String path : query_log_dir_list) {
+            logger.info("fetching query log file from path:" + path);
+            File query_log_dir = new File(path);
+            File[] query_log_files = query_log_dir.listFiles(filter);
+            if (query_log_files == null) {
+                logger.warn("no query log file found under path " + path);
+                continue;
+            }
+
+            Collections.addAll(logFiles, query_log_files);
+        }
+        return logFiles;
+    }
+
+
+    /*
+     * get the hdfs FileSystem
+     */
+    public FileSystem getHdfsFileSystem() throws IOException {
+        Configuration conf = new Configuration();
+        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
+        FileSystem fs = null;
+        try {
+            fs = FileSystem.get(conf);
+        } catch (IOException e) {
+            logger.error("Failed to get hdfs FileSystem", e);
+            throw e;
+        }
+        return fs;
+    }
+
+}
\ No newline at end of file

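For reference, a self-contained sketch of how the p_query_body pattern above decomposes one log entry; the sample entry is synthetic but follows the layout the pattern expects:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class QueryLogRegexDemo {
        public static void main(String[] args) {
            // synthetic kylin_query.log entry in the expected layout
            String sample = "[http-bio-7070-exec-1]:[2015-05-27 23:01:10,456][DEBUG][org.apache.kylin.rest.service.QueryService.logQuery] - \n"
                    + "===============[QUERY]===============\n"
                    + "SQL: select count(*) from test_table\n"
                    + "User: ADMIN\n"
                    + "Success: true\n"
                    + "Duration: 0.123\n"
                    + "Project: learn_kylin\n"
                    + "Realization Names: [test_cube]\n"
                    + "Cuboid Ids: [255]\n"
                    + "Total scan count: 100\n"
                    + "Result row count: 1\n"
                    + "Accept Partial: true\n"
                    + "Is Partial Result: false\n"
                    + "Hit Cache: false\n"
                    + "Message: null";
            // same pattern as QueryParser.p_query_body, split across literals for readability
            Pattern p = Pattern.compile("^\\[.*\\]:\\[((\\d{4}-\\d{2}-\\d{2}).*)\\]\\[.*\\]\\[.*\\].*\n^=+\\[QUERY\\]=+\n"
                    + "^SQL:(.*)\n^User:(.*)\n^Success:(.*)\n^Duration:(.*)\n^Project:(.*)\n"
                    + "^(Realization Names|Cube Names): \\[(.*)\\]\n^Cuboid Ids: \\[(.*)\\]\n"
                    + "^Total scan count:(.*)\n^Result row count:(.*)\n^Accept Partial:(.*)\n"
                    + "(^Is Partial Result:(.*)\n)?^Hit Cache:(.*)\n^Message:(.*)", Pattern.MULTILINE);
            Matcher m = p.matcher(sample);
            if (m.find()) {
                System.out.println("time     = " + m.group(1));        // 2015-05-27 23:01:10,456
                System.out.println("sql      = " + m.group(3).trim()); // select count(*) from test_table
                System.out.println("duration = " + m.group(6).trim()); // 0.123 (seconds, used to back-compute REQ_TIME)
            }
        }
    }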
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/monitor/src/main/resources/log4j.properties b/monitor/src/main/resources/log4j.properties
new file mode 100644
index 0000000..6dec581
--- /dev/null
+++ b/monitor/src/main/resources/log4j.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+log4j.rootLogger=INFO,stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=L4J [%d{yyyy-MM-dd HH:mm:ss,SSS}][%p][%c] - %m%n
+
+log4j.appender.monitor=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.monitor.layout=org.apache.log4j.PatternLayout
+log4j.appender.monitor.File=${CATALINA_HOME}/logs/kylin_monitor.log
+log4j.appender.monitor.layout.ConversionPattern=[%t]:[%d{yyyy-MM-dd HH:mm:ss,SSS}][%p][%l] - %m%n
+log4j.appender.monitor.Append=true
+
+#log4j.logger.org.apache.hadoop=ERROR
+log4j.logger.org.apache.kylin=DEBUG,monitor

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/monitor/src/test/java/org/apache/kylin/monitor/ParseLogTest.java
----------------------------------------------------------------------
diff --git a/monitor/src/test/java/org/apache/kylin/monitor/ParseLogTest.java b/monitor/src/test/java/org/apache/kylin/monitor/ParseLogTest.java
new file mode 100644
index 0000000..7b4e7b7
--- /dev/null
+++ b/monitor/src/test/java/org/apache/kylin/monitor/ParseLogTest.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+
+package org.apache.kylin.monitor;
+
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+
+/**
+ * Created by jiazhong on 2015/5/27.
+ */
+public class ParseLogTest {
+
+    private QueryParser queryParser;
+    private ApiRequestParser apiReqParser;
+
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+        //set catalina.home temp
+        System.setProperty(ConfigUtils.CATALINA_HOME, "../server/");
+
+        //test_case_data/sandbox/ contains HDP 2.2 site xmls which is dev sandbox
+        ConfigUtils.addClasspath(new File("../examples/test_case_data/sandbox").getAbsolutePath());
+
+        //get log base dir
+        System.setProperty(ConfigUtils.KYLIN_LOG_CONF_HOME, "../examples/test_case_data/performance_data");
+
+        //get kylin.properties
+        System.setProperty(ConfigUtils.KYLIN_CONF, "../examples/test_case_data/sandbox");
+
+    }
+
+
+    @Before
+    public void before(){
+        queryParser = new QueryParser();
+        apiReqParser = new ApiRequestParser();
+    }
+
+    @Test
+    public void test() throws Exception {
+        testQueryParsing();
+        testApiReqParsing();
+    }
+
+    private void testQueryParsing() throws IOException, ParseException {
+        queryParser.getQueryLogFiles();
+    }
+
+    private void testApiReqParsing() {
+        apiReqParser.getRequestLogFiles();
+    }
+
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5748328..cdd8191 100644
--- a/pom.xml
+++ b/pom.xml
@@ -557,6 +557,7 @@
         <module>query</module>
         <module>server</module>
         <module>jdbc</module>
+        <module>monitor</module>
         <module>invertedindex</module>
     </modules>
 

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/script/prepare.sh
----------------------------------------------------------------------
diff --git a/script/prepare.sh b/script/prepare.sh
index a8b1c7d..f0702f8 100755
--- a/script/prepare.sh
+++ b/script/prepare.sh
@@ -33,11 +33,13 @@ cp server/target/kylin-server-${version}.war tomcat/webapps/kylin.war
 cp job/target/kylin-job-${version}-job.jar lib/kylin-job-${version}.jar
 cp storage/target/kylin-storage-${version}-coprocessor.jar lib/kylin-coprocessor-${version}.jar
 cp jdbc/target/kylin-jdbc-${version}.jar lib/kylin-jdbc-${version}.jar
+cp monitor/target/kylin-monitor-${version}.jar lib/kylin-monitor-${version}.jar
 # Copied file becomes 000 for some env (e.g. my Cygwin)
 chmod 644 tomcat/webapps/kylin.war
 chmod 644 lib/kylin-job-${version}.jar
 chmod 644 lib/kylin-coprocessor-${version}.jar
 chmod 644 lib/kylin-jdbc-${version}.jar
+chmod 644 lib/kylin-monitor-${version}.jar
 
 echo "add js css to war"
 if [ ! -d "webapp/dist" ]

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
index 1578c62..8500426 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -444,6 +444,11 @@
             <version>${jetty.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>com.github.isrsal</groupId>
+            <artifactId>spring-mvc-logger</artifactId>
+            <version>0.2</version>
+        </dependency>
     </dependencies>
 
     <build>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/server/src/main/java/org/apache/kylin/rest/controller/PerformanceController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/PerformanceController.java b/server/src/main/java/org/apache/kylin/rest/controller/PerformanceController.java
new file mode 100644
index 0000000..735db51
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/controller/PerformanceController.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.controller;
+
+import com.codahale.metrics.annotation.Metered;
+import com.codahale.metrics.annotation.Timed;
+import net.sf.ehcache.CacheManager;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.rest.service.CubeService;
+import org.apache.kylin.rest.service.PerformService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.*;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Handle performance monitor requests.
+ *
+ * @author xduo
+ */
+@Controller
+@RequestMapping(value = "/performance")
+public class PerformanceController extends BasicController {
+
+    private static final Logger logger = LoggerFactory.getLogger(PerformanceController.class);
+
+
+    @Autowired
+    private PerformService performService;
+
+    @Autowired
+    private CubeService cubeService;
+
+    @Autowired
+    private CacheManager cacheManager;
+
+    @RequestMapping(value = "/eachDayPercentile", method = RequestMethod.GET)
+    @ResponseBody
+    @Timed(name = "eachDayPercentile")
+    public List<String[]> eachDayPercentile() throws IOException {
+        return performService.eachDayPercentile();
+    }
+
+    @RequestMapping(value = "/projectPercentile", method = RequestMethod.GET)
+    @ResponseBody
+    @Timed(name = "projectPercentile")
+    public List<String[]> projectPercentile() throws IOException {
+        return performService.projectPercentile();
+    }
+
+
+    @RequestMapping(value = "/last30DayPercentile", method = RequestMethod.GET)
+    @ResponseBody
+    @Timed(name = "last30DayPercentile")
+    public List<String[]> last30DayPercentile() throws IOException {
+        return performService.last30DayPercentile();
+    }
+
+    @RequestMapping(value = "/cubesStorage", method = {RequestMethod.GET})
+    @ResponseBody
+    @Metered(name = "cubesStorage")
+    public List<CubeInstance> getCubeStorage() {
+        return cubeService.listAllCubes(null, null);
+    }
+
+
+    @RequestMapping(value = "/totalQueryUser", method = {RequestMethod.GET})
+    @ResponseBody
+    @Metered(name = "totalQueryUser")
+    public List<String[]> totalQueryUser() throws IOException {
+        return performService.getTotalQueryUser();
+    }
+
+    @RequestMapping(value = "/dailyQueryCount", method = {RequestMethod.GET})
+    @ResponseBody
+    @Metered(name = "dailyQueryCount")
+    public List<String[]> dailyQueryCount() throws IOException {
+        return performService.dailyQueryCount();
+    }
+
+    @RequestMapping(value = "/avgDayQuery", method = {RequestMethod.GET})
+    @ResponseBody
+    @Metered(name = "avgDayQuery")
+    public List<String[]> avgDayQuery() throws IOException {
+        return performService.avgDayQuery();
+    }
+
+    @RequestMapping(value = "/listCubes", method = {RequestMethod.GET})
+    @ResponseBody
+    @Metered(name = "listCubes")
+    public List<CubeInstance> getCubes() {
+        return cubeService.listAllCubes(null,null);
+    }
+
+
+
+    public void setCubeService(CubeService cubeService) {
+        this.cubeService = cubeService;
+    }
+
+    public void setPerformService(PerformService performService) {
+        this.performService = performService;
+    }
+
+}

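Once the server is running, these endpoints can be smoke-tested with any HTTP client; a minimal sketch using only the JDK (host, port, the /kylin/api context path and the ADMIN:KYLIN credentials are assumptions for a default sandbox and may differ in your deployment):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import javax.xml.bind.DatatypeConverter;

    public class PerformanceEndpointDemo {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:7070/kylin/api/performance/eachDayPercentile");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            // basic auth header, mirroring what KylinApiFilter later decodes
            String auth = DatatypeConverter.printBase64Binary("ADMIN:KYLIN".getBytes("UTF-8"));
            conn.setRequestProperty("Authorization", "Basic " + auth);
            BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
            for (String line; (line = in.readLine()) != null; ) {
                System.out.println(line); // JSON-serialized rows derived from the parsed csv
            }
            in.close();
        }
    }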
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/server/src/main/java/org/apache/kylin/rest/filter/KylinApiFilter.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/filter/KylinApiFilter.java b/server/src/main/java/org/apache/kylin/rest/filter/KylinApiFilter.java
new file mode 100644
index 0000000..035b57d
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/filter/KylinApiFilter.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.filter;
+
+import com.github.isrsal.logging.RequestWrapper;
+import com.github.isrsal.logging.ResponseWrapper;
+import com.sun.jersey.core.util.Base64;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContext;
+import org.springframework.web.filter.OncePerRequestFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.Charset;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Created by jiazhong on 2015/4/20.
+ */
+
+public class KylinApiFilter extends OncePerRequestFilter {
+    protected static final Logger logger = LoggerFactory.getLogger(KylinApiFilter.class);
+    private static final String REQUEST_PREFIX = "Request: ";
+    private static final String RESPONSE_PREFIX = "Response: ";
+    private AtomicLong id = new AtomicLong(1L);
+
+    public KylinApiFilter() {
+    }
+
+    @Override
+    protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
+        if (logger.isDebugEnabled()) {
+            long requestId = this.id.incrementAndGet();
+            request = new RequestWrapper(Long.valueOf(requestId), request);
+            response = new ResponseWrapper(Long.valueOf(requestId), response);
+        }
+
+        try {
+            filterChain.doFilter(request, response);
+        } finally {
+            if (logger.isDebugEnabled()) {
+                this.logRequest(request, (ResponseWrapper) response);
+                //this.logResponse((ResponseWrapper) response);
+            }
+        }
+    }
+
+    private void logRequest(HttpServletRequest request, ResponseWrapper response) {
+        StringBuilder msg = new StringBuilder();
+        msg.append("REQUEST: ");
+        HttpSession session = request.getSession(true);
+        SecurityContext context = (SecurityContext) session.getAttribute("SPRING_SECURITY_CONTEXT");
+
+        String requester = "";
+        if (context != null) {
+            Authentication authentication = context.getAuthentication();
+            if (authentication != null) {
+                requester = authentication.getName();
+            }
+        } else {
+            final String authorization = request.getHeader("Authorization");
+            if (authorization != null && authorization.startsWith("Basic")) {
+                // Authorization: Basic base64credentials
+                String base64Credentials = authorization.substring("Basic".length()).trim();
+                String credentials = new String(Base64.decode(base64Credentials), Charset.forName("UTF-8"));
+                // credentials = username:password
+                String[] values = credentials.split(":", 2);
+                requester = values[0];
+            }
+        }
+        msg.append("REQUESTER=").append(requester);
+
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        msg.append(";REQ_TIME=").append(format.format(new Date()));
+        msg.append(";URI=").append(request.getRequestURI());
+        msg.append(";METHOD=").append(request.getMethod());
+        msg.append(";QUERY_STRING=").append(request.getQueryString());
+        if (request instanceof RequestWrapper && !this.isMultipart(request)) {
+            RequestWrapper requestWrapper = (RequestWrapper) request;
+            try {
+                String encoding = requestWrapper.getCharacterEncoding() != null ? requestWrapper.getCharacterEncoding() : "UTF-8";
+                msg.append(";PAYLOAD=").append(new String(requestWrapper.toByteArray(), encoding));
+            } catch (UnsupportedEncodingException e) {
+                logger.warn("Failed to parse request payload", e);
+            }
+        }
+        msg.append(";RESP_STATUS=").append(response.getStatus()).append(";");
+
+        logger.debug(msg.toString());
+    }
+
+    private boolean isMultipart(HttpServletRequest request) {
+        return request.getContentType() != null && request.getContentType().startsWith("multipart/form-data");
+    }
+
+    private void logResponse(ResponseWrapper response) {
+        StringBuilder msg = new StringBuilder();
+        msg.append("RESPONSE: ");
+        msg.append("REQUEST_ID=").append(response.getId());
+
+        try {
+            msg.append("; payload=").append(new String(response.toByteArray(), response.getCharacterEncoding()));
+        } catch (UnsupportedEncodingException var4) {
+            logger.warn("Failed to parse response payload", var4);
+        }
+
+        logger.debug(msg.toString());
+    }
+}
\ No newline at end of file

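The requester extraction in logRequest is standard Basic-auth decoding; a standalone equivalent using only the JDK (the header value below is illustrative):

    // decode "Authorization: Basic <base64>" to recover the user name
    String authorization = "Basic QURNSU46S1lMSU4=";   // base64 of "ADMIN:KYLIN"
    String base64Credentials = authorization.substring("Basic".length()).trim();
    byte[] decoded = javax.xml.bind.DatatypeConverter.parseBase64Binary(base64Credentials);
    String credentials = new String(decoded, java.nio.charset.Charset.forName("UTF-8"));
    String requester = credentials.split(":", 2)[0];   // -> "ADMIN"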
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a935c7ba/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index de97a7b..71e9f19 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -19,6 +19,9 @@
 package org.apache.kylin.rest.service;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HBaseRegionSizeCalculator;
@@ -556,4 +559,5 @@ public class CubeService extends BasicService {
     }
 
 
+
 }