Posted to commits@eagle.apache.org by ha...@apache.org on 2015/12/03 04:04:57 UTC

incubator-eagle git commit: [EAGLE-58] Enhance hive query log parsing

Repository: incubator-eagle
Updated Branches:
  refs/heads/master 0637cad6c -> de30b151e


[EAGLE-58] Enhance hive query log parsing

JIRA: https://issues.apache.org/jira/browse/EAGLE-58
PR: https://github.com/apache/incubator-eagle/pull/12

Author: Zhao, Qingwen <qi...@ebay.com>
Reviewer: Chen, Hao <ha...@apache.org>

Closes #12.


Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/de30b151
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/de30b151
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/de30b151

Branch: refs/heads/master
Commit: de30b151e0780c1242099c8c2aa24b2cccf09145
Parents: 0637cad
Author: Hao Chen <hc...@ebay.com>
Authored: Thu Dec 3 11:02:45 2015 +0800
Committer: Hao Chen <hc...@ebay.com>
Committed: Thu Dec 3 11:02:45 2015 +0800

----------------------------------------------------------------------
 .../hive/resolver/HiveCommandResolver.java      |   2 +-
 .../HiveJobRunningMonitoringMain.java           |   2 -
 .../jobrunning/HiveQueryParserExecutor.java     |  18 +-
 .../JobConfigurationAdaptorExecutor.java        |   7 +-
 .../apache/eagle/security/hive/ql/Parser.java   | 183 ++++++++++---------
 .../eagle/security/hive/ql/TestParser.java      | 156 +++++++++++++---
 6 files changed, 254 insertions(+), 114 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive-web/src/main/java/org/apache/eagle/service/security/hive/resolver/HiveCommandResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive-web/src/main/java/org/apache/eagle/service/security/hive/resolver/HiveCommandResolver.java b/eagle-security/eagle-security-hive-web/src/main/java/org/apache/eagle/service/security/hive/resolver/HiveCommandResolver.java
index ba28a0a..f1b2fc4 100644
--- a/eagle-security/eagle-security-hive-web/src/main/java/org/apache/eagle/service/security/hive/resolver/HiveCommandResolver.java
+++ b/eagle-security/eagle-security-hive-web/src/main/java/org/apache/eagle/service/security/hive/resolver/HiveCommandResolver.java
@@ -33,7 +33,7 @@ import java.util.regex.Pattern;
 public class HiveCommandResolver implements AttributeResolvable<GenericAttributeResolveRequest,String> {
     private final static Logger LOG = LoggerFactory.getLogger(HiveCommandResolver.class);
 
-    private final static String [] cmdStrs = {"SELECT", "UPDATE", "DELETE FROM"};
+    private final static String [] cmdStrs = {"SELECT", "UPDATE", "DELETE", "ALTER", "CREATE", "DROP"};
 
     private final static String HIVE_ATTRIBUTE_RESOLVE_FORMAT_HINT = String.format("hive command must be in {%s}", StringUtils.join(cmdStrs, ","));
 

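The widened cmdStrs list lets the hive-web attribute resolver cover write and DDL operations, not just reads. As an illustration only (a hypothetical helper for prefix matching over such a hint list; not the actual resolve() body of this class), the lookup might look like:

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical: suggest commands whose names start with what the user typed,
    // e.g. "d" -> ["DELETE", "DROP"] against the cmdStrs above.
    static List<String> suggestCommands(String typedPrefix, String[] cmds) {
        List<String> matches = new ArrayList<>();
        for (String cmd : cmds) {
            if (cmd.toLowerCase().startsWith(typedPrefix.toLowerCase())) {
                matches.add(cmd);
            }
        }
        return matches;
    }
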
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobRunningMonitoringMain.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobRunningMonitoringMain.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobRunningMonitoringMain.java
index c3e192a..09e7475 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobRunningMonitoringMain.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobRunningMonitoringMain.java
@@ -34,8 +34,6 @@ public class HiveJobRunningMonitoringMain {
 
 	public static void main(String[] args) throws Exception{
         Config config = new ConfigOptionParser().load(args);
-        //System.setProperty("config.trace", "loads");
-        //Config config = ConfigFactory.load();
 
         LOG.info("Config class: " + config.getClass().getCanonicalName());
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryParserExecutor.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryParserExecutor.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryParserExecutor.java
index 42e65bd..757f51d 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryParserExecutor.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryParserExecutor.java
@@ -61,7 +61,7 @@ public class HiveQueryParserExecutor extends JavaStormStreamExecutor2<String, Ma
         String user = (String)input.get(0);
         @SuppressWarnings("unchecked")
         Map<String, Object> hiveQueryLog = (Map<String, Object>)input.get(1);
-        LOG.info("Receive hive query log: " + hiveQueryLog);
+        //if(LOG.isDebugEnabled()) LOG.debug("Receive hive query log: " + hiveQueryLog);
 
         String query = null;
         String db = null;
@@ -95,15 +95,22 @@ public class HiveQueryParserExecutor extends JavaStormStreamExecutor2<String, Ma
             }
         }
 
-        HiveQLParserContent parserContent;
+        HiveQLParserContent parserContent = null;
         Parser queryParser = new Parser();
         try {
             parserContent = queryParser.run(query);
         } catch (Exception ex) {
             LOG.error("Failed running hive query parser.", ex);
-            throw new IllegalStateException(ex);
+            //throw new IllegalStateException(ex);
+        }
+        if(parserContent == null) {
+            LOG.warn("Event ignored as it can't be correctly parsed, the query log is " + query);
+            return;
+        }
+        if(parserContent.getTableColumnMap().size() == 0) {
+            LOG.warn("Unsupported command for parsing " + query);
+            return;
         }
-
         /**
          * Generate "resource" field: /db/table/column
          * "resource" -> </db/table/column1,/db/table/column2,...>
@@ -111,8 +118,7 @@ public class HiveQueryParserExecutor extends JavaStormStreamExecutor2<String, Ma
         StringBuilder resources = new StringBuilder();
         String prefix = ",";
         String connector = "/";
-        for (Entry<String, Set<String>> entry
-                : parserContent.getTableColumnMap().entrySet()) {
+        for (Entry<String, Set<String>> entry : parserContent.getTableColumnMap().entrySet()) {
             String table = entry.getKey();
             Set<String> colSet = entry.getValue();
             /**

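Taken together, the hunks above move HiveQueryParserExecutor from fail-fast to fail-soft: a parse failure no longer rethrows IllegalStateException out of the Storm executor, and events that yield no table/column information are skipped with a warning. The consolidated post-patch flow:

    HiveQLParserContent parserContent = null;
    Parser queryParser = new Parser();
    try {
        parserContent = queryParser.run(query);
    } catch (Exception ex) {
        // Log and fall through instead of rethrowing.
        LOG.error("Failed running hive query parser.", ex);
    }
    if (parserContent == null) {
        // Parse failed: drop this event rather than crash the topology.
        LOG.warn("Event ignored as it can't be correctly parsed, the query log is " + query);
        return;
    }
    if (parserContent.getTableColumnMap().size() == 0) {
        // Parsed, but nothing usable to build a "resource" field from.
        LOG.warn("Unsupported command for parsing " + query);
        return;
    }
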
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/JobConfigurationAdaptorExecutor.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/JobConfigurationAdaptorExecutor.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/JobConfigurationAdaptorExecutor.java
index cceabc2..3bf4f29 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/JobConfigurationAdaptorExecutor.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/JobConfigurationAdaptorExecutor.java
@@ -61,7 +61,12 @@ public class JobConfigurationAdaptorExecutor extends JavaStormStreamExecutor2<St
         if (type.equals(ResourceType.JOB_CONFIGURATION)) {
             Map<String, String> configs = (Map<String, String>)input.get(3);
             if (filter.acceptJobConf(configs)) {
-                LOG.info("Got a hive job, jobID: " + jobId + ", query: " + configs.get(JobConstants.HIVE_QUERY_STRING));
+                if(LOG.isDebugEnabled()) {
+                    LOG.debug("Got a hive job, jobID: " + jobId + ", query: " + configs.get(JobConstants.HIVE_QUERY_STRING));
+                } else {
+                    LOG.info("Got a hive job, jobID: " + jobId);
+                }
+
                 Map<String, Object> map = convertMap(configs);
                 outputCollector.collect(new Tuple2(user, map));
             }

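The isDebugEnabled() guard above keeps the (potentially very long) query string out of the log unless debug is on. A hedged alternative sketch using SLF4J's parameterized form, which defers message formatting until the level check passes (note: unlike the patch, this emits the short INFO line even when debug is enabled):

    // Query string is only formatted into the message if DEBUG is enabled.
    LOG.debug("Got a hive job, jobID: {}, query: {}",
            jobId, configs.get(JobConstants.HIVE_QUERY_STRING));
    LOG.info("Got a hive job, jobID: {}", jobId);
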
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/ql/Parser.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/ql/Parser.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/ql/Parser.java
index 19543c5..5ab0f4b 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/ql/Parser.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/ql/Parser.java
@@ -76,7 +76,8 @@ public class Parser {
   private void parseQL(ASTNode ast) {
     switch (ast.getType()) {
     case HiveParser.TOK_QUERY:
-      visitSubtree(ast);
+      parseQueryClause(ast);
+      addTablesColumnsToMap(tableSet, columnSet);
       break;
 
     case HiveParser.TOK_UPDATE_TABLE:
@@ -85,14 +86,29 @@ public class Parser {
       break;
 
     case HiveParser.TOK_DELETE_FROM:
-      setOperation("DELETE FROM");
+      setOperation("DELETE");
+      visitSubtree(ast);
+      break;
+
+    case HiveParser.TOK_CREATETABLE:
+      setOperation("CREATE");
+      visitSubtree(ast);
+      break;
+
+    case HiveParser.TOK_DROPTABLE:
+      setOperation("DROP");
+      visitSubtree(ast);
+      break;
+
+    case HiveParser.TOK_ALTERTABLE:
+      setOperation("ALTER");
       visitSubtree(ast);
       break;
 
     default:
-      LOG.error("Unsupporting query operation " + ast.getType());
+      LOG.error("Unsupported query operation " + ast.getText());
       throw new IllegalStateException("Query operation is not supported "
-          + ast.getType());
+          + ast.getText());
     }
   }
 
@@ -102,51 +118,46 @@ public class Parser {
       for (Node n : ast.getChildren()) {
         ASTNode asn = (ASTNode)n;
         switch (asn.getToken().getType()) {
-        case HiveParser.TOK_TABNAME:
-          tableSet.add(ast.getChild(0).getChild(0).getText());
-          break;
-        case HiveParser.TOK_SET_COLUMNS_CLAUSE:
-          for (int i = 0; i < asn.getChildCount(); i++) {
-            addToColumnSet((ASTNode)asn.getChild(i).getChild(0));
-          }
-        case HiveParser.TOK_FROM:
-          parseFromClause((ASTNode)asn.getChild(0));
-          break;
-        case HiveParser.TOK_INSERT:
-          for (int i = 0; i < asn.getChildCount(); i++) {
-            parseInsertClause((ASTNode)asn.getChild(i));                           
-          }
-          break;
-        case HiveParser.TOK_UNIONTYPE: 
-          int childcount = asn.getChildCount();
-          for (int i = 0; i < childcount; i++) {    
-            parseQL((ASTNode)asn.getChild(i));
-          }
-          break;
+          case HiveParser.TOK_TABNAME:
+            //tableSet.add(ast.getChild(0).getChild(0).getText());
+            parserContent.getTableColumnMap().put(ast.getChild(0).getChild(0).getText(), new HashSet<>(columnSet));
+            break;
+          case HiveParser.TOK_SET_COLUMNS_CLAUSE:
+            for (int i = 0; i < asn.getChildCount(); i++) {
+              addToColumnSet((ASTNode) asn.getChild(i).getChild(0));
+            }
+            break;
+          case HiveParser.TOK_QUERY:
+            parseQueryClause(asn);
+            break;
+          case HiveParser.TOK_UNIONTYPE:
+          case HiveParser.TOK_UNIONALL:
+          case HiveParser.TOK_UNIONDISTINCT:
+            visitSubtree(asn);
+            break;
         }
       }
-
       // Add tableSet and columnSet to tableColumnMap
       addTablesColumnsToMap(tableSet, columnSet);
     }
   }
-  
-  private void parseSubQuery(ASTNode subQuery) {
-
-    int cc = 0;
-    int cp = 0;
-
-    switch (subQuery.getToken().getType()) {
-    case HiveParser.TOK_QUERY:
-      visitSubtree(subQuery);
-      break;
-    case HiveParser.TOK_UNIONTYPE: 
-      cc = subQuery.getChildCount();
 
-      for ( cp = 0; cp < cc; ++cp) {    
-        parseSubQuery((ASTNode)subQuery.getChild(cp));
+  private void parseQueryClause(ASTNode ast) {
+    int len = ast.getChildCount();
+    if (len > 0) {
+      for (Node n : ast.getChildren()) {
+        ASTNode asn = (ASTNode) n;
+        switch (asn.getToken().getType()) {
+          case HiveParser.TOK_FROM:
+            parseFromClause((ASTNode) asn.getChild(0));
+            break;
+          case HiveParser.TOK_INSERT:
+            for (int i = 0; i < asn.getChildCount(); i++) {
+              parseInsertClause((ASTNode) asn.getChild(i));
+            }
+            break;
+        }
       }
-      break;
     }
   }
 
@@ -168,7 +179,6 @@ public class Parser {
       setOperation("SELECT");
       parseSelectClause(ast);
       break;
-
     }
   }
 
@@ -188,7 +198,6 @@ public class Parser {
         String child_1 = qf.getChild(1).toString();
         tableAliasMap.put(child_1, child_0);
       }
-
       break;
 
     case HiveParser.TOK_LEFTOUTERJOIN:
@@ -207,31 +216,40 @@ public class Parser {
         ASTNode atm = (ASTNode)qf.getChild(cp);
         parseFromClause(atm);
       }
-      break;  
+      break;
+
     case HiveParser.TOK_SUBQUERY:
+      visitSubtree(qf);
+      break;
 
-      parseSubQuery((ASTNode)qf.getChild(0));
-      break;   
     case HiveParser.TOK_LATERAL_VIEW:
+    case HiveParser.TOK_LATERAL_VIEW_OUTER:
       cc = qf.getChildCount();
       for ( cp = 0; cp < cc; ++cp) {    
         parseFromClause((ASTNode)qf.getChild(cp));
       }
-      break;                                                          
+      break;
     }
   }
   
   private void parseSelectClause(ASTNode ast) {
     for (int i = 0; i < ast.getChildCount(); i++) {
-      ASTNode astmp = (ASTNode)ast.getChild(i);
-      switch (astmp.getChild(0).getType()) {
+      ASTNode selectXpr = (ASTNode)ast.getChild(i);
+      for(int j = 0; j < selectXpr.getChildCount(); j++) {
+        parseSelectExpr((ASTNode)selectXpr.getChild(j));
+      }
+    }
+  }
+
+  private void parseSelectExpr(ASTNode asn) {
+    switch (asn.getType()) {
       case HiveParser.TOK_TABLE_OR_COL:
-        addToColumnSet(astmp);
+        addToColumnSet((ASTNode)asn.getParent());
         break;
 
       case HiveParser.TOK_ALLCOLREF:
         String tableName;
-        ASTNode node = (ASTNode)astmp.getChild(0).getChild(0);
+        ASTNode node = (ASTNode)asn.getChild(0);
         if (node != null && node.getType() == HiveParser.TOK_TABNAME) {
           String strTemp = node.getChild(0).getText();
           tableName = convAliasToReal(tableAliasMap, strTemp);
@@ -249,41 +267,46 @@ public class Parser {
 
       case HiveParser.TOK_FUNCTION:
         // Traverse children to get TOK_TABLE_OR_COL
-        Set<String> tempSet = new HashSet<String>();
-        parseTokFunction(ast, tempSet);
+        Set<String> tempSet = new HashSet<>();
+        parseTokFunction(asn, tempSet);
         columnSet.addAll(tempSet);
         break;
 
       case HiveParser.TOK_FUNCTIONSTAR:
         break;
 
+      case HiveParser.DOT:
+        String tbAlias = asn.getChild(0).getChild(0).getText();
+        tableName = convAliasToReal(tableAliasMap, tbAlias);
+        String strTemp = asn.getChild(1).getText();
+        String col = convAliasToReal(columnAliasMap, strTemp);
+        addTableColumnToMap(tableName, col);
+        break;
+
       default:
-        if(astmp.getChild(0).getText().equals(".")) {
-          String tbAlias = astmp.getChild(0).getChild(0).getChild(0).getText();
-          tableName = convAliasToReal(tableAliasMap, tbAlias);
-          String strTemp = astmp.getChild(0).getChild(1).getText();
-          String col = convAliasToReal(columnAliasMap, strTemp);
-          addTableColumnToMap(tableName, col);
-        }
-        else {
-          tempSet = new HashSet<String>();
-          parseTokFunction(astmp, tempSet);
-          columnSet.addAll(tempSet);
+        if(asn.getChildCount() > 1) {
+          for(int i = 0; i < asn.getChildCount(); i++) {
+            parseSelectExpr((ASTNode)asn.getChild(i));
+          }
         }
-      }
     }
+
   }
 
   private void parseTokFunction(ASTNode ast, Set<String> set) {
-    if (ast.getType() == HiveParser.TOK_TABLE_OR_COL) {
-      String colRealName = convAliasToReal(columnAliasMap, ast.getChild(0).getText());
-      set.add(colRealName);
-    }
-    for (int i = 0; i < ast.getChildCount(); i++) {
-      ASTNode n = (ASTNode)ast.getChild(i);
-      if (n != null) {
-          parseTokFunction(n, set);
-      }
+    switch(ast.getType()) {
+      case HiveParser.TOK_TABLE_OR_COL:
+        String colRealName = convAliasToReal(columnAliasMap, ast.getChild(0).getText());
+        set.add(colRealName);
+        break;
+      case HiveParser.TOK_FUNCTION:
+        for (int i = 0; i < ast.getChildCount(); i++) {
+          ASTNode n = (ASTNode)ast.getChild(i);
+          if (n != null) {
+            parseTokFunction(n, set);
+          }
+        }
+        break;
     }
   }
 
@@ -313,15 +336,11 @@ public class Parser {
   private void addTablesColumnsToMap(Set<String> tbs, Set<String> cols) {
     Map<String, Set<String>> map = parserContent.getTableColumnMap();
     for (String tb : tbs) {
-      if (map != null) {
+      if (map != null && map.get(tb) != null) {
         Set<String> temp = map.get(tb);
-        if (temp != null) {
-          Set<String> value = new HashSet<String>(temp);
-          value.addAll(cols);
-          map.put(tb, value);
-        } else {
-          map.put(tb, cols);
-        }
+        Set<String> value = new HashSet<>(temp);
+        value.addAll(cols);
+        map.put(tb, value);
       } else {
         map.put(tb, cols);
       }

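For reference, the parser entry point that the tests below exercise. A minimal usage sketch; the expected values are taken from testSelectStatement and are not guaranteed for other query shapes:

    // Hedged usage sketch of the parser API shown in this commit.
    Parser parser = new Parser();
    // run(...) throws Exception (e.g. a HiveQL ParseException) on invalid input.
    HiveQLParserContent content = parser.run(
        "select * from cts_common_prod_sd_2015060600_ed_2015071300 limit 1");
    content.getOperation();      // "SELECT"
    content.getInsertTable();    // "TOK_TMP_FILE" when there is no INSERT target
    content.getTableColumnMap(); // {cts_common_prod_sd_2015060600_ed_2015071300=[*]}
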
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/de30b151/eagle-security/eagle-security-hive/src/test/java/org/apache/eagle/security/hive/ql/TestParser.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/test/java/org/apache/eagle/security/hive/ql/TestParser.java b/eagle-security/eagle-security-hive/src/test/java/org/apache/eagle/security/hive/ql/TestParser.java
index 4e920b1..f99d520 100644
--- a/eagle-security/eagle-security-hive/src/test/java/org/apache/eagle/security/hive/ql/TestParser.java
+++ b/eagle-security/eagle-security-hive/src/test/java/org/apache/eagle/security/hive/ql/TestParser.java
@@ -16,14 +16,8 @@
  */
 package org.apache.eagle.security.hive.ql;
 
-import static org.junit.Assert.*;
-
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -32,8 +26,6 @@ import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class TestParser {
   //private static final Logger LOG = LoggerFactory.getLogger(TestParser.class);
@@ -53,10 +45,14 @@ public class TestParser {
 
     Assert.assertTrue("Query operations are not matched.",
         content.getOperation().equals(expectedOperation));
-    Assert.assertTrue("Insert tables are not matched.",
-        content.getInsertTable().equals(expectedInsertTable));
-    //Assert.assertTrue("Table and column mapping is incorrect.",
-    //    content.getTableColumnMap().equals(expectedTableColumn));
+    if(content.getInsertTable() != null && expectedInsertTable != null) {
+        Assert.assertTrue("Insert tables are not matched.",
+                content.getInsertTable().equals(expectedInsertTable));
+    }
+    if(content.getTableColumnMap() != null && expectedTableColumn != null) {
+        Assert.assertTrue("Table and column mapping is incorrect.",
+                content.getTableColumnMap().equals(expectedTableColumn));
+    }
   }
 
   private void printTree(ASTNode root, int indent) {
@@ -83,7 +79,7 @@ public class TestParser {
         + "and date_key>=2015071400 and date_key<=2015071300";
     String expectedOperation = "SELECT";
     String expectedInsertTable = "TOK_TMP_FILE";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
     Set<String> set = new HashSet<String>();
     set.add("*");
     expectedTableColumn.put("cts_common_prod_sd_2015060600_ed_2015071300", set);
@@ -98,7 +94,7 @@ public class TestParser {
         + "where action='IM' and dt='20150615'";
     String expectedOperation = "SELECT DISTINCT";
     String expectedInsertTable = "TOK_TMP_FILE";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = null;
     Set<String> set = new HashSet<String>();
     set.add("action_timestamp");
     set.add("exchange_id");
@@ -115,7 +111,7 @@ public class TestParser {
         + "FROM FlightInfo2008 WHERE Month=1";
     String expectedOperation = "SELECT";
     String expectedInsertTable = "myflightinfo";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
     Set<String> set = new HashSet<String>();
     set.add("Year");
     set.add("Month");
@@ -125,21 +121,21 @@ public class TestParser {
     _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
   }
 
-  //@Test
+  @Test
   public void testSelectExprStatement() throws Exception {
-    String query = "INSERT OVERWRITE TABLE top_level_viewer_dpms select scandate , pathtype , pathname , pathlevel , spacesize * 3 , diskspacequota , 0 pathsize_increase , namespacequota , filecount , dircount , username , groupname, 'XYZ' system from hdfsdu where asofdate = '20150908' and pathlevel <= 3";
-    String expectedOperation = "INSERT";
+    String query = "INSERT OVERWRITE TABLE top_level_viewer_dpms select scandate , pathtype , pathname , pathlevel , spacesize * 3 , diskspacequota , pathsize_increase , namespacequota , filecount , dircount , username , groupname, 'XYZ' system from hdfsdu where asofdate = '20150908' and pathlevel <= 3";
+    String expectedOperation = "SELECT";
     String expectedInsertTable = "top_level_viewer_dpms";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = null;
     _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
   }
 
   @Test
   public void testAliaTableStatement() throws Exception {
-    String query = "select a.phone_number  from customer_details a, call_detail_records b where a.phone_number=b.phone_number";
+    String query = "select a.phone_number from customer_details a, call_detail_records b where a.phone_number=b.phone_number";
     String expectedOperation = "SELECT";
     String expectedInsertTable = "TOK_TMP_FILE";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = null;
     _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
   }
 
@@ -148,8 +144,124 @@ public class TestParser {
     String query = "SELECT upper(name), salary, deductions[\"Federal Taxes\"] as fed_taxes, round(salary * (1 - deductions[\"Federal Taxes\"])) as salary_minus_fed_taxes FROM employees LIMIT 2";
     String expectedOperation = "SELECT";
     String expectedInsertTable = "TOK_TMP_FILE";
-    Map<String, Set<String>> expectedTableColumn = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> expectedTableColumn = null;
     _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
   }
 
+    @Test
+    public void testFromStatement1() throws Exception {
+        String query = "INSERT OVERWRITE TABLE p13nquality.cust_dna_vq_cat_feed_target PARTITION ( dt='20151121')\n" +
+                "select distinct user_id, concat(categ_id,get_json_object(dep3, '$.categ_id'), level_id, get_json_object(dep3, '$.level_id'), site_id, get_json_object(dep3, '$.site_id')) from (\n" +
+                "select user_id, if(instr(dep2, \"name\")>0, get_json_object(dep2, '$.vq_rank'), dep2) dep3 from (\n" +
+                "select user_id, if(instr(dep1, \"name\")>0, concat(dep1, \"}\"), dep1) dep2\n" +
+                "from (\n" +
+                "select user_id , split(regexp_replace(regexp_replace(department, \"\\\\[|\\\\]\", \"\"), \"},\" , \"}|\"), \"\\\\|\") as dep\n" +
+                "from\n" +
+                "(select user_id, BEAvroParser(record_value)['department'] as department\n" +
+                "from p13n_user_hour_queue\n" +
+                "where dt between '20151121' and '20151122'\n" +
+                "and schema_name = 'cust_dna_vq_cat_feed') a ) b\n" +
+                "lateral view outer explode(dep) c as dep1) d ) e";
+        String expectedOperation = "SELECT";
+        String expectedInsertTable = "p13nquality";
+        Map<String, Set<String>> expectedTableColumn = null;
+        _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
+    }
+
+    @Test
+    public void testFromStatement2() throws Exception {
+        String query = "insert overwrite table bm2_item4_5\n" +
+                "SELECT dt,cobrand,device_type,geo_ind,byr_region,slr_id,slr_region,item_format,price_tranche,vertical,sap_category_id,site_id,user_seg,sort,page_id,page_number,item_rank,relist_flag,app_name,SUM(impr_cnt) impr_cnt\n" +
+                "FROM ( SELECT\n" +
+                "dt,\n" +
+                "cobrand,\n" +
+                "device_type,\n" +
+                "geo_ind,\n" +
+                "byr_region,\n" +
+                "slr_id,\n" +
+                "slr_region,\n" +
+                "item_format,\n" +
+                "price_tranche,\n" +
+                "vertical,\n" +
+                "sap_category_id,\n" +
+                "site_id,\n" +
+                "user_seg,\n" +
+                "sort,\n" +
+                "page_id,\n" +
+                "page_number,\n" +
+                "item_rank,\n" +
+                "relist_flag,\n" +
+                "item_rank,\n" +
+                "relist_flag,\n" +
+                "app_name,\n" +
+                "impr_cnt\n" +
+                "FROM  (SELECT * FROM bm2_item3_5 WHERE user_id > 0) a\n" +
+                "LEFT JOIN bm2_user_SEG_lkup_5 b\n" +
+                "ON a.user_id=b.user_id\n" +
+                "UNION ALL\n" +
+                "SELECT dt,cobrand,device_type,geo_ind,byr_region,slr_id,slr_region,item_format,price_tranche,vertical,sap_category_id,site_id,'NA' AS user_seg,sort,page_id,page_number,item_rank,relist_flag,app_name,impr_cnt\n" +
+                "FROM bm2_item3_5 WHERE user_id < 0\n" +
+                ")a";
+        String expectedOperation = "SELECT";
+        String expectedInsertTable = "bm2_item4_5";
+        Map<String, Set<String>> expectedTableColumn = null;
+        _testParsingQuery(query, expectedOperation, expectedInsertTable, expectedTableColumn);
+    }
+
+    @Test
+    public void testCreateTable() throws Exception {
+        String query = "CREATE TABLE page_view(viewTime INT, userid BIGINT,\n" +
+                "                page_url STRING, referrer_url STRING,\n" +
+                "                ip STRING COMMENT 'IP Address of the User')\n" +
+                "COMMENT 'This is the page view table'\n" +
+                "PARTITIONED BY(dt STRING, country STRING)\n" +
+                "STORED AS SEQUENCEFILE";
+        String expectedOperation = "CREATE";
+        _testParsingQuery(query, expectedOperation, null, null);
+    }
+
+    @Test
+    public void testCreateTable2() throws Exception {
+        String query = "CREATE TABLE mf_cguid_to_uid_step2 as\n" +
+                "        SELECT\n" +
+                "                user_id,\n" +
+                "                max(ts) as max_ts\n" +
+                "        FROM\n" +
+                "                mf_cguid_to_uid_step1\n" +
+                "        GROUP BY user_id";
+        String expectedOperation = "SELECT";
+        _testParsingQuery(query, expectedOperation, null, null);
+
+    }
+
+    @Test
+    public void testAlterTable() throws Exception {
+        String query = "ALTER TABLE pv_users DROP PARTITION (ds='2008-08-08')";
+        String expectedOperation = "ALTER";
+        _testParsingQuery(query, expectedOperation, null, null);
+    }
+
+    @Test
+    public void testDropTable() throws Exception {
+        String query = "DROP TABLE pv_users";
+        String expectedOperation = "DROP";
+        _testParsingQuery(query, expectedOperation, null, null);
+    }
+
+    @Test
+    public void testUnionAll() throws Exception {
+        String query = "INSERT OVERWRITE TABLE be_view_event_user_t1_miss PARTITION ( dt='20151125', hour='06') select a.uid ,a.site_id ,a.page_id ,a.curprice ,a.itm ,a.itmcond ,a.itmtitle ,a.l1 ,a.l2 ,a.leaf ,a.meta ,a.st ,a.dc ,a.tr ,a.eventtimestamp ,a.cln ,a.siid ,a.ciid ,a.sellerid ,a.pri from (select a1.* from (select * from soj_view_event where dt='20151125' and hour='06') a1 inner join (select uid from soj_viewitem_event_uid_sample where dt='20151125' and hour='06') b1 on a1.uid = b1.uid) a left outer join (select c.* from (select * from be_viewitem_event_user_t1 where dt='20151125' and hour='06') a2 lateral view json_tuple(a2.values, 'u', 'site_id', 'p', 'current_price', 'item_id', 'item_condition', 'item_title', 'l1_cat_id', 'l2_cat_id', 'leaf_cat_id', 'meta_cat_id','sale_type_enum', 'shipping_country_id', 'time_remain_secs', 'timestamp', 'collection_id', 'source_impression_id', 'current_impression_id', 'sellerid' ) c as uid, site_id, page_id, curprice, itm, itmcond, itmti
 tle, l1, l2, leaf, meta, st, dc, tr, eventtimestamp, cln, siid, ciid, sellerid ) b on a.uid = b.uid and a.site_id = b.site_id and a.page_id = b.page_id and coalesce(a.curprice, 'null') = coalesce(b.curprice, 'null') and coalesce(a.itm, 'null') = coalesce(b.itm, 'null') and coalesce(a.itmcond, 'null') = coalesce(b.itmcond, 'null') and coalesce(trim(reflect(\"java.net.URLDecoder\", \"decode\",regexp_replace(a.itmtitle,\"\\\\+\",\" \"),\"utf-8\")),'null') = coalesce(trim(b.itmtitle),'null') and coalesce(a.l1, 'null') = coalesce(b.l1, 'null') and coalesce(a.l2,'null') = coalesce(b.l2,'null') and coalesce(a.leaf,'null') = coalesce(b.leaf,'null') and coalesce(a.meta,'null') = coalesce(b.meta,'null') and coalesce(a.st,'null') = coalesce(b.st,'null') and coalesce(a.dc,'null') = coalesce(b.dc,'null') and coalesce(reflect(\"java.net.URLDecoder\", \"decode\",a.tr,\"utf-8\"),'null') = coalesce(b.tr,'null') and a.eventtimestamp = b.eventtimestamp and coalesce(a.cln,'null') = coalesce(b.cln,'null
 ') and coalesce(a.siid,'null') = coalesce(b.siid,'null') and coalesce(a.ciid,'null') = coalesce(b.ciid,'null') and coalesce(a.sellerid, 'null') = coalesce(b.sellerid, 'null') where b.uid is null distribute by a.uid sort by a.uid , a.eventtimestamp";
+        String expectedOperation = "SELECT";
+        String expectedInsertTable = "TOK_TMP_FILE";
+        _testParsingQuery(query, expectedOperation, expectedInsertTable, null);
+    }
+
+    @Test
+    public void testLateralView() throws Exception {
+        String query = "select game_id, user_id from test_lateral_view_shengli lateral view explode(split(userl_ids,'\\\\[\\\\[\\\\[')) snTable as user_id";
+        String expectedOperation = "SELECT";
+        String expectedInsertTable = "TOK_TMP_FILE";
+        _testParsingQuery(query, expectedOperation, expectedInsertTable, null);
+    }
+
 }