Posted to commits@hive.apache.org by cw...@apache.org on 2011/12/15 08:03:01 UTC

svn commit: r1214636 - in /hive/trunk: jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/

Author: cws
Date: Thu Dec 15 07:03:00 2011
New Revision: 1214636

URL: http://svn.apache.org/viewvc?rev=1214636&view=rev
Log:
HIVE-727. Hive Server getSchema() returns wrong schema for 'Explain' queries (Prasad Mujumdar via cws)

Modified:
    hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
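
For readers who want to check the fix end to end, here is a minimal sketch of a JDBC client inspecting the metadata of an EXPLAIN statement, mirroring the test added below. The driver class, connection URL, and the table name 'src' are illustrative assumptions for a local pre-HiveServer2 server on port 10000; none of them come from this commit.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.Statement;

    public class ExplainSchemaCheck {
      public static void main(String[] args) throws Exception {
        // Assumed driver class and URL for a local HiveServer (HiveServer1 era).
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection con = DriverManager.getConnection(
            "jdbc:hive://localhost:10000/default", "", "");
        Statement stmt = con.createStatement();
        // 'src' is a placeholder table; any existing table works.
        ResultSet res = stmt.executeQuery("explain select * from src");
        ResultSetMetaData md = res.getMetaData();
        // With this patch the metadata reports a single string column named "Explain"
        // rather than the schema of the query being explained.
        System.out.println(md.getColumnCount() + " column(s); label: " + md.getColumnLabel(1));
        res.close();
        stmt.close();
        con.close();
      }
    }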

Modified: hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Thu Dec 15 07:03:00 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.jdbc;
 
+import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
 
 import java.sql.Connection;
@@ -198,6 +199,24 @@ public class TestJdbcDriver extends Test
         expectedException);
   }
 
+  /**
+   * verify 'explain ...' resultset
+   * @throws SQLException
+   */
+  public void testExplainStmt() throws SQLException {
+    Statement stmt = con.createStatement();
+
+    ResultSet res = stmt.executeQuery(
+        "explain select c1, c2, c3, c4, c5 as a, c6, c7, c8, c9, c10, c11, c12, " +
+        "c1*2, sentences(null, null, null) as b from " + dataTypeTableName + " limit 1");
+
+    ResultSetMetaData md = res.getMetaData();
+    assertEquals(md.getColumnCount(), 1); // only one result column
+    assertEquals(md.getColumnLabel(1), EXPL_COLUMN_NAME); // verify the column name
+    //verify that there is data in the resultset
+    assertTrue("Nothing returned explain", res.next());
+  }
+
   public void testPrepareStatement() {
 
     String sql = "from (select count(1) from "
@@ -1016,22 +1035,22 @@ public class TestJdbcDriver extends Test
    * validate schema generated by "set" command
    * @throws SQLException
    */
-public void testSetCommand() throws SQLException {
-  // execute set command
-  String sql = "set -v";
-  Statement stmt = con.createStatement();
-  ResultSet res = stmt.executeQuery(sql);
-
-  // Validate resultset columns
-  ResultSetMetaData md = res.getMetaData() ;
-  assertEquals(1, md.getColumnCount());
-  assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
+  public void testSetCommand() throws SQLException {
+    // execute set command
+    String sql = "set -v";
+    Statement stmt = con.createStatement();
+    ResultSet res = stmt.executeQuery(sql);
+
+    // Validate resultset columns
+    ResultSetMetaData md = res.getMetaData() ;
+    assertEquals(1, md.getColumnCount());
+    assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
 
-  //check if there is data in the resultset
-  assertTrue("Nothing returned by set -v", res.next());
+    //check if there is data in the resultset
+    assertTrue("Nothing returned by set -v", res.next());
 
-  res.close();
-  stmt.close();
+    res.close();
+    stmt.close();
   }
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Thu Dec 15 07:03:00 2011
@@ -18,21 +18,25 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import static org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME;
+
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.io.Serializable;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.plan.Explain;
@@ -50,7 +54,7 @@ import java.lang.reflect.InvocationTarge
  **/
 public class ExplainTask extends Task<ExplainWork> implements Serializable {
   private static final long serialVersionUID = 1L;
-
+  public static final String EXPL_COLUMN_NAME = "Explain";
   public ExplainTask() {
     super();
   }
@@ -647,4 +651,14 @@ public class ExplainTask extends Task<Ex
     throw new RuntimeException("Unexpected call");
   }
 
+  public List<FieldSchema> getResultSchema() {
+    FieldSchema tmpFieldSchema = new FieldSchema();
+    List<FieldSchema> colList = new ArrayList<FieldSchema>();
+
+    tmpFieldSchema.setName(EXPL_COLUMN_NAME);
+    tmpFieldSchema.setType(STRING_TYPE_NAME);
+
+    colList.add(tmpFieldSchema);
+    return colList;
+  }
 }
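
A minimal sketch of exercising the new accessor directly, assuming only this revision's classes on the classpath: ExplainTask.getResultSchema() should report exactly one string column named by EXPL_COLUMN_NAME.

    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.exec.ExplainTask;

    public class ExplainResultSchemaSketch {
      public static void main(String[] args) {
        List<FieldSchema> cols = new ExplainTask().getResultSchema();
        // Expected: one entry, name "Explain", type "string".
        FieldSchema col = cols.get(0);
        System.out.println(cols.size() + " -> " + col.getName() + " : " + col.getType());
      }
    }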

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Thu Dec 15 07:03:00 2011
@@ -29,6 +29,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -507,4 +508,8 @@ public abstract class Task<T extends Ser
   public String getJobID() {
     return jobID;
   }
+
+  public List<FieldSchema> getResultSchema() {
+    return null;
+  }
 }
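
The base class now exposes getResultSchema() with a null default, so tasks that do not produce client-visible rows need no change. A hedged sketch of the pattern an overriding task could follow; the class and column names below are purely illustrative and not part of the commit:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;

    public class TaskResultSchemaPattern {
      // Body a hypothetical Task subclass might return from its getResultSchema() override.
      public static List<FieldSchema> sampleSchema() {
        List<FieldSchema> cols = new ArrayList<FieldSchema>();
        cols.add(new FieldSchema("status", "string", null)); // illustrative column
        cols.add(new FieldSchema("detail", "string", null)); // illustrative column
        return cols;
      }

      public static void main(String[] args) {
        for (FieldSchema f : sampleSchema()) {
          System.out.println(f.getName() + " : " + f.getType());
        }
      }
    }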

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1214636&r1=1214635&r2=1214636&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Thu Dec 15 07:03:00 2011
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
  *
  */
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
+  List<FieldSchema> fieldList;
 
   public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
@@ -47,7 +49,7 @@ public class ExplainSemanticAnalyzer ext
         .getChild(0));
     sem.analyze((ASTNode) ast.getChild(0), ctx);
     sem.validate();
-    
+
     boolean extended = false;
     boolean formatted = false;
     if (ast.getChildCount() == 2) {
@@ -68,12 +70,20 @@ public class ExplainSemanticAnalyzer ext
       tasks.add(fetchTask);
     }
 
-    rootTasks.add(
-      TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
-        tasks, 
-        ((ASTNode) ast.getChild(0)).toStringTree(), 
+    Task<? extends Serializable> explTask =
+        TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
+        tasks,
+        ((ASTNode) ast.getChild(0)).toStringTree(),
         extended,
-        formatted), 
-      conf));
+        formatted),
+      conf);
+
+    fieldList = explTask.getResultSchema();
+    rootTasks.add(explTask);
+  }
+
+  @Override
+  public List<FieldSchema> getResultSchema() {
+    return fieldList;
   }
 }
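
ExplainSemanticAnalyzer now caches the explain task's schema in fieldList and hands it back from getResultSchema(), which is presumably the hook the server side uses to answer getSchema() for EXPLAIN statements without executing the plan (the Driver-side wiring is not part of this diff). As a hedged sketch of what a client-facing Thrift Schema built from that field list would contain, assuming the metastore API types of this era:

    import java.util.HashMap;
    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.metastore.api.Schema;
    import org.apache.hadoop.hive.ql.exec.ExplainTask;

    public class ExplainThriftSchemaSketch {
      public static void main(String[] args) {
        // The same list the analyzer captures from the explain task.
        List<FieldSchema> fieldList = new ExplainTask().getResultSchema();
        Schema schema = new Schema(fieldList, new HashMap<String, String>());
        // For an EXPLAIN query the schema now carries the single "Explain" string column.
        System.out.println(schema.getFieldSchemas());
      }
    }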