You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2009/09/22 20:58:21 UTC

svn commit: r817769 - in /hadoop/hive/trunk: ./ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/parse/ service/if/ service/src/gen-javabe...

Author: namit
Date: Tue Sep 22 18:58:10 2009
New Revision: 817769

URL: http://svn.apache.org/viewvc?rev=817769&view=rev
Log:
HIVE-795. Better error messages from Hive Server (Bill Graham via namit)

Summary:

Trac Bug: #

Blame Rev:

Reviewed By: namit

Test Plan:

Revert Plan:

Database Impact:

Memcache Impact:

Other Notes:

EImportant:

- begin *PUBLIC* platform impact section -
Bugzilla: #
- end platform impact -


Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
    hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
    hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
    hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
    hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/service/if/hive_service.thrift
    hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
    hadoop/hive/trunk/service/src/gen-php/hive_service_types.php
    hadoop/hive/trunk/service/src/gen-py/hive_service/ttypes.py
    hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Sep 22 18:58:10 2009
@@ -132,6 +132,8 @@
     HIVE-817. hive.hwi.war.file incorrectly tries to use environment variables to locate war file
     (Edward Capriolo via namit)
 
+    HIVE-795. Better error messages from Hive Server (Bill Graham via namit)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java Tue Sep 22 18:58:10 2009
@@ -43,8 +43,8 @@
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
-import org.apache.hadoop.hive.conf.HiveConf;
-import java.net.URI;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 
 public class HiveConnection implements java.sql.Connection {
   JdbcSessionState session;
@@ -56,25 +56,32 @@
 
   private static final String URI_PREFIX = "jdbc:hive://";
   /**
-   * TODO: - throw more specific exception
-   *       - parse uri (use java.net.URI?)
+   * TODO: - parse uri (use java.net.URI?)
    */
-  public HiveConnection(String uri, Properties info) throws Exception {
+  public HiveConnection(String uri, Properties info) throws SQLException {
     session = new JdbcSessionState(new HiveConf(SessionState.class));
     session.in = null;
     session.out = null;
     session.err = null;
     SessionState.start(session);
+    String originalUri = uri;
 
     if (!uri.startsWith(URI_PREFIX)) {
-      throw new Exception("Invalid URL: " + uri);
+      throw new SQLException("Invalid URL: " + uri, "08S01");
     }
+
     // remove prefix
     uri = uri.substring(URI_PREFIX.length());
 
     // If uri is not specified, use local mode.
     if (uri.isEmpty()) {
-      client = new HiveServer.HiveServerHandler();
+      try {
+        client = new HiveServer.HiveServerHandler();
+      }
+      catch (MetaException e) {
+        throw new SQLException("Error accessing Hive metastore: " +
+                                e.getMessage(), "08S01");
+      }
     } else {
       // parse uri
       // form: hostname:port/databasename
@@ -92,7 +99,13 @@
       transport = new TSocket(host, port);
       TProtocol protocol = new TBinaryProtocol(transport);
       client = new HiveClient(protocol);
-      transport.open();
+      try {
+        transport.open();
+      }
+      catch (TTransportException e) {
+        throw new SQLException("Could not establish connecton to " +
+                                originalUri + ": " + e.getMessage(), "08S01");
+      }
     }
     isClosed = false;
   }

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java Tue Sep 22 18:58:10 2009
@@ -22,6 +22,7 @@
 import java.sql.ResultSet;
 import java.sql.RowIdLifetime;
 import java.sql.SQLException;
+import java.sql.DatabaseMetaData;
 import java.net.URL;
 import java.util.jar.Manifest;
 import java.util.jar.Attributes;
@@ -619,8 +620,7 @@
    */
 
   public int getSQLStateType() throws SQLException {
-    // TODO Auto-generated method stub
-    throw new SQLException("Method not supported");
+    return DatabaseMetaData.sqlStateSQL99;
   }
 
   /* (non-Javadoc)

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Tue Sep 22 18:58:10 2009
@@ -105,11 +105,7 @@
 
 
   public Connection connect(String url, Properties info) throws SQLException {
-    try {
-      return new HiveConnection(url, info);
-    } catch (Exception ex) {
-      throw new SQLException(ex.toString());
-    }
+    return new HiveConnection(url, info);
   }
 
   /**

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java Tue Sep 22 18:58:10 2009
@@ -24,6 +24,7 @@
 import java.sql.SQLWarning;
 import java.util.Vector;
 import org.apache.hadoop.hive.service.HiveInterface;
+import org.apache.hadoop.hive.service.HiveServerException;
 
 public class HiveStatement implements java.sql.Statement {
   JdbcSessionState session;
@@ -167,9 +168,13 @@
     try {
       this.resultSet = null;
       client.execute(sql);
-    } catch (Exception ex) {
-      throw new SQLException(ex.toString());
+    } 
+    catch (HiveServerException e) {
+      throw new SQLException(e.getMessage(), e.getSQLState(), e.getErrorCode());
     }
+    catch (Exception ex) {
+      throw new SQLException(ex.toString(), "08S01");
+    }                     
     this.resultSet = new HiveResultSet(client, maxRows);
     return this.resultSet;
   }

Modified: hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Sep 22 18:58:10 2009
@@ -199,6 +199,57 @@
     assertTrue("Statement should be closed", stmt.isClosed());
   }
 
+  public void testErrorMessages() throws SQLException {
+    String invalidSyntaxSQLState = "42000";
+    int parseErrorCode = 10;
+
+    //These tests inherently cause exceptions to be written to the test output
+    //logs. This is undesirable, since it might appear to someone looking
+    //at the test output logs as if something is failing when it isn't. Not sure
+    //how to get around that.
+    doTestErrorCase("SELECTT * FROM " + tableName,
+            "cannot recognize input 'SELECTT'",
+            invalidSyntaxSQLState, 11);
+    doTestErrorCase("SELECT * FROM some_table_that_does_not_exist",
+            "Table not found", "42S02", parseErrorCode);
+    doTestErrorCase("drop table some_table_that_does_not_exist",
+            "Table not found", "42S02", parseErrorCode);
+    doTestErrorCase("SELECT invalid_column FROM " + tableName,
+            "Invalid Table Alias or Column Reference",
+            invalidSyntaxSQLState, parseErrorCode);
+    doTestErrorCase("SELECT invalid_function(key) FROM " + tableName,
+            "Invalid Function", invalidSyntaxSQLState, parseErrorCode);
+
+    //TODO: execute errors like this currently don't return good messages (i.e.
+    //'Table already exists'). This is because the Driver class calls
+    //Task.executeTask() which swallows meaningful exceptions and returns a status
+    //code. This should be refactored.
+    doTestErrorCase("create table " + tableName + " (key int, value string)",
+            "Query returned non-zero code: 9, cause: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask",
+            "08S01", 9);
+  }
+
+  private void doTestErrorCase(String sql, String expectedMessage,
+                                           String expectedSQLState,
+                                           int expectedErrorCode) throws SQLException {
+    Statement stmt = con.createStatement();
+    boolean exceptionFound = false;
+    try {
+      stmt.executeQuery(sql);
+    }
+    catch(SQLException e) {
+      assertTrue("Adequate error messaging not found for '" + sql + "': " +
+              e.getMessage(), e.getMessage().contains(expectedMessage));
+      assertEquals("Expected SQLState not found for '" + sql + "'",
+              expectedSQLState, e.getSQLState());
+      assertEquals("Expected error code not found for '" + sql + "'",
+              expectedErrorCode, e.getErrorCode());
+      exceptionFound = true;
+    }
+
+    assertNotNull("Exception should have been thrown for query: " + sql, exceptionFound);
+  }
+
   public void testShowTables() throws SQLException {
     Statement stmt = con.createStatement();
     assertNotNull("Statement is null", stmt);
@@ -239,6 +290,7 @@
 
     assertEquals("Hive", meta.getDatabaseProductName());
     assertEquals("0", meta.getDatabaseProductVersion());
+    assertEquals(DatabaseMetaData.sqlStateSQL99, meta.getSQLStateType());
     assertNull(meta.getProcedures(null, null, null));
     assertFalse(meta.supportsCatalogsInTableDefinitions());
     assertFalse(meta.supportsSchemasInTableDefinitions());

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue Sep 22 18:58:10 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.exec.ExecDriver;
@@ -69,6 +70,8 @@
   private DataInput resStream;
   private Context ctx;
   private QueryPlan plan;
+  private String errorMessage;
+  private String SQLState;
 
   public int countJobs(List<Task<? extends Serializable>> tasks) {
     return countJobs(tasks, new ArrayList<Task<? extends Serializable>>());
@@ -255,16 +258,21 @@
       plan = new QueryPlan(command, sem);
       return (0);
     } catch (SemanticException e) {
-      console.printError("FAILED: Error in semantic analysis: "
-          + e.getMessage(), "\n"
+      errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+      SQLState = ErrorMsg.findSQLState(e.getMessage());
+      console.printError(errorMessage, "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return (10);
     } catch (ParseException e) {
-      console.printError("FAILED: Parse Error: " + e.getMessage(), "\n"
+      errorMessage = "FAILED: Parse Error: " + e.getMessage();
+      SQLState = ErrorMsg.findSQLState(e.getMessage());
+      console.printError(errorMessage, "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return (11);
     } catch (Exception e) {
-      console.printError("FAILED: Unknown exception : " + e.getMessage(), "\n"
+      errorMessage = "FAILED: Unknown exception: " + e.getMessage();
+      SQLState = ErrorMsg.findSQLState(e.getMessage());
+      console.printError(errorMessage, "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return (12);
     }
@@ -276,13 +284,50 @@
   public QueryPlan getPlan() {
     return plan;
   }
-  
+
   public int run(String command) {
+    DriverResponse response = runCommand(command);
+    return response.getResponseCode();
+  }
+
+  public DriverResponse runCommand(String command) {
+    errorMessage = null;
+    SQLState = null;
+
     int ret = compile(command);
     if (ret != 0)
-      return (ret);
+      return new DriverResponse(ret, errorMessage, SQLState);
+
+    ret = execute();
+    if (ret != 0)
+      return new DriverResponse(ret, errorMessage, SQLState);
+
+    return new DriverResponse(ret);
+  }
 
-    return execute();
+  /**
+   * Encapsulates the basic response info returned by the Driver. Typically
+   * <code>errorMessage</code> and <code>SQLState</code> will only be set if
+   * the <code>responseCode</code> is not 0.
+   */
+  public class DriverResponse {
+    private int responseCode;
+    private String errorMessage;
+    private String SQLState;
+
+    public DriverResponse(int responseCode) {
+      this(responseCode, null, null);
+    }
+
+    public DriverResponse(int responseCode, String errorMessage, String SQLState) {
+      this.responseCode = responseCode;
+      this.errorMessage = errorMessage;
+      this.SQLState = SQLState;
+    }
+
+    public int getResponseCode() { return responseCode; }
+    public String getErrorMessage() { return errorMessage; }
+    public String getSQLState() { return SQLState; }
   }
 
   private List<PreExecute> getPreExecHooks() throws Exception {
@@ -384,8 +429,11 @@
           SessionState.get().getHiveHistory().endTask(queryId, tsk);
         }
         if (exitVal != 0) {
-          console.printError("FAILED: Execution Error, return code " + exitVal
-              + " from " + tsk.getClass().getName());
+          //TODO: This error messaging is not very informative. Fix that.
+          errorMessage = "FAILED: Execution Error, return code " + exitVal
+                         + " from " + tsk.getClass().getName();
+          SQLState = "08S01";
+          console.printError(errorMessage);
           return 9;
         }
 
@@ -413,7 +461,10 @@
       if (SessionState.get() != null)
         SessionState.get().getHiveHistory().setQueryProperty(queryId,
             Keys.QUERY_RET_CODE, String.valueOf(12));
-      console.printError("FAILED: Unknown exception : " + e.getMessage(), "\n"
+      //TODO: do better with handling types of Exception here
+      errorMessage = "FAILED: Unknown exception : " + e.getMessage();
+      SQLState = "08S01";
+      console.printError(errorMessage, "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return (12);
     } finally {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Tue Sep 22 18:58:10 2009
@@ -20,13 +20,25 @@
 
 import org.antlr.runtime.tree.*;
 
+import java.util.Map;
+import java.util.HashMap;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
 /**
  * List of error messages thrown by the parser
  **/
 
 public enum ErrorMsg {
+  //SQLStates are taken from Section 12.5 of ISO-9075.
+  //See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt
+  //Most will just rollup to the generic syntax error state of 42000, but
+  //specific errors can override that state.
+  //See this page for how MySQL uses SQLState codes:
+  //http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
+
   GENERIC_ERROR("Exception while processing"),
-  INVALID_TABLE("Table not found"),
+  INVALID_TABLE("Table not found", "42S02"),
   INVALID_COLUMN("Invalid Column Reference"),
   INVALID_TABLE_OR_COLUMN("Invalid Table Alias or Column Reference"),
   AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous Table Alias or Column Reference"),
@@ -39,7 +51,7 @@
   INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
   INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
   INVALID_ARGUMENT("Wrong Arguments"),
-  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch"),
+  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch", "21000"),
   INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
   INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
   INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
@@ -87,8 +99,79 @@
   NEED_PARTITION_ERROR("need to specify partition columns because the destination table is partitioned.");
 
   private String mesg;
+  private String SQLState;
+
+  private static char SPACE = ' ';
+  private static Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*line [0-9]+:[0-9]+ (.*)");
+  private static Map<String, ErrorMsg> mesgToErrorMsgMap = new HashMap<String, ErrorMsg>();
+  private static int minMesgLength = -1;
+
+  static {
+    for (ErrorMsg errorMsg : values()) {
+      mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
+
+      int length = errorMsg.getMsg().trim().length();
+      if (minMesgLength == -1 || length < minMesgLength)
+        minMesgLength = length;
+    }
+  }
+
+  /**
+   * For a given error message string, searches for a <code>ErrorMsg</code>
+   * enum that appears to be a match. If an match is found, returns the
+   * <code>SQLState</code> associated with the <code>ErrorMsg</code>. If a match
+   * is not found or <code>ErrorMsg</code> has no <code>SQLState</code>, returns
+   * the <code>SQLState</code> bound to the <code>GENERIC_ERROR</code>
+   * <code>ErrorMsg</code>.
+   *
+   * @param mesg An error message string
+   * @return SQLState
+   */
+  public static String findSQLState(String mesg) {
+
+    //first see if there is a direct match
+    ErrorMsg errorMsg = mesgToErrorMsgMap.get(mesg);
+    if (errorMsg != null) {
+      if (errorMsg.getSQLState() != null)
+        return errorMsg.getSQLState();
+      else
+        return GENERIC_ERROR.getSQLState();
+    }
+
+    //if not, see if the mesg follows this type of format, which is typically the case:
+    //line 1:14 Table not found table_name
+    String truncatedMesg = mesg.trim();
+    Matcher match = ERROR_MESSAGE_PATTERN.matcher(mesg);
+    if (match.matches()) truncatedMesg = match.group(1);
+
+    //appends might exist after the root message, so strip tokens off until we match
+    while (truncatedMesg.length() > minMesgLength) {
+      errorMsg = mesgToErrorMsgMap.get(truncatedMesg.trim());
+      if (errorMsg != null) {
+        if (errorMsg.getSQLState() != null)
+          return errorMsg.getSQLState();
+        else
+          return GENERIC_ERROR.getSQLState();
+      }
+
+      int lastSpace = truncatedMesg.lastIndexOf(SPACE);
+      if (lastSpace == -1) break;
+
+      // hack off the last word and try again
+      truncatedMesg = truncatedMesg.substring(0, lastSpace).trim();
+    }
+
+    return GENERIC_ERROR.getSQLState();
+  }
+
   ErrorMsg(String mesg) {
+    //42000 is the generic SQLState for syntax error.
+    this(mesg, "42000");
+  }
+
+  ErrorMsg(String mesg, String SQLState) {
     this.mesg = mesg;
+    this.SQLState = SQLState;
   }
 
   private int getLine(ASTNode tree) {
@@ -140,4 +223,7 @@
     return mesg;
   }
 
+  public String getSQLState() {
+    return SQLState;
+  }
 }

Modified: hadoop/hive/trunk/service/if/hive_service.thrift
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/if/hive_service.thrift?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/service/if/hive_service.thrift (original)
+++ hadoop/hive/trunk/service/if/hive_service.thrift Tue Sep 22 18:58:10 2009
@@ -49,6 +49,8 @@
 
 exception HiveServerException {
   1: string message
+  2: i32 errorCode
+  3: string SQLState
 }
 
 # Interface for Thrift Hive Server

Modified: hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java (original)
+++ hadoop/hive/trunk/service/src/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java Tue Sep 22 18:58:10 2009
@@ -21,17 +21,28 @@
 public class HiveServerException extends Exception implements TBase, java.io.Serializable, Cloneable {
   private static final TStruct STRUCT_DESC = new TStruct("HiveServerException");
   private static final TField MESSAGE_FIELD_DESC = new TField("message", TType.STRING, (short)1);
+  private static final TField ERROR_CODE_FIELD_DESC = new TField("errorCode", TType.I32, (short)2);
+  private static final TField SQLSTATE_FIELD_DESC = new TField("SQLState", TType.STRING, (short)3);
 
   private String message;
   public static final int MESSAGE = 1;
+  private int errorCode;
+  public static final int ERRORCODE = 2;
+  private String SQLState;
+  public static final int SQLSTATE = 3;
 
   private final Isset __isset = new Isset();
   private static final class Isset implements java.io.Serializable {
+    public boolean errorCode = false;
   }
 
   public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
     put(MESSAGE, new FieldMetaData("message", TFieldRequirementType.DEFAULT, 
         new FieldValueMetaData(TType.STRING)));
+    put(ERRORCODE, new FieldMetaData("errorCode", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.I32)));
+    put(SQLSTATE, new FieldMetaData("SQLState", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.STRING)));
   }});
 
   static {
@@ -42,10 +53,15 @@
   }
 
   public HiveServerException(
-    String message)
+    String message,
+    int errorCode,
+    String SQLState)
   {
     this();
     this.message = message;
+    this.errorCode = errorCode;
+    this.__isset.errorCode = true;
+    this.SQLState = SQLState;
   }
 
   /**
@@ -55,6 +71,11 @@
     if (other.isSetMessage()) {
       this.message = other.message;
     }
+    __isset.errorCode = other.__isset.errorCode;
+    this.errorCode = other.errorCode;
+    if (other.isSetSQLState()) {
+      this.SQLState = other.SQLState;
+    }
   }
 
   @Override
@@ -79,6 +100,41 @@
     return this.message != null;
   }
 
+  public int getErrorCode() {
+    return this.errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+    this.__isset.errorCode = true;
+  }
+
+  public void unsetErrorCode() {
+    this.__isset.errorCode = false;
+  }
+
+  // Returns true if field errorCode is set (has been assigned a value) and false otherwise
+  public boolean isSetErrorCode() {
+    return this.__isset.errorCode;
+  }
+
+  public String getSQLState() {
+    return this.SQLState;
+  }
+
+  public void setSQLState(String SQLState) {
+    this.SQLState = SQLState;
+  }
+
+  public void unsetSQLState() {
+    this.SQLState = null;
+  }
+
+  // Returns true if field SQLState is set (has been assigned a value) and false otherwise
+  public boolean isSetSQLState() {
+    return this.SQLState != null;
+  }
+
   public void setFieldValue(int fieldID, Object value) {
     switch (fieldID) {
     case MESSAGE:
@@ -89,6 +145,22 @@
       }
       break;
 
+    case ERRORCODE:
+      if (value == null) {
+        unsetErrorCode();
+      } else {
+        setErrorCode((Integer)value);
+      }
+      break;
+
+    case SQLSTATE:
+      if (value == null) {
+        unsetSQLState();
+      } else {
+        setSQLState((String)value);
+      }
+      break;
+
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -99,6 +171,12 @@
     case MESSAGE:
       return getMessage();
 
+    case ERRORCODE:
+      return new Integer(getErrorCode());
+
+    case SQLSTATE:
+      return getSQLState();
+
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -109,6 +187,10 @@
     switch (fieldID) {
     case MESSAGE:
       return isSetMessage();
+    case ERRORCODE:
+      return isSetErrorCode();
+    case SQLSTATE:
+      return isSetSQLState();
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -136,6 +218,24 @@
         return false;
     }
 
+    boolean this_present_errorCode = true;
+    boolean that_present_errorCode = true;
+    if (this_present_errorCode || that_present_errorCode) {
+      if (!(this_present_errorCode && that_present_errorCode))
+        return false;
+      if (this.errorCode != that.errorCode)
+        return false;
+    }
+
+    boolean this_present_SQLState = true && this.isSetSQLState();
+    boolean that_present_SQLState = true && that.isSetSQLState();
+    if (this_present_SQLState || that_present_SQLState) {
+      if (!(this_present_SQLState && that_present_SQLState))
+        return false;
+      if (!this.SQLState.equals(that.SQLState))
+        return false;
+    }
+
     return true;
   }
 
@@ -162,6 +262,21 @@
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
+        case ERRORCODE:
+          if (field.type == TType.I32) {
+            this.errorCode = iprot.readI32();
+            this.__isset.errorCode = true;
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
+        case SQLSTATE:
+          if (field.type == TType.STRING) {
+            this.SQLState = iprot.readString();
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
         default:
           TProtocolUtil.skip(iprot, field.type);
           break;
@@ -182,6 +297,14 @@
       oprot.writeString(this.message);
       oprot.writeFieldEnd();
     }
+    oprot.writeFieldBegin(ERROR_CODE_FIELD_DESC);
+    oprot.writeI32(this.errorCode);
+    oprot.writeFieldEnd();
+    if (this.SQLState != null) {
+      oprot.writeFieldBegin(SQLSTATE_FIELD_DESC);
+      oprot.writeString(this.SQLState);
+      oprot.writeFieldEnd();
+    }
     oprot.writeFieldStop();
     oprot.writeStructEnd();
   }
@@ -198,6 +321,18 @@
       sb.append(this.message);
     }
     first = false;
+    if (!first) sb.append(", ");
+    sb.append("errorCode:");
+    sb.append(this.errorCode);
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("SQLState:");
+    if (this.SQLState == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.SQLState);
+    }
+    first = false;
     sb.append(")");
     return sb.toString();
   }

Modified: hadoop/hive/trunk/service/src/gen-php/hive_service_types.php
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-php/hive_service_types.php?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-php/hive_service_types.php (original)
+++ hadoop/hive/trunk/service/src/gen-php/hive_service_types.php Tue Sep 22 18:58:10 2009
@@ -200,6 +200,8 @@
   static $_TSPEC;
 
   public $message = null;
+  public $errorCode = null;
+  public $SQLState = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -208,12 +210,26 @@
           'var' => 'message',
           'type' => TType::STRING,
           ),
+        2 => array(
+          'var' => 'errorCode',
+          'type' => TType::I32,
+          ),
+        3 => array(
+          'var' => 'SQLState',
+          'type' => TType::STRING,
+          ),
         );
     }
     if (is_array($vals)) {
       if (isset($vals['message'])) {
         $this->message = $vals['message'];
       }
+      if (isset($vals['errorCode'])) {
+        $this->errorCode = $vals['errorCode'];
+      }
+      if (isset($vals['SQLState'])) {
+        $this->SQLState = $vals['SQLState'];
+      }
     }
   }
 
@@ -243,6 +259,20 @@
             $xfer += $input->skip($ftype);
           }
           break;
+        case 2:
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->errorCode);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        case 3:
+          if ($ftype == TType::STRING) {
+            $xfer += $input->readString($this->SQLState);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -261,6 +291,16 @@
       $xfer += $output->writeString($this->message);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->errorCode !== null) {
+      $xfer += $output->writeFieldBegin('errorCode', TType::I32, 2);
+      $xfer += $output->writeI32($this->errorCode);
+      $xfer += $output->writeFieldEnd();
+    }
+    if ($this->SQLState !== null) {
+      $xfer += $output->writeFieldBegin('SQLState', TType::STRING, 3);
+      $xfer += $output->writeString($this->SQLState);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

Modified: hadoop/hive/trunk/service/src/gen-py/hive_service/ttypes.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/gen-py/hive_service/ttypes.py?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/gen-py/hive_service/ttypes.py (original)
+++ hadoop/hive/trunk/service/src/gen-py/hive_service/ttypes.py Tue Sep 22 18:58:10 2009
@@ -142,15 +142,21 @@
   """
   Attributes:
    - message
+   - errorCode
+   - SQLState
   """
 
   thrift_spec = (
     None, # 0
     (1, TType.STRING, 'message', None, None, ), # 1
+    (2, TType.I32, 'errorCode', None, None, ), # 2
+    (3, TType.STRING, 'SQLState', None, None, ), # 3
   )
 
-  def __init__(self, message=None,):
+  def __init__(self, message=None, errorCode=None, SQLState=None,):
     self.message = message
+    self.errorCode = errorCode
+    self.SQLState = SQLState
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -166,6 +172,16 @@
           self.message = iprot.readString();
         else:
           iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.errorCode = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.SQLState = iprot.readString();
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -180,6 +196,14 @@
       oprot.writeFieldBegin('message', TType.STRING, 1)
       oprot.writeString(self.message)
       oprot.writeFieldEnd()
+    if self.errorCode != None:
+      oprot.writeFieldBegin('errorCode', TType.I32, 2)
+      oprot.writeI32(self.errorCode)
+      oprot.writeFieldEnd()
+    if self.SQLState != None:
+      oprot.writeFieldBegin('SQLState', TType.STRING, 3)
+      oprot.writeString(self.SQLState)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 

Modified: hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=817769&r1=817768&r2=817769&view=diff
==============================================================================
--- hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hadoop/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Tue Sep 22 18:58:10 2009
@@ -72,7 +72,7 @@
      * Stores state per connection
      */
     private SessionState session;
-    
+
     /**
      * Flag that indicates whether the last executed command was a Hive query
      */
@@ -103,29 +103,38 @@
     public void execute(String cmd) throws HiveServerException, TException {
       HiveServerHandler.LOG.info("Running the query: " + cmd);
       SessionState ss = SessionState.get();
-      
+
       String cmd_trimmed = cmd.trim();
       String[] tokens = cmd_trimmed.split("\\s");
       String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
       
       int ret = 0;
+      String errorMessage = "";
+      String SQLState = null;
+
       try {
         CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
         if(proc != null) {
           if (proc instanceof Driver) {
-        	  isHiveQuery = true;
-            ret = driver.run(cmd);
+            isHiveQuery = true;
+            Driver.DriverResponse response = driver.runCommand(cmd);
+            ret = response.getResponseCode();
+            SQLState = response.getSQLState();
+            errorMessage = response.getErrorMessage();
           } else {
-        	  isHiveQuery = false;
+            isHiveQuery = false;
             ret = proc.run(cmd_1);
           }
         }
       } catch (Exception e) {
-        throw new HiveServerException("Error running query: " + e.toString());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage("Error running query: " + e.toString());
+        throw ex;
       }
 
       if (ret != 0) {
-        throw new HiveServerException("Query returned non-zero code: " + ret);
+        throw new HiveServerException("Query returned non-zero code: " + ret +
+                                      ", cause: " + errorMessage, ret, SQLState);
       }
     }
 
@@ -137,7 +146,7 @@
       try {
         ClusterStatus cs = driver.getClusterStatus();
         JobTracker.State jbs = cs.getJobTrackerState();
-        
+
         // Convert the ClusterStatus to its Thrift equivalent: HiveClusterStatus
         int state;
         switch (jbs) {
@@ -151,7 +160,7 @@
             String errorMsg = "Unrecognized JobTracker state: " + jbs.toString();
             throw new Exception(errorMsg);
         }
-        
+
         hcs = new HiveClusterStatus(
             cs.getTaskTrackers(),
             cs.getMapTasks(),
@@ -163,19 +172,21 @@
       catch (Exception e) {
         LOG.error(e.toString());
         e.printStackTrace();
-        throw new HiveServerException("Unable to get cluster status: " + e.toString());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage("Unable to get cluster status: " + e.toString());
+        throw ex;
       }
       return hcs;
     }
-    
+
     /**
      * Return the Hive schema of the query result
      */
     public Schema getSchema() throws HiveServerException, TException {
       if (!isHiveQuery)
         // Return empty schema if the last command was not a Hive query
-        return new Schema();	
-    	
+        return new Schema();
+
       try {
         Schema schema = driver.getSchema();
         if (schema == null) {
@@ -187,10 +198,12 @@
       catch (Exception e) {
         LOG.error(e.toString());
         e.printStackTrace();
-        throw new HiveServerException("Unable to get schema: " + e.toString());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage("Unable to get schema: " + e.toString());
+        throw ex;
       }
     }
-    
+
     /**
      * Return the Thrift schema of the query result
      */
@@ -198,7 +211,7 @@
       if (!isHiveQuery)
         // Return empty schema if the last command was not a Hive query
         return new Schema();
-    	
+
       try {
         Schema schema = driver.getThriftSchema();
         if (schema == null) {
@@ -210,21 +223,23 @@
       catch (Exception e) {
         LOG.error(e.toString());
         e.printStackTrace();
-        throw new HiveServerException("Unable to get schema: " + e.toString());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage("Unable to get schema: " + e.toString());
+        throw ex;
       }
     }
-    
-    
-    /** 
+
+
+    /**
      * Fetches the next row in a query result set.
-     * 
+     *
      * @return the next row in a query result set. null if there is no more row to fetch.
      */
     public String fetchOne() throws HiveServerException, TException {
       if (!isHiveQuery)
         // Return no results if the last command was not a Hive query
         return "";
-      
+
       Vector<String> result = new Vector<String>();
       driver.setMaxRows(1);
       try {
@@ -236,7 +251,9 @@
         // TODO: return null in some other way
         return "";
       } catch (IOException e) {
-        throw new HiveServerException(e.getMessage());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage(e.getMessage());
+        throw ex;
       }
     }
 
@@ -244,26 +261,30 @@
      * Fetches numRows rows.
      *
      * @param numRows Number of rows to fetch.
-     * @return A list of rows. The size of the list is numRows if there are at least 
+     * @return A list of rows. The size of the list is numRows if there are at least
      *         numRows rows available to return. The size is smaller than numRows if
-     *         there aren't enough rows. The list will be empty if there is no more 
-     *         row to fetch or numRows == 0. 
+     *         there aren't enough rows. The list will be empty if there is no more
+     *         row to fetch or numRows == 0.
      * @throws HiveServerException Invalid value for numRows (numRows < 0)
      */
     public List<String> fetchN(int numRows) throws HiveServerException, TException {
       if (numRows < 0) {
-        throw new HiveServerException("Invalid argument for number of rows: " + numRows);
-      } 
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage("Invalid argument for number of rows: " + numRows);
+        throw ex;
+      }
       if (!isHiveQuery)
-      	// Return no results if the last command was not a Hive query
+        // Return no results if the last command was not a Hive query
         return new Vector<String>();
-      
-      Vector<String> result = new Vector<String>();      
+
+      Vector<String> result = new Vector<String>();
       driver.setMaxRows(numRows);
       try {
         driver.getResults(result);
       } catch (IOException e) {
-        throw new HiveServerException(e.getMessage());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage(e.getMessage());
+        throw ex;
       }
       return result;
     }
@@ -273,14 +294,14 @@
      *
      * @return All the rows in a result set of a query executed using execute method.
      *
-     * TODO: Currently the server buffers all the rows before returning them 
+     * TODO: Currently the server buffers all the rows before returning them
      * to the client. Decide whether the buffering should be done in the client.
      */
     public List<String> fetchAll() throws HiveServerException, TException {
       if (!isHiveQuery)
         // Return no results if the last command was not a Hive query
         return new Vector<String>();
-      
+
       Vector<String> rows = new Vector<String>();
       Vector<String> result = new Vector<String>();
       try {
@@ -289,11 +310,13 @@
           result.clear();
         }
       } catch (IOException e) {
-        throw new HiveServerException(e.getMessage());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage(e.getMessage());
+        throw ex;
       }
       return rows;
     }
-    
+
     /**
      * Return the status of the server
      */
@@ -320,13 +343,15 @@
         qp.addToQueries(driver.getQueryPlan());
       }
       catch (Exception e) {
-        throw new HiveServerException(e.toString());
+        HiveServerException ex = new HiveServerException();
+        ex.setMessage(e.toString());
+        throw ex;
       }
       return qp;
     }
-    
+
   }
-	
+
   public static class ThriftHiveProcessorFactory extends TProcessorFactory {
     public ThriftHiveProcessorFactory (TProcessor processor) {
       super(processor);