Posted to commits@hive.apache.org by ha...@apache.org on 2013/03/12 19:22:03 UTC

svn commit: r1455659 [11/11] - in /hive/trunk: ./ ant/src/org/apache/hadoop/hive/ant/ bin/ bin/ext/ cli/ common/ common/src/gen/ common/src/gen/org/ common/src/gen/org/apache/ common/src/gen/org/apache/hive/ common/src/gen/org/apache/hive/common/ commo...

Added: hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java?rev=1455659&view=auto
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java (added)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java Tue Mar 12 18:22:00 2013
@@ -0,0 +1,374 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.thrift;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.GetInfoType;
+import org.apache.hive.service.cli.GetInfoValue;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.TableSchema;
+
+/**
+ * ThriftCLIServiceClient.
+ *
+ */
+public class ThriftCLIServiceClient extends CLIServiceClient {
+  private final TCLIService.Iface cliService;
+
+  public ThriftCLIServiceClient(TCLIService.Iface cliService) {
+    this.cliService = cliService;
+  }
+
+  public void checkStatus(TStatus status) throws HiveSQLException {
+    if (TStatusCode.ERROR_STATUS.equals(status.getStatusCode())) {
+      throw new HiveSQLException(status);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map)
+   */
+  @Override
+  public SessionHandle openSession(String username, String password,
+      Map<String, String> configuration)
+          throws HiveSQLException {
+    try {
+      TOpenSessionReq req = new TOpenSessionReq();
+      req.setUsername(username);
+      req.setPassword(password);
+      req.setConfiguration(configuration);
+      TOpenSessionResp resp = cliService.OpenSession(req);
+      checkStatus(resp.getStatus());
+      return new SessionHandle(resp.getSessionHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#openSessionWithImpersonation(java.lang.String, java.lang.String, java.util.Map, java.lang.String)
+   */
+  @Override
+  public SessionHandle openSessionWithImpersonation(String username, String password,
+      Map<String, String> configuration, String delegationToken) throws HiveSQLException {
+    throw new HiveSQLException("openSessionWithImpersonation is not supported by this client");
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle)
+   */
+  @Override
+  public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
+    try {
+      TCloseSessionReq req = new TCloseSessionReq(sessionHandle.toTSessionHandle());
+      TCloseSessionResp resp = cliService.CloseSession(req);
+      checkStatus(resp.getStatus());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, org.apache.hive.service.cli.GetInfoType)
+   */
+  @Override
+  public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType)
+      throws HiveSQLException {
+    try {
+      // FIXME extract the right info type
+      TGetInfoReq req = new TGetInfoReq(sessionHandle.toTSessionHandle(), infoType.toTGetInfoType());
+      TGetInfoResp resp = cliService.GetInfo(req);
+      checkStatus(resp.getStatus());
+      return new GetInfoValue(resp.getInfoValue());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map)
+   */
+  @Override
+  public OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+      Map<String, String> confOverlay)
+          throws HiveSQLException {
+    try {
+      TExecuteStatementReq req = new TExecuteStatementReq(sessionHandle.toTSessionHandle(), statement);
+      req.setConfOverlay(confOverlay);
+      TExecuteStatementResp resp = cliService.ExecuteStatement(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle)
+   */
+  @Override
+  public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException {
+    try {
+      TGetTypeInfoReq req = new TGetTypeInfoReq(sessionHandle.toTSessionHandle());
+      TGetTypeInfoResp resp = cliService.GetTypeInfo(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle)
+   */
+  @Override
+  public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException {
+    try {
+      TGetCatalogsReq req = new TGetCatalogsReq(sessionHandle.toTSessionHandle());
+      TGetCatalogsResp resp = cliService.GetCatalogs(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String)
+   */
+  @Override
+  public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName,
+      String schemaName)
+          throws HiveSQLException {
+    try {
+      TGetSchemasReq req = new TGetSchemasReq(sessionHandle.toTSessionHandle());
+      req.setCatalogName(catalogName);
+      req.setSchemaName(schemaName);
+      TGetSchemasResp resp = cliService.GetSchemas(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List)
+   */
+  @Override
+  public OperationHandle getTables(SessionHandle sessionHandle, String catalogName,
+      String schemaName, String tableName, List<String> tableTypes)
+          throws HiveSQLException {
+    try {
+      TGetTablesReq req = new TGetTablesReq(sessionHandle.toTSessionHandle());
+      req.setCatalogName(catalogName);
+      req.setSchemaName(schemaName);
+      req.setTableName(tableName);
+      req.setTableTypes(tableTypes);
+      TGetTablesResp resp = cliService.GetTables(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle)
+   */
+  @Override
+  public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException {
+    try {
+      TGetTableTypesReq req = new TGetTableTypesReq(sessionHandle.toTSessionHandle());
+      TGetTableTypesResp resp = cliService.GetTableTypes(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+   */
+  @Override
+  public OperationHandle getColumns(SessionHandle sessionHandle,
+      String catalogName, String schemaName, String tableName, String columnName)
+          throws HiveSQLException {
+    try {
+      TGetColumnsReq req = new TGetColumnsReq();
+      req.setSessionHandle(sessionHandle.toTSessionHandle());
+      req.setCatalogName(catalogName);
+      req.setSchemaName(schemaName);
+      req.setTableName(tableName);
+      req.setColumnName(columnName);
+      TGetColumnsResp resp = cliService.GetColumns(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String)
+   */
+  @Override
+  public OperationHandle getFunctions(SessionHandle sessionHandle,
+      String catalogName, String schemaName, String functionName) throws HiveSQLException {
+    try {
+      TGetFunctionsReq req = new TGetFunctionsReq(sessionHandle.toTSessionHandle(), functionName);
+      req.setCatalogName(catalogName);
+      req.setSchemaName(schemaName);
+      TGetFunctionsResp resp = cliService.GetFunctions(req);
+      checkStatus(resp.getStatus());
+      return new OperationHandle(resp.getOperationHandle());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle)
+   */
+  @Override
+  public OperationState getOperationStatus(OperationHandle opHandle) throws HiveSQLException {
+    try {
+      TGetOperationStatusReq req = new TGetOperationStatusReq(opHandle.toTOperationHandle());
+      TGetOperationStatusResp resp = cliService.GetOperationStatus(req);
+      checkStatus(resp.getStatus());
+      return OperationState.getOperationState(resp.getOperationState());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle)
+   */
+  @Override
+  public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
+    try {
+      TCancelOperationReq req = new TCancelOperationReq(opHandle.toTOperationHandle());
+      TCancelOperationResp resp = cliService.CancelOperation(req);
+      checkStatus(resp.getStatus());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle)
+   */
+  @Override
+  public void closeOperation(OperationHandle opHandle)
+      throws HiveSQLException {
+    try {
+      TCloseOperationReq req = new TCloseOperationReq(opHandle.toTOperationHandle());
+      TCloseOperationResp resp = cliService.CloseOperation(req);
+      checkStatus(resp.getStatus());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle)
+   */
+  @Override
+  public TableSchema getResultSetMetadata(OperationHandle opHandle)
+      throws HiveSQLException {
+    try {
+      TGetResultSetMetadataReq req = new TGetResultSetMetadataReq(opHandle.toTOperationHandle());
+      TGetResultSetMetadataResp resp = cliService.GetResultSetMetadata(req);
+      checkStatus(resp.getStatus());
+      return new TableSchema(resp.getSchema());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
+   */
+  @Override
+  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+      throws HiveSQLException {
+    try {
+      TFetchResultsReq req = new TFetchResultsReq();
+      req.setOperationHandle(opHandle.toTOperationHandle());
+      req.setOrientation(orientation.toTFetchOrientation());
+      req.setMaxRows(maxRows);
+      TFetchResultsResp resp = cliService.FetchResults(req);
+      checkStatus(resp.getStatus());
+      return new RowSet(resp.getResults());
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
+   */
+  @Override
+  public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
+    // TODO: set the correct default fetch size
+    return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000);
+  }
+}
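
A minimal end-to-end sketch of driving this client. It assumes the in-process
EmbeddedThriftCLIService added elsewhere in this commit, so the same Thrift
request/response path is exercised without opening a socket; the credentials
and statement text are placeholders:

    import java.util.Collections;

    import org.apache.hive.service.cli.OperationHandle;
    import org.apache.hive.service.cli.RowSet;
    import org.apache.hive.service.cli.SessionHandle;
    import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
    import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;

    public class ThriftClientSketch {
      public static void main(String[] args) throws Exception {
        ThriftCLIServiceClient client =
            new ThriftCLIServiceClient(new EmbeddedThriftCLIService());
        SessionHandle session =
            client.openSession("hive", "", Collections.<String, String>emptyMap());
        OperationHandle op = client.executeStatement(
            session, "SHOW TABLES", Collections.<String, String>emptyMap());
        // Uses FETCH_NEXT and the client's default max row count.
        RowSet rows = client.fetchResults(op);
        client.closeOperation(op);
        client.closeSession(session);
      }
    }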

Added: hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1455659&view=auto
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java (added)
+++ hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java Tue Mar 12 18:22:00 2013
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.common.util.HiveStringUtils;
+import org.apache.hive.service.CompositeService;
+import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+
+/**
+ * HiveServer2.
+ *
+ */
+public class HiveServer2 extends CompositeService {
+  private static final Log LOG = LogFactory.getLog(HiveServer2.class);
+  private static CompositeServiceShutdownHook serverShutdownHook;
+  public static final int SHUTDOWN_HOOK_PRIORITY = 100;
+
+  private CLIService cliService;
+  private ThriftCLIService thriftCLIService;
+
+  public HiveServer2() {
+    super("HiveServer2");
+  }
+
+
+  @Override
+  public synchronized void init(HiveConf hiveConf) {
+    cliService = new CLIService();
+    addService(cliService);
+
+    thriftCLIService = new ThriftCLIService(cliService);
+    addService(thriftCLIService);
+
+    super.init(hiveConf);
+  }
+
+  @Override
+  public synchronized void start() {
+    super.start();
+  }
+
+  @Override
+  public synchronized void stop() {
+    super.stop();
+  }
+
+  /**
+   * @param args
+   */
+  public static void main(String[] args) {
+
+    //NOTE: It is critical to do this here so that log4j is reinitialized
+    // before any of the other core hive classes are loaded
+    try {
+      LogUtils.initHiveLog4j();
+    } catch (LogInitializationException e) {
+      LOG.warn(e.getMessage());
+    }
+
+    HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
+    try {
+      HiveConf hiveConf = new HiveConf();
+      HiveServer2 server = new HiveServer2();
+      server.init(hiveConf);
+      server.start();
+    } catch (Throwable t) {
+      LOG.fatal("Error starting HiveServer2", t);
+      System.exit(-1);
+    }
+  }
+
+}

Added: hive/trunk/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1455659&view=auto
==============================================================================
--- hive/trunk/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (added)
+++ hive/trunk/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java Tue Mar 12 18:22:00 2013
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Collections;
+import java.util.HashMap;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * CLIServiceTest.
+ *
+ */
+public abstract class CLIServiceTest {
+
+  protected static CLIServiceClient client;
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  @Test
+  public void createSessionTest() throws Exception {
+    SessionHandle sessionHandle = client
+        .openSession("tom", "password", Collections.<String, String>emptyMap());
+    assertNotNull(sessionHandle);
+    client.closeSession(sessionHandle);
+
+    sessionHandle = client.openSession("tom", "password");
+    assertNotNull(sessionHandle);
+    client.closeSession(sessionHandle);
+  }
+
+  @Test
+  public void getFunctionsTest() throws Exception {
+    SessionHandle sessionHandle = client.openSession("tom", "password", new HashMap<String, String>());
+    assertNotNull(sessionHandle);
+    OperationHandle opHandle = client.getFunctions(sessionHandle, null, null, "*");
+    TableSchema schema = client.getResultSetMetadata(opHandle);
+
+    ColumnDescriptor columnDesc = schema.getColumnDescriptorAt(0);
+    assertEquals("FUNCTION_CAT", columnDesc.getName());
+    assertEquals(Type.STRING_TYPE, columnDesc.getType());
+
+    columnDesc = schema.getColumnDescriptorAt(1);
+    assertEquals("FUNCTION_SCHEM", columnDesc.getName());
+    assertEquals(Type.STRING_TYPE, columnDesc.getType());
+
+    columnDesc = schema.getColumnDescriptorAt(2);
+    assertEquals("FUNCTION_NAME", columnDesc.getName());
+    assertEquals(Type.STRING_TYPE, columnDesc.getType());
+
+    columnDesc = schema.getColumnDescriptorAt(3);
+    assertEquals("REMARKS", columnDesc.getName());
+    assertEquals(Type.STRING_TYPE, columnDesc.getType());
+
+    columnDesc = schema.getColumnDescriptorAt(4);
+    assertEquals("FUNCTION_TYPE", columnDesc.getName());
+    assertEquals(Type.INT_TYPE, columnDesc.getType());
+
+    columnDesc = schema.getColumnDescriptorAt(5);
+    assertEquals("SPECIFIC_NAME", columnDesc.getName());
+    assertEquals(Type.STRING_TYPE, columnDesc.getType());
+
+    client.closeOperation(opHandle);
+    client.closeSession(sessionHandle);
+  }
+
+  @Test
+  public void getInfoTest() throws Exception {
+    SessionHandle sessionHandle = client.openSession("tom", "password", new HashMap<String, String>());
+    assertNotNull(sessionHandle);
+
+    GetInfoValue value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_NAME);
+    System.out.println(value.getStringValue());
+
+    value = client.getInfo(sessionHandle, GetInfoType.CLI_SERVER_NAME);
+    System.out.println(value.getStringValue());
+
+    value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_VER);
+    System.out.println(value.getStringValue());
+
+    client.closeSession(sessionHandle);
+  }
+}

Added: hive/trunk/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java?rev=1455659&view=auto
==============================================================================
--- hive/trunk/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java (added)
+++ hive/trunk/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java Tue Mar 12 18:22:00 2013
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli;
+
+import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * TestEmbeddedThriftCLIService.
+ *
+ */
+public class TestEmbeddedThriftCLIService extends CLIServiceTest {
+
+  private static ThriftCLIService service;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    service = new EmbeddedThriftCLIService();
+    client = new ThriftCLIServiceClient(service);
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.CLIServiceTest#setUp()
+   */
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.CLIServiceTest#tearDown()
+   */
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+}

Added: hive/trunk/service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java?rev=1455659&view=auto
==============================================================================
--- hive/trunk/service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java (added)
+++ hive/trunk/service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java Tue Mar 12 18:22:00 2013
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QTestUtil;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * TestHiveServer2Concurrency.
+ *
+ */
+public class TestHiveServer2Concurrency {
+
+  private static QTestUtil.QTestSetup miniZKCluster = null;
+  private static HiveServer2 hiveServer2;
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    HiveConf hiveConf = new HiveConf();
+
+    miniZKCluster = new QTestUtil.QTestSetup();
+    miniZKCluster.preTest(hiveConf);
+
+    hiveServer2 = new HiveServer2();
+    hiveServer2.init(hiveConf);
+    hiveServer2.start();
+    Thread.sleep(5000);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    if (hiveServer2 != null) {
+      hiveServer2.stop();
+    }
+    if (miniZKCluster != null) {
+      try {
+        miniZKCluster.tearDown();
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  class QueryRunner implements Runnable {
+
+    @Override
+    public void run() {
+      // TODO Auto-generated method stub
+
+    }
+
+  }
+
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  @Test
+  public void test() {
+    fail("Not yet implemented");
+  }
+
+}

Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1455659&r1=1455658&r2=1455659&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Mar 12 18:22:00 2013
@@ -540,10 +540,16 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+  public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+    throws IOException {
+    throw new UnsupportedOperationException("Tokens are not supported in current hadoop version");
+  }
+
+  @Override
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws
     IOException, InterruptedException {
     try {
-      Subject.doAs(SecurityUtil.getSubject(ugi),pvea);
+      return Subject.doAs(SecurityUtil.getSubject(ugi),pvea);
     } catch (PrivilegedActionException e) {
       throw new IOException(e);
     }
@@ -555,6 +561,21 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException {
+    throw new UnsupportedOperationException("Kerberos login is not supported in current hadoop version");
+  }
+
+  @Override
+  public UserGroupInformation createProxyUser(String userName) throws IOException {
+    return createRemoteUser(userName, null);
+  }
+
+  @Override
+  public boolean isSecurityEnabled() {
+    return false;
+  }
+
+  @Override
   public String getTaskAttemptLogUrl(JobConf conf,
     String taskTrackerHttpAddress, String taskAttemptId)
     throws MalformedURLException {
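
The doAs change above is the substantive one: the shim now returns the result
of the privileged action instead of forcing a PrivilegedExceptionAction<Void>.
A sketch of a caller under the new signature (the helper name and Configuration
plumbing are illustrative, not part of the commit):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.security.UserGroupInformation;

    public class DoAsSketch {
      // Opens a FileSystem as the given user; before this change the result
      // had to be passed out of the Void action through a side channel.
      public static FileSystem fileSystemAs(UserGroupInformation ugi,
          final Configuration conf) throws IOException, InterruptedException {
        return ShimLoader.getHadoopShims().doAs(ugi,
            new PrivilegedExceptionAction<FileSystem>() {
              public FileSystem run() throws IOException {
                return FileSystem.get(conf);
              }
            });
      }
    }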

Modified: hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1455659&r1=1455658&r2=1455659&view=diff
==============================================================================
--- hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Tue Mar 12 18:22:00 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
+import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
@@ -59,6 +60,7 @@ import org.apache.hadoop.mapred.TaskID;
 import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -463,19 +465,19 @@ public abstract class HadoopShimsSecure 
 
     return ToolRunner.run(har, args.toArray(new String[0]));
   }
-  
+
   /*
    * This particular instance is for Hadoop 1.0 which creates an archive
    * with only the relative path of the archived directory stored within
    * the archive as compared to the full path in case of earlier versions.
    * See this api in Hadoop20Shims for comparison.
    */
-  public URI getHarUri(URI original, URI base, URI originalBase) 
+  public URI getHarUri(URI original, URI base, URI originalBase)
     throws URISyntaxException {
     URI relative = originalBase.relativize(original);
     if (relative.isAbsolute()) {
       throw new URISyntaxException("Couldn't create URI for location.",
-                                   "Relative: " + relative + " Base: " 
+                                   "Relative: " + relative + " Base: "
                                    + base + " OriginalBase: " + originalBase);
     }
 
@@ -538,8 +540,27 @@ public abstract class HadoopShimsSecure 
   }
 
   @Override
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws IOException, InterruptedException {
-    ugi.doAs(pvea);
+  public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException {
+    Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
+    delegationToken.decodeFromUrlString(tokenStr);
+    delegationToken.setService(new Text(tokenService));
+    ugi.addToken(delegationToken);
+  }
+
+  @Override
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws IOException, InterruptedException {
+    return ugi.doAs(pvea);
+  }
+
+  @Override
+  public UserGroupInformation createProxyUser(String userName) throws IOException {
+    return UserGroupInformation.createProxyUser(
+        userName, UserGroupInformation.getLoginUser());
+  }
+
+  @Override
+  public boolean isSecurityEnabled() {
+    return UserGroupInformation.isSecurityEnabled();
   }
 
   @Override
@@ -557,6 +578,12 @@ public abstract class HadoopShimsSecure 
   }
 
   @Override
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException {
+    String hostPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
+    UserGroupInformation.loginUserFromKeytab(hostPrincipal, keytabFile);
+  }
+
+  @Override
   abstract public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;
 
   @Override
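
Taken together, createProxyUser and setTokenStr let a server impersonate a
connecting user whose identity arrives as a delegation token string. A sketch
of that flow, assuming a token produced by Token.encodeToUrlString(); the
service label and user name are placeholders:

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUserSketch {
      public static void runAs(String userName, String tokenStr) throws Exception {
        HadoopShims shims = ShimLoader.getHadoopShims();
        UserGroupInformation proxyUgi = shims.createProxyUser(userName);
        // Attach the delegation token under an example service label.
        shims.setTokenStr(proxyUgi, tokenStr, "exampleTokenService");
        shims.doAs(proxyUgi, new PrivilegedExceptionAction<Void>() {
          public Void run() throws Exception {
            // HDFS/metastore calls issued here carry the user's token.
            return null;
          }
        });
      }
    }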

Modified: hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1455659&r1=1455658&r2=1455659&view=diff
==============================================================================
--- hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java (original)
+++ hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java Tue Mar 12 18:22:00 2013
@@ -40,6 +40,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Client;
 import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -62,6 +64,7 @@ import org.apache.thrift.transport.TTran
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
 
  /**
   * Functions that bridge Thrift's SASL transports to Hadoop's
@@ -76,6 +79,14 @@ import org.apache.thrift.transport.TTran
    }
 
    @Override
+   public Client createClientWithConf(String authType) {
+     Configuration conf = new Configuration();
+     conf.set(HADOOP_SECURITY_AUTHENTICATION, authType);
+     UserGroupInformation.setConfiguration(conf);
+     return new Client();
+   }
+
+   @Override
    public Server createServer(String keytabFile, String principalConf) throws TTransportException {
      return new Server(keytabFile, principalConf);
    }
@@ -233,7 +244,7 @@ import org.apache.thrift.transport.TTran
      /**
       * Create a server with a kerberos keytab/principal.
       */
-     private Server(String keytabFile, String principalConf)
+     protected Server(String keytabFile, String principalConf)
        throws TTransportException {
        if (keytabFile == null || keytabFile.isEmpty()) {
          throw new TTransportException("No keytab specified");
@@ -293,7 +304,15 @@ import org.apache.thrift.transport.TTran
       */
      @Override
      public TProcessor wrapProcessor(TProcessor processor) {
-      return new TUGIAssumingProcessor(processor, secretManager);
+       return new TUGIAssumingProcessor(processor, secretManager, true);
+     }
+
+     /**
+      * Wrap a TProcessor to capture the client information like connecting userid, ip etc
+      */
+     @Override
+     public TProcessor wrapNonAssumingProcessor(TProcessor processor) {
+      return new TUGIAssumingProcessor(processor, secretManager, false);
      }
 
     protected DelegationTokenStore getTokenStore(Configuration conf)
@@ -398,6 +417,18 @@ import org.apache.thrift.transport.TTran
        }
      };
 
+     private static ThreadLocal<String> remoteUser = new ThreadLocal<String> () {
+       @Override
+       protected synchronized String initialValue() {
+         return null;
+       }
+     };
+
+     @Override
+     public String getRemoteUser() {
+       return remoteUser.get();
+     }
+     
     /** CallbackHandler for SASL DIGEST-MD5 mechanism */
     // This code is pretty much completely based on Hadoop's
     // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not
@@ -479,12 +510,15 @@ import org.apache.thrift.transport.TTran
       *
       * This is used on the server side to set the UGI for each specific call.
       */
-     private class TUGIAssumingProcessor implements TProcessor {
+     protected class TUGIAssumingProcessor implements TProcessor {
        final TProcessor wrapped;
        DelegationTokenSecretManager secretManager;
-       TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager) {
+       boolean useProxy;
+       TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager,
+           boolean useProxy) {
          this.wrapped = wrapped;
          this.secretManager = secretManager;
+         this.useProxy = useProxy;
        }
 
        public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
@@ -513,17 +547,23 @@ import org.apache.thrift.transport.TTran
          remoteAddress.set(socket.getInetAddress());
          UserGroupInformation clientUgi = null;
          try {
-           clientUgi = UserGroupInformation.createProxyUser(
-              endUser, UserGroupInformation.getLoginUser());
-           return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
-               public Boolean run() {
-                 try {
-                   return wrapped.process(inProt, outProt);
-                 } catch (TException te) {
-                   throw new RuntimeException(te);
+           if (useProxy) {
+             clientUgi = UserGroupInformation.createProxyUser(
+               endUser, UserGroupInformation.getLoginUser());
+             remoteUser.set(clientUgi.getShortUserName());
+             return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
+                 public Boolean run() {
+                   try {
+                     return wrapped.process(inProt, outProt);
+                   } catch (TException te) {
+                     throw new RuntimeException(te);
+                   }
                  }
-               }
-             });
+               });
+           } else {
+             remoteUser.set(endUser);
+             return wrapped.process(inProt, outProt);
+           }
          } catch (RuntimeException rte) {
            if (rte.getCause() instanceof TException) {
              throw (TException)rte.getCause();
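
The wrapNonAssumingProcessor/getRemoteUser pair added above serves servers that
authenticate the caller over SASL but keep executing under the server's own
identity, only recording who called. A server-side sketch (the raw processor
and keytab/principal values are placeholders):

    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
    import org.apache.thrift.TProcessor;

    public class NonAssumingServerSketch {
      public static TProcessor wrap(TProcessor raw, String keytab, String principal)
          throws Exception {
        HadoopThriftAuthBridge.Server saslServer =
            ShimLoader.getHadoopThriftAuthBridge().createServer(keytab, principal);
        // Authenticates the connection, processes the call as the server, and
        // stashes the caller's name in a thread-local for the handler to read:
        //   String caller = saslServer.getRemoteUser();
        return saslServer.wrapNonAssumingProcessor(raw);
      }
    }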

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1455659&r1=1455658&r2=1455659&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Mar 12 18:22:00 2013
@@ -192,15 +192,15 @@ public interface HadoopShims {
   public void closeAllForUGI(UserGroupInformation ugi);
 
   public UserGroupInformation getUGIForConf(Configuration conf) throws LoginException, IOException;
-
   /**
    * Used by metastore server to perform requested rpc in client context.
+   * @param <T>
    * @param ugi
    * @param pvea
    * @throws IOException
    * @throws InterruptedException
    */
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws
     IOException, InterruptedException;
 
   /**
@@ -226,6 +226,12 @@ public interface HadoopShims {
   public boolean isSecureShimImpl();
 
   /**
+   * Return true if the Hadoop configuration has security enabled.
+   * @return true if Hadoop security is enabled
+   */
+  public boolean isSecurityEnabled();
+
+  /**
    * Get the string form of the token given a token signature.
    * The signature is used as the value of the "service" field in the token for lookup.
    * Ref: AbstractDelegationTokenSelector in Hadoop. If there exists such a token
@@ -242,6 +248,16 @@ public interface HadoopShims {
    */
   String getTokenStrForm(String tokenSignature) throws IOException;
 
+  /**
+   * Add a delegation token to the given ugi
+   * @param ugi
+   * @param tokenStr
+   * @param tokenService
+   * @throws IOException
+   */
+  void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+    throws IOException;
+
 
   enum JobTrackerState { INITIALIZING, RUNNING };
 
@@ -290,6 +306,12 @@ public interface HadoopShims {
   public String getJobLauncherHttpAddress(Configuration conf);
 
 
+  /**
+   * Perform Kerberos login using the given principal and keytab.
+   * @throws IOException
+   */
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException;
+
   /**
    * Move the directory/file to trash. In case of the symlinks or mount points, the file is
    * moved to the trashbin in the actual volume of the path p being deleted
@@ -321,6 +343,13 @@ public interface HadoopShims {
   public short getDefaultReplication(FileSystem fs, Path path);
 
   /**
+   * Create a proxy UGI for the given user id.
+   * @param userName the user to impersonate
+   * @return a UserGroupInformation that proxies as userName
+   */
+  UserGroupInformation createProxyUser(String userName) throws IOException;
+
+  /**
    * InputSplitShim.
    *
    */
@@ -380,4 +409,5 @@ public interface HadoopShims {
     RecordReader getRecordReader(JobConf job, InputSplitShim split, Reporter reporter,
         Class<RecordReader<K, V>> rrClass) throws IOException;
   }
+
 }
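
The loginUserFromKeytab shim pairs with the secure implementation's
SecurityUtil.getServerPrincipal call, which expands a _HOST placeholder in the
principal to the local hostname before the Kerberos login. A hypothetical
server-startup call (principal and keytab path are examples):

    import org.apache.hadoop.hive.shims.ShimLoader;

    public class KeytabLoginSketch {
      public static void main(String[] args) throws Exception {
        // _HOST is replaced with this machine's hostname, then the process
        // logs in via UserGroupInformation.loginUserFromKeytab.
        ShimLoader.getHadoopShims().loginUserFromKeytab(
            "hive/_HOST@EXAMPLE.COM", "/etc/hive/conf/hive.keytab");
      }
    }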

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java?rev=1455659&r1=1455658&r2=1455659&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java Tue Mar 12 18:22:00 2013
@@ -37,6 +37,11 @@ import org.apache.thrift.transport.TTran
        "The current version of Hadoop does not support Authentication");
    }
 
+   public Client createClientWithConf(String authType) {
+     throw new UnsupportedOperationException(
+       "The current version of Hadoop does not support Authentication");
+   }
+   
    public Server createServer(String keytabFile, String principalConf)
      throws TTransportException {
      throw new UnsupportedOperationException(
@@ -67,7 +72,9 @@ import org.apache.thrift.transport.TTran
    public static abstract class Server {
      public abstract TTransportFactory createTransportFactory() throws TTransportException;
      public abstract TProcessor wrapProcessor(TProcessor processor);
+     public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
      public abstract InetAddress getRemoteAddress();
+     public abstract String getRemoteUser();
      public abstract void startDelegationTokenSecretManager(Configuration conf) throws IOException;
      public abstract String getDelegationToken(String owner, String renewer) 
      throws IOException, InterruptedException;