Posted to commits@knox.apache.org by km...@apache.org on 2013/09/13 20:19:26 UTC

[2/2] git commit: KNOX-99: Provide Hive sample code

KNOX-99: Provide Hive sample code


Project: http://git-wip-us.apache.org/repos/asf/incubator-knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-knox/commit/7998c47f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-knox/tree/7998c47f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-knox/diff/7998c47f

Branch: refs/heads/master
Commit: 7998c47fcaea1118c34a10cea97122c9b8b31487
Parents: 2f135e1
Author: Kevin Minder <ke...@hortonworks.com>
Authored: Fri Sep 13 14:19:15 2013 -0400
Committer: Kevin Minder <ke...@hortonworks.com>
Committed: Fri Sep 13 14:19:15 2013 -0400

----------------------------------------------------------------------
 .../SecureQueryEncryptDecryptProcessorTest.java |   91 --
 .../home/samples/HiveJDBCSample.java            |   95 ++
 gateway-release/home/samples/sample.log         | 1387 ++++++++++++++++++
 3 files changed, 1482 insertions(+), 91 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/7998c47f/gateway-provider-secure-query/src/test/java/org/apache/hadoop/gateway/securequery/SecureQueryEncryptDecryptProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-secure-query/src/test/java/org/apache/hadoop/gateway/securequery/SecureQueryEncryptDecryptProcessorTest.java b/gateway-provider-secure-query/src/test/java/org/apache/hadoop/gateway/securequery/SecureQueryEncryptDecryptProcessorTest.java
deleted file mode 100644
index b06d2f5..0000000
--- a/gateway-provider-secure-query/src/test/java/org/apache/hadoop/gateway/securequery/SecureQueryEncryptDecryptProcessorTest.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.securequery;
-
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
-import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
-import org.apache.hadoop.gateway.util.urltemplate.Params;
-import org.apache.hadoop.gateway.util.urltemplate.Parser;
-import org.apache.hadoop.gateway.util.urltemplate.Query;
-import org.apache.hadoop.gateway.util.urltemplate.Template;
-import org.easymock.Capture;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.util.Arrays;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.nullValue;
-
-public class SecureQueryEncryptDecryptProcessorTest {
-
-  @Test
-  public void testEncryptDecrypt() throws Exception {
-    Query query;
-    Template origTemplate = Parser.parse( "http://host:0/path/file?query-param-name=query-param-value" );
-
-    // Test encryption.  Results are left in encTemplate
-
-    UrlRewriteEnvironment encEnvironment = EasyMock.createNiceMock( UrlRewriteEnvironment.class );
-    EasyMock.expect( encEnvironment.resolve( "cluster.name" ) ).andReturn( Arrays.asList( "test-cluster-name" ) ).anyTimes();
-    UrlRewriteContext encContext = EasyMock.createNiceMock( UrlRewriteContext.class );
-    EasyMock.expect( encContext.getCurrentUrl() ).andReturn( origTemplate );
-    Capture<Template> encTemplate = new Capture<Template>();
-    encContext.setCurrentUrl( EasyMock.capture( encTemplate ) );
-    EasyMock.replay( encEnvironment, encContext );
-
-    SecureQueryEncryptDescriptor descriptor = new SecureQueryEncryptDescriptor();
-    SecureQueryEncryptProcessor processor = new SecureQueryEncryptProcessor();
-    processor.initialize( encEnvironment, descriptor );
-    processor.process( encContext );
-
-    assertThat( encTemplate, notNullValue() );
-    query = encTemplate.getValue().getQuery().get( "_" );
-    assertThat( query.getFirstValue().getPattern().length(), greaterThan( 1 ) );
-    query = encTemplate.getValue().getQuery().get( "query-param-name" );
-    assertThat( query, nullValue() );
-
-    // Test decryption.  Results are left in decTemplate.
-
-    UrlRewriteEnvironment decEnvironment = EasyMock.createNiceMock( UrlRewriteEnvironment.class );
-    EasyMock.expect( decEnvironment.resolve( "cluster.name" ) ).andReturn( Arrays.asList( "test-cluster-name" ) ).anyTimes();
-    Params decParams = EasyMock.createNiceMock( Params.class );
-    EasyMock.expect( decParams.resolve( "cluster.name" ) ).andReturn( Arrays.asList("test-cluster-name") ).anyTimes();
-    UrlRewriteContext decContext = EasyMock.createNiceMock( UrlRewriteContext.class );
-    EasyMock.expect( decContext.getCurrentUrl() ).andReturn( encTemplate.getValue() );
-    EasyMock.expect( decContext.getParameters() ).andReturn( decParams );
-    Capture<Template> decTemplate = new Capture<Template>();
-    decContext.setCurrentUrl( EasyMock.capture( decTemplate ) );
-    EasyMock.replay( decEnvironment, decParams, decContext );
-
-    SecureQueryDecryptDescriptor descriptor1 = new SecureQueryDecryptDescriptor();
-    SecureQueryDecryptProcessor decProcessor = new SecureQueryDecryptProcessor();
-    decProcessor.initialize( decEnvironment, descriptor1 );
-    decProcessor.process( decContext );
-
-    assertThat( decTemplate, notNullValue() );
-    query = decTemplate.getValue().getQuery().get( "query-param-name" );
-    assertThat( query.getFirstValue().getPattern(), is( "query-param-value" ) );
-    query = decTemplate.getValue().getQuery().get( "_" );
-    assertThat( query, nullValue() );
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/7998c47f/gateway-release/home/samples/HiveJDBCSample.java
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/HiveJDBCSample.java b/gateway-release/home/samples/HiveJDBCSample.java
new file mode 100644
index 0000000..dbb5fb5
--- /dev/null
+++ b/gateway-release/home/samples/HiveJDBCSample.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class HiveJDBCSample {
+
+  public static void main( String[] args ) {
+    Connection connection = null;
+    Statement statement = null;
+    ResultSet resultSet = null;
+
+    try {
+      String user = "bob";
+      String password = "bob-password";
+      String gatewayHost = "localhost";
+      int gatewayPort = 8443;
+      String contextPath = "gateway/sample/hive/api/v1";
+      String connectionString = String.format( "jdbc:hive2://%s:%d/?hive.server2.servermode=https;hive.server2.http.path=%s", gatewayHost, gatewayPort, contextPath );
+
+      // load Hive JDBC Driver
+      Class.forName( "org.apache.hive.jdbc.HiveDriver" );
+
+      // configure JDBC connection
+      connection = DriverManager.getConnection( connectionString, user, password );
+
+      statement = connection.createStatement();
+
+      // disable Hive authorization - this step can be omitted if Hive
+      // authorization is configured properly
+      statement.execute( "set hive.security.authorization.enabled=false" );
+
+      // create sample table
+      statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
+
+      // load data into Hive from the file /tmp/sample.log on the local file system
+      statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
+
+      resultSet = statement.executeQuery( "SELECT * FROM logs" );
+
+      while ( resultSet.next() ) {
+        System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
+      }
+    } catch ( ClassNotFoundException ex ) {
+      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+    } catch ( SQLException ex ) {
+      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+    } finally {
+      if ( resultSet != null ) {
+        try {
+          resultSet.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+      if ( statement != null ) {
+        try {
+          statement.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+      if ( connection != null ) {
+        try {
+          connection.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+    }
+  }
+}
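
Usage note: the sample above releases its JDBC resources in an explicit finally block. On Java 7 or later the same query flow can be written more compactly with try-with-resources, as in the sketch below. The driver class, connection string, credentials, and query are taken unchanged from HiveJDBCSample.java; the class name HiveJDBCSampleCompact is only illustrative, and the sketch assumes the "logs" table was already created and loaded by the sample above.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJDBCSampleCompact {

  public static void main( String[] args ) throws Exception {
    String user = "bob";
    String password = "bob-password";
    String gatewayHost = "localhost";
    int gatewayPort = 8443;
    String contextPath = "gateway/sample/hive/api/v1";
    String connectionString = String.format( "jdbc:hive2://%s:%d/?hive.server2.servermode=https;hive.server2.http.path=%s", gatewayHost, gatewayPort, contextPath );

    // load the Hive JDBC driver, exactly as in the sample above
    Class.forName( "org.apache.hive.jdbc.HiveDriver" );

    // try-with-resources closes the connection, statement and result set
    // automatically, replacing the explicit finally block used above
    try ( Connection connection = DriverManager.getConnection( connectionString, user, password );
          Statement statement = connection.createStatement() ) {
      // optional, as above: skip if Hive authorization is configured properly
      statement.execute( "set hive.security.authorization.enabled=false" );
      try ( ResultSet resultSet = statement.executeQuery( "SELECT * FROM logs" ) ) {
        while ( resultSet.next() ) {
          System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
        }
      }
    }
  }
}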