Posted to commits@hive.apache.org by kh...@apache.org on 2013/10/02 23:32:44 UTC

svn commit: r1528633 - in /hive/trunk/hcatalog: conf/ core/src/main/java/org/apache/hcatalog/security/ core/src/main/java/org/apache/hive/hcatalog/security/ core/src/test/java/org/apache/hcatalog/security/ core/src/test/java/org/apache/hive/hcatalog/se...

Author: khorgath
Date: Wed Oct  2 21:32:43 2013
New Revision: 1528633

URL: http://svn.apache.org/r1528633
Log:
HIVE-5413 : StorageDelegationAuthorizationProvider uses non-existent org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler (Eugene Koifman via Sushanth Sowmyan)

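For context: the provider referred to org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler, which does not exist -- as the paths in this commit show, the HBase storage handler still lives under the old org.apache.hcatalog.hbase package -- so the references are switched back. A minimal sketch of the failure mode (illustrative only; whether the provider resolves the name reflectively or at compile time is not shown in this diff):

  // Hypothetical reproduction; the looked-up class name comes from the JIRA summary.
  public class MissingHandlerSketch {
    public static void main(String[] args) {
      try {
        Class.forName("org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler");
      } catch (ClassNotFoundException e) {
        // what delegation to the HBase handler runs into before this commit
        System.err.println("storage handler class not found: " + e.getMessage());
      }
    }
  }
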
Added:
    hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
      - copied, changed from r1528631, hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/HBaseReadWrite.java
Removed:
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/HdfsAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/security/TestHdfsAuthorizationProvider.java
    hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/HBaseReadWrite.java
Modified:
    hive/trunk/hcatalog/conf/proto-hive-site.xml
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
    hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf
    hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf
    hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
    hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java

Modified: hive/trunk/hcatalog/conf/proto-hive-site.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/conf/proto-hive-site.xml?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/conf/proto-hive-site.xml (original)
+++ hive/trunk/hcatalog/conf/proto-hive-site.xml Wed Oct  2 21:32:43 2013
@@ -114,7 +114,7 @@
 
 <property>
   <name>hive.security.authorization.manager</name>
-  <value>org.apache.hive.hcatalog.security.StorageDelegationAuthorizationProvider</value>
+  <value>org.apache.hcatalog.security.StorageDelegationAuthorizationProvider</value>
   <description>the hive client authorization manager class name.
   The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.
   HCatalog uses a model, where authorization checks are delegated to the storage layer (hdfs, hbase, ...).

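The property above can also be set programmatically via HiveConf, which is what the TestHdfsAuthorizationProvider change further down does. A minimal sketch using only the ConfVars that appear elsewhere in this commit (the wrapper class name is hypothetical):

  import org.apache.hadoop.hive.conf.HiveConf;

  public class AuthzConfSketch {
    public static HiveConf delegatingAuthzConf() {
      HiveConf conf = new HiveConf();
      // mirrors the proto-hive-site.xml property above and the test setup below
      conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
      conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
          "org.apache.hcatalog.security.StorageDelegationAuthorizationProvider");
      return conf;
    }
  }
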
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java Wed Oct  2 21:32:43 2013
@@ -56,7 +56,7 @@ import org.apache.hadoop.security.UserGr
  * An AuthorizationProvider, which checks against the data access level permissions on HDFS.
  * It makes sense to eventually move this class to Hive, so that all hive users can
  * use this authorization model. 
- * @deprecated Use/modify {@link org.apache.hive.hcatalog.security.HdfsAuthorizationProvider} instead
+ * @deprecated use {@link org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider}
  */
 public class HdfsAuthorizationProvider extends HiveAuthorizationProviderBase {
 

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java Wed Oct  2 21:32:43 2013
@@ -42,7 +42,7 @@ import org.apache.hcatalog.mapreduce.HCa
 /**
  * A HiveAuthorizationProvider which delegates the authorization requests to 
  * the underlying AuthorizationProviders obtained from the StorageHandler.
- * @deprecated Use/modify {@link org.apache.hive.hcatalog.security.StorageDelegationAuthorizationProvider} instead
+ * @deprecated 
  */
 public class StorageDelegationAuthorizationProvider extends HiveAuthorizationProviderBase {
 

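The javadoc above describes delegating authorization to providers obtained from the table's StorageHandler. Purely as an illustration of that pattern -- the class, field, and method names below are hypothetical, not the actual members of StorageDelegationAuthorizationProvider:

  import java.util.HashMap;
  import java.util.Map;

  import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

  class DelegationPatternSketch {
    // storage-handler class name -> authorization-provider class name
    private final Map<String, String> handlerToProvider = new HashMap<String, String>();

    DelegationPatternSketch() {
      // per HIVE-5413, the handler key must use the old package, which is where the
      // HBase storage handler actually lives
      handlerToProvider.put("org.apache.hcatalog.hbase.HBaseHCatStorageHandler",
          "org.apache.hcatalog.hbase.HBaseAuthorizationProvider");
    }

    HiveAuthorizationProvider resolve(String storageHandlerClass) throws Exception {
      String provider = handlerToProvider.get(storageHandlerClass);
      if (provider == null) {
        // plain tables fall back to HDFS-permission based checks
        provider = "org.apache.hcatalog.security.HdfsAuthorizationProvider";
      }
      // a real provider would also be handed the HiveConf before use
      return (HiveAuthorizationProvider) Class.forName(provider).newInstance();
    }
  }
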
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java Wed Oct  2 21:32:43 2013
@@ -53,7 +53,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 /**
- * @deprecated Use/modify {@link org.apache.hive.hcatalog.security.TestHdfsAuthorizationProvider} instead
+ * @deprecated 
  */
 public class TestHdfsAuthorizationProvider {
 
@@ -78,7 +78,7 @@ public class TestHdfsAuthorizationProvid
     conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
     conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-        StorageDelegationAuthorizationProvider.class.getCanonicalName());
+        StorageDelegationAuthorizationProvider.class.getName());
     conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
 
     whDir = System.getProperty("test.warehouse.dir", "/tmp/testhdfsauthorization_wh");

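On the getCanonicalName() -> getName() change above: for a top-level class such as StorageDelegationAuthorizationProvider the two calls return the same string, so the switch is cosmetic there, but they diverge for nested classes, where only getName() yields the binary name that Class.forName() accepts. A small standalone illustration:

  public class NameDemo {
    static class Inner {
    }

    public static void main(String[] args) throws Exception {
      System.out.println(Inner.class.getName());          // prints NameDemo$Inner
      System.out.println(Inner.class.getCanonicalName()); // prints NameDemo.Inner
      Class.forName(Inner.class.getName());               // resolves fine
      // Class.forName(Inner.class.getCanonicalName());   // would throw ClassNotFoundException
    }
  }
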
Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf Wed Oct  2 21:32:43 2013
@@ -225,9 +225,9 @@ jar :FUNCPATH:/testudf.jar org.apache.hi
                                 {
                                  'num' => 1
                                 ,'hcat_prep'=>q\drop table if exists hadoop_hbase_1;
-create table hadoop_hbase_1(key string, gpa string) STORED BY 'org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:gpa');\
+create table hadoop_hbase_1(key string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:gpa');\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.HBaseReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.HBaseReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:
 \,
                                 ,'sql' => q\select name, sum(gpa) from studenttab10k group by name;\
                                 ,'floatpostprocess' => 1

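The "jar :FUNCPATH:/testudf.jar ... -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:" invocation above follows the usual Hadoop Tool convention: ToolRunner runs GenericOptionsParser, which consumes -libjars, so run() only sees the four positional arguments. A hedged sketch of that driver shape (the class name is hypothetical; the real test driver is HBaseReadWrite, whose diff appears later in this commit):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.conf.Configured;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;

  public class HBaseReadWriteDriverSketch extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
      String serverUri  = args[0]; // :THRIFTSERVER:
      String inputPath  = args[1]; // :INPATH:/studenttab10k
      String tableName  = args[2]; // hadoop_hbase_1
      String outputPath = args[3]; // :OUTPATH:
      // job wiring omitted; see the HCatInputFormat/HCatOutputFormat sketch further down
      return 0;
    }

    public static void main(String[] args) throws Exception {
      System.exit(ToolRunner.run(new Configuration(), new HBaseReadWriteDriverSketch(), args));
    }
  }
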
Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/tests/pig.conf Wed Oct  2 21:32:43 2013
@@ -319,15 +319,15 @@ store c into ':OUTPATH:';\
                                 {
                                  'num' => 1
                                 ,'hcat_prep'=>q\drop table if exists pig_hbase_1;
-create table pig_hbase_1(key string, age string, gpa string) STORED BY 'org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');\
+create table pig_hbase_1(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');\
                                 ,'pig' => q\set hcat.hbase.output.bulkMode 'false'
 a = load ':INPATH:/studenttab10k' as (name:chararray, age:int, gpa:float);
 b = group a by name;
 c = foreach b generate group as name, AVG(a.age) as age, AVG(a.gpa) as gpa;
 d = foreach c generate name as key, (chararray)age, (chararray)gpa as gpa;
-store d into 'pig_hbase_1' using org.apache.hive.hcatalog.pig.HCatStorer();
+store d into 'pig_hbase_1' using org.apache.hcatalog.pig.HCatStorer();
 exec
-e = load 'pig_hbase_1' using org.apache.hive.hcatalog.pig.HCatLoader();
+e = load 'pig_hbase_1' using org.apache.hcatalog.pig.HCatLoader();
 store e into ':OUTPATH:';\,
                                 ,'result_table' => ['pig_hbase_1','?']
 				,'sql'   => [ 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;', 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;' ]
@@ -338,17 +338,17 @@ store e into ':OUTPATH:';\,
                                  # multiquery
                                  'num' => 2
                                 ,'hcat_prep'=>q\drop table if exists pig_hbase_2_1;
-create table pig_hbase_2_1(key string, age string, gpa string) STORED BY 'org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
+create table pig_hbase_2_1(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
 drop table if exists pig_hbase_2_2;
-create table pig_hbase_2_2(key string, age string, gpa string) STORED BY 'org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
+create table pig_hbase_2_2(key string, age string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:age,info:gpa');
 \
                                 ,'pig' => q\set hcat.hbase.output.bulkMode 'false'
 a = load ':INPATH:/studenttab10k' as (name:chararray, age:int, gpa:float);
 b = group a by name;
 c = foreach b generate group as name, AVG(a.age) as age, AVG(a.gpa) as gpa;
 d = foreach c generate name as key, (chararray)age, (chararray)gpa as gpa;
-store d into 'pig_hbase_2_1' using org.apache.hive.hcatalog.pig.HCatStorer();
-store d into 'pig_hbase_2_2' using org.apache.hive.hcatalog.pig.HCatStorer();\,
+store d into 'pig_hbase_2_1' using org.apache.hcatalog.pig.HCatStorer();
+store d into 'pig_hbase_2_2' using org.apache.hcatalog.pig.HCatStorer();\,
                                 ,'result_table' => ['pig_hbase_2_1','pig_hbase_2_2']
 				,'sql'   => [ 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;', 'select name, avg(cast(age as decimal(10,5))), avg(gpa) from studenttab10k group by name;']
                                 ,'floatpostprocess' => 1

Copied: hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java (from r1528631, hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/HBaseReadWrite.java)
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java?p2=hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java&p1=hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/HBaseReadWrite.java&r1=1528631&r2=1528633&rev=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/HBaseReadWrite.java (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java Wed Oct  2 21:32:43 2013
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.apache.hive.hcatalog.utils;
+package org.apache.hcatalog.utils;
 
 import java.io.IOException;
 
@@ -36,13 +36,13 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.hcatalog.common.HCatConstants;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
 
 /**
  * This is a map reduce test for testing hcat which goes against the "numbers"

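The import block above moves HBaseReadWrite back onto the pre-rename org.apache.hcatalog mapreduce API. A minimal driver-side sketch of that API, with the mapper and reducer left out; the setInput/setOutput/getTableSchema signatures are written from memory of the HCatalog 0.x examples and should be treated as assumptions:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hcatalog.data.schema.HCatSchema;
  import org.apache.hcatalog.mapreduce.HCatInputFormat;
  import org.apache.hcatalog.mapreduce.HCatOutputFormat;
  import org.apache.hcatalog.mapreduce.InputJobInfo;
  import org.apache.hcatalog.mapreduce.OutputJobInfo;

  public class HCatCopySketch {
    public static Job configure(Configuration conf, String db, String inTable, String outTable)
        throws Exception {
      Job job = new Job(conf, "hcat-copy-sketch");
      // read inTable rows as HCatRecord values
      HCatInputFormat.setInput(job, InputJobInfo.create(db, inTable, null));
      job.setInputFormatClass(HCatInputFormat.class);
      // write HCatRecord values into outTable, reusing its table schema
      HCatOutputFormat.setOutput(job, OutputJobInfo.create(db, outTable, null));
      HCatSchema schema = HCatOutputFormat.getTableSchema(job);
      HCatOutputFormat.setSchema(job, schema);
      job.setOutputFormatClass(HCatOutputFormat.class);
      return job;
    }
  }
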
Modified: hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java (original)
+++ hive/trunk/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hive/hcatalog/utils/WriteTextPartitioned.java Wed Oct  2 21:32:43 2013
@@ -48,7 +48,7 @@ import org.apache.hive.hcatalog.mapreduc
  * other columns. This is to simulate a typical operation in a map reduce
  * program to test that hcat hands the right data to the map reduce program
  *
- * Usage: hadoop jar org.apache.hive.hcatalog.utils.HBaseReadWrite -libjars
+ * Usage: hadoop jar org.apache.hcatalog.utils.HBaseReadWrite -libjars
  * &lt;hcat_jar&gt; * &lt;serveruri&gt; &lt;input_tablename&gt; &lt;output_tablename&gt; [filter]
  * If filter is given it will be provided as the partition to write to.
  */

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/README.txt?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/README.txt Wed Oct  2 21:32:43 2013
@@ -69,6 +69,7 @@ Setup
 2. Install perl and following perl modules  (cpan -i <MODULE_NAME>)
 * IPC::Run
 * JSON
+* JSON::Path
 * Data::Dump
 * Number::Compare
 * Text::Glob

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java?rev=1528633&r1=1528632&r2=1528633&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java Wed Oct  2 21:32:43 2013
@@ -22,6 +22,8 @@ package org.apache.hcatalog.hbase;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,8 +36,11 @@ import org.apache.hadoop.hive.ql.securit
 /**
  * This class is an implementation of HiveAuthorizationProvider to provide
  * authorization functionality for HBase tables.
+ * @deprecated 
  */
-class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
 
   @Override
   public Configuration getConf() {