Posted to hcatalog-commits@incubator.apache.org by ga...@apache.org on 2012/03/13 00:23:44 UTC

svn commit: r1299933 - in /incubator/hcatalog/trunk: ./ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hcatalog/common/ src/java/org/apache/hcatalog/mapred/ src/java/org/apache/hcatalog/mapreduce/ src/java/org/apache/hcatalog/pig/ src/te...

Author: gates
Date: Tue Mar 13 00:23:43 2012
New Revision: 1299933

URL: http://svn.apache.org/viewvc?rev=1299933&view=rev
Log:
HCATALOG-298 InputJobInfo still uses serverUri and serverKerberosPrincipal

Modified:
    incubator/hcatalog/trunk/CHANGES.txt
    incubator/hcatalog/trunk/src/docs/src/documentation/content/xdocs/loadstore.xml
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverPig.pm
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
    incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
    incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
    incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
    incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java

Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Tue Mar 13 00:23:43 2012
@@ -34,6 +34,8 @@ Trunk (unreleased changes)
 Release 0.4.0 - Unreleased
 
   INCOMPATIBLE CHANGES
+  HCAT-298 InputJobInfo still uses serverUri and serverKerberosPrincipal (khorgath via gates)
+
   HCAT-295 Rename storage-drivers directory to storage-handlers (gates)
 
   HCAT-264 Barrier for HCatOutputFormat for Pig/Mapreduce if the table contains feature only supported in Hive (daijy via gates)

Modified: incubator/hcatalog/trunk/src/docs/src/documentation/content/xdocs/loadstore.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/docs/src/documentation/content/xdocs/loadstore.xml?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/docs/src/documentation/content/xdocs/loadstore.xml (original)
+++ incubator/hcatalog/trunk/src/docs/src/documentation/content/xdocs/loadstore.xml Tue Mar 13 00:23:43 2012
@@ -34,14 +34,14 @@ $ pig mypig.script
     
    <p> If you run your Pig script using the "java" command (java -cp pig.jar...), then the hcat jar needs to be included in the classpath of the java command line (using the -cp option). Additionally, the following properties are required in the command line: </p>
     <ul>
-		<li>-Dhcat.metastore.uri=thrift://&lt;hcatalog server hostname&gt;:9080 </li>
-		<li>-Dhcat.metastore.principal=&lt;hcatalog server kerberos principal&gt; </li>
+		<li>-Dhive.metastore.uris=thrift://&lt;hcatalog server hostname&gt;:9080 </li>
+		<li>-Dhive.metastore.kerberos.principal=&lt;hcatalog server kerberos principal&gt; </li>
 	</ul>
 	
 <source>
 $ java -cp pig.jar hcatalog.jar
-     -Dhcat.metastore.uri=thrift://&lt;hcatalog server hostname&gt;:9080 
-     -Dhcat.metastore.principal=&lt;hcatalog server kerberos principal&gt; myscript.pig
+     -Dhive.metastore.uris=thrift://&lt;hcatalog server hostname&gt;:9080 
+     -Dhive.metastore.kerberos.principal=&lt;hcatalog server kerberos principal&gt; myscript.pig
 </source>
 <p></p>
 <p><strong>Authentication</strong></p>

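The same two settings can also be supplied from client code rather than on the command line. A minimal sketch of the programmatic equivalent (the host name and principal below are placeholders, not values from this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class MetastoreConnectionExample {
      // Programmatic equivalent of -Dhive.metastore.uris and
      // -Dhive.metastore.kerberos.principal on the command line.
      public static Configuration configure() {
        Configuration conf = new Configuration();
        conf.set(HiveConf.ConfVars.METASTOREURIS.varname,
            "thrift://metastore.example.com:9080");          // placeholder host
        conf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
            "hcat/_HOST@EXAMPLE.COM");                       // placeholder principal
        return conf;
      }
    }
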
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java Tue Mar 13 00:23:43 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hcatalog.common;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 
@@ -50,7 +51,7 @@ public final class HCatConstants {
 
   public static final String HCAT_TABLE_SCHEMA = "hcat.table.schema";
 
-  public static final String HCAT_METASTORE_URI = "hcat.metastore.uri";
+  public static final String HCAT_METASTORE_URI = HiveConf.ConfVars.METASTOREURIS.varname;
 
   public static final String HCAT_PERMS = "hcat.perms";
 
@@ -60,7 +61,8 @@ public final class HCatConstants {
 
   public static final String HCAT_CREATE_DB_NAME = "hcat.create.db.name";
 
-  public static final String HCAT_METASTORE_PRINCIPAL = "hcat.metastore.principal";
+  public static final String HCAT_METASTORE_PRINCIPAL 
+          = HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname;
 
   // IMPORTANT IMPORTANT IMPORTANT!!!!!
   //The keys used to store info into the job Configuration.

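Because both constants are now defined from HiveConf.ConfVars, existing code that sets HCatConstants.HCAT_METASTORE_URI or HCAT_METASTORE_PRINCIPAL and new code that uses the Hive property names write to the same configuration keys. A small sketch of the aliasing (the URI value is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hcatalog.common.HCatConstants;

    public class ConstantAliasExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // HCAT_METASTORE_URI now resolves to "hive.metastore.uris",
        // so both writes below update the same entry.
        conf.set(HCatConstants.HCAT_METASTORE_URI, "thrift://metastore.example.com:9080");
        conf.set(HiveConf.ConfVars.METASTOREURIS.varname, "thrift://metastore.example.com:9080");
        System.out.println(conf.get("hive.metastore.uris"));
      }
    }
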
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java Tue Mar 13 00:23:43 2012
@@ -624,60 +624,37 @@ public class HCatUtil {
     }
 
 
-    public static HiveConf getHiveConf(String url, Configuration conf) 
+    public static HiveConf getHiveConf(Configuration conf) 
       throws IOException {
-      HiveConf hiveConf = new HiveConf();
-
-      if( url != null ) {
-        //User specified a thrift url
 
-        hiveConf.set("hive.metastore.local", "false");
-        hiveConf.set(ConfVars.METASTOREURIS.varname, url);
+      HiveConf hiveConf = new HiveConf();
 
-        String kerberosPrincipal = conf.get(
-                                   HCatConstants.HCAT_METASTORE_PRINCIPAL);
-        if (kerberosPrincipal == null){
-            kerberosPrincipal = conf.get(
-                                ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname);
-        }
-        if (kerberosPrincipal != null){
-            hiveConf.setBoolean(
-                    ConfVars.METASTORE_USE_THRIFT_SASL.varname, true);
-            hiveConf.set(
-                    ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, 
-                    kerberosPrincipal);
-        }
+      //copy the hive conf into the job conf and restore it
+      //in the backend context
+      if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
+        conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, 
+            HCatUtil.serialize(hiveConf.getAllProperties()));
       } else {
-        //Thrift url is null, copy the hive conf into 
-        //the job conf and restore it
-        //in the backend context
-
-        if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
-          conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, 
-                   HCatUtil.serialize(hiveConf.getAllProperties()));
-        } else {
-          //Copy configuration properties into the hive conf
-          Properties properties = (Properties) HCatUtil.deserialize(
-                                  conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
-
-          for(Map.Entry<Object, Object> prop : properties.entrySet() ) {
-            if( prop.getValue() instanceof String ) {
-              hiveConf.set((String) prop.getKey(), (String) prop.getValue());
-            } else if( prop.getValue() instanceof Integer ) {
-              hiveConf.setInt((String) prop.getKey(), 
-                              (Integer) prop.getValue());
-            } else if( prop.getValue() instanceof Boolean ) {
-              hiveConf.setBoolean((String) prop.getKey(), 
-                                  (Boolean) prop.getValue());
-            } else if( prop.getValue() instanceof Long ) {
-              hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
-            } else if( prop.getValue() instanceof Float ) {
-              hiveConf.setFloat((String) prop.getKey(), 
-                                (Float) prop.getValue());
-            }
+        //Copy configuration properties into the hive conf
+        Properties properties = (Properties) HCatUtil.deserialize(
+            conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
+
+        for(Map.Entry<Object, Object> prop : properties.entrySet() ) {
+          if( prop.getValue() instanceof String ) {
+            hiveConf.set((String) prop.getKey(), (String) prop.getValue());
+          } else if( prop.getValue() instanceof Integer ) {
+            hiveConf.setInt((String) prop.getKey(), 
+                (Integer) prop.getValue());
+          } else if( prop.getValue() instanceof Boolean ) {
+            hiveConf.setBoolean((String) prop.getKey(), 
+                (Boolean) prop.getValue());
+          } else if( prop.getValue() instanceof Long ) {
+            hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
+          } else if( prop.getValue() instanceof Float ) {
+            hiveConf.setFloat((String) prop.getKey(), 
+                (Float) prop.getValue());
           }
         }
-
       }
 
       if(conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {

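With the serverUri parameter gone, HCatUtil.getHiveConf builds the HiveConf entirely from the job Configuration: on the front end it serializes the HiveConf properties into HCAT_KEY_HIVE_CONF, and on the back end it restores them. A sketch of how the updated call sites below obtain a metastore client (illustrative only; error handling omitted):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hcatalog.common.HCatUtil;

    public class GetHiveConfExample {
      public static HiveMetaStoreClient connect(Configuration jobConf) throws Exception {
        // Single-argument form: hive.metastore.uris, the kerberos principal and the
        // token signature are read from jobConf instead of being passed explicitly.
        HiveConf hiveConf = HCatUtil.getHiveConf(jobConf);
        return HCatUtil.createHiveClient(hiveConf);
      }
    }
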
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java Tue Mar 13 00:23:43 2012
@@ -114,7 +114,7 @@ public class HCatMapredInputFormat imple
   public static void setTableDesc(TableDesc tableDesc, Map<String,String> jobProperties) throws IOException{
     try {
     Pair<String,String> dbAndTableName = HCatUtil.getDbAndTableName(tableDesc.getTableName());
-    InputJobInfo info = InputJobInfo.create(dbAndTableName.first, dbAndTableName.second, "", null, null);
+    InputJobInfo info = InputJobInfo.create(dbAndTableName.first, dbAndTableName.second, "");
     jobProperties.put(HCatConstants.HCAT_KEY_JOB_INFO
         ,InitializeInput.getSerializedHcatKeyJobInfo(
             null, info,tableDesc.getProperties().getProperty("location")));

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java Tue Mar 13 00:23:43 2012
@@ -91,8 +91,7 @@ class DefaultOutputCommitterContainer ex
 
         //Cancel HCat and JobTracker tokens
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null,
-                                                  context.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
             HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
             String tokenStrForm = client.getTokenStrForm();
             if(tokenStrForm != null && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java Tue Mar 13 00:23:43 2012
@@ -166,8 +166,7 @@ class FileOutputCommitterContainer exten
         OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext);
 
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null, 
-                                                jobContext.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(jobContext.getConfiguration());
             HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
             // cancel the deleg. tokens that were acquired for this job now that
             // we are done - we should cancel if the tokens were acquired by
@@ -294,7 +293,7 @@ class FileOutputCommitterContainer exten
         List<Partition> partitionsAdded = new ArrayList<Partition>();
 
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null, conf);
+            HiveConf hiveConf = HCatUtil.getHiveConf(conf);
             client = HCatUtil.createHiveClient(hiveConf);
 
             StorerInfo storer = InternalUtil.extractStorerInfo(table.getSd(),table.getParameters());

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java Tue Mar 13 00:23:43 2012
@@ -93,8 +93,7 @@ class FileOutputFormatContainer extends 
     public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
         OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null, 
-                                              context.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
             handleDuplicatePublish(context,
                     jobInfo,
                     HCatUtil.createHiveClient(hiveConf),

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java Tue Mar 13 00:23:43 2012
@@ -226,8 +226,7 @@ public abstract class HCatBaseOutputForm
   }
 
   static void cancelDelegationTokens(JobContext context, OutputJobInfo outputJobInfo) throws Exception {
-    HiveConf hiveConf = HCatUtil.getHiveConf(null, 
-                                             context.getConfiguration());
+    HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
     HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
     // cancel the deleg. tokens that were acquired for this job now that
     // we are done - we should cancel if the tokens were acquired by

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java Tue Mar 13 00:23:43 2012
@@ -75,7 +75,7 @@ public class HCatOutputFormat extends HC
       try {
 
         Configuration conf = job.getConfiguration();
-        hiveConf = HCatUtil.getHiveConf(null, conf);
+        hiveConf = HCatUtil.getHiveConf(conf);
         client = HCatUtil.createHiveClient(hiveConf);
         Table table = client.getTable(outputJobInfo.getDatabaseName(), outputJobInfo.getTableName());
         

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java Tue Mar 13 00:23:43 2012
@@ -71,9 +71,9 @@ public class InitializeInput {
   static final String HCAT_KEY_PREFIX = "hcat.";
   private static HiveConf hiveConf;
 
-  private static HiveMetaStoreClient createHiveMetaClient(Configuration conf, InputJobInfo inputJobInfo) throws Exception {
+  private static HiveMetaStoreClient createHiveMetaClient(Configuration conf) throws Exception {
 
-      hiveConf = getHiveConf(inputJobInfo, conf);
+      hiveConf = HCatUtil.getHiveConf(conf);
       return new HiveMetaStoreClient(hiveConf, null);
   }
 
@@ -101,7 +101,7 @@ public class InitializeInput {
 
     try {
       if (job != null){
-        client = createHiveMetaClient(job.getConfiguration(),inputJobInfo);
+        client = createHiveMetaClient(job.getConfiguration());
       } else {
         hiveConf = new HiveConf(HCatInputFormat.class);
         client = new HiveMetaStoreClient(hiveConf, null);
@@ -201,82 +201,4 @@ public class InitializeInput {
                         jobProperties, inputJobInfo.getTableInfo());
   }
 
-    static HiveConf getHiveConf(InputJobInfo iInfo, Configuration conf)
-            throws IOException {
-
-        HiveConf hiveConf = new HiveConf(HCatInputFormat.class);
-
-        if (iInfo.getServerUri() != null) {
-            // User specified a thrift url
-
-            hiveConf.set("hive.metastore.local", "false");
-            hiveConf.set(ConfVars.METASTOREURIS.varname, iInfo.getServerUri());
-
-            String kerberosPrincipal = iInfo.getServerKerberosPrincipal();
-            if (kerberosPrincipal != null) {
-                hiveConf.setBoolean(
-                        HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname,
-                        true);
-                hiveConf.set(
-                        HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
-                        kerberosPrincipal);
-            } else {
-
-                kerberosPrincipal = conf
-                        .get(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-
-                if (kerberosPrincipal == null) {
-                    kerberosPrincipal = conf
-                            .get(ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname);
-                }
-                if (kerberosPrincipal != null) {
-                    hiveConf.setBoolean(
-                            ConfVars.METASTORE_USE_THRIFT_SASL.varname, true);
-                    hiveConf.set(ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
-                            kerberosPrincipal);
-                }
-
-                if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
-                    hiveConf.set("hive.metastore.token.signature",
-                            conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
-                }
-            }
-
-        } else {
-            // Thrift url is null, copy the hive conf into the job conf and
-            // restore it
-            // in the backend context
-
-            if (conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null) {
-                conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                        HCatUtil.serialize(hiveConf.getAllProperties()));
-            } else {
-                // Copy configuration properties into the hive conf
-                Properties properties = (Properties) HCatUtil.deserialize(conf
-                        .get(HCatConstants.HCAT_KEY_HIVE_CONF));
-
-                for (Map.Entry<Object, Object> prop : properties.entrySet()) {
-                    if (prop.getValue() instanceof String) {
-                        hiveConf.set((String) prop.getKey(),
-                                (String) prop.getValue());
-                    } else if (prop.getValue() instanceof Integer) {
-                        hiveConf.setInt((String) prop.getKey(),
-                                (Integer) prop.getValue());
-                    } else if (prop.getValue() instanceof Boolean) {
-                        hiveConf.setBoolean((String) prop.getKey(),
-                                (Boolean) prop.getValue());
-                    } else if (prop.getValue() instanceof Long) {
-                        hiveConf.setLong((String) prop.getKey(),
-                                (Long) prop.getValue());
-                    } else if (prop.getValue() instanceof Float) {
-                        hiveConf.setFloat((String) prop.getKey(),
-                                (Float) prop.getValue());
-                    }
-                }
-            }
-
-        }
-        return hiveConf;
-    }
-
 }

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java Tue Mar 13 00:23:43 2012
@@ -38,15 +38,6 @@ public class InputJobInfo implements Ser
   /** meta information of the table to be read from */
   private HCatTableInfo tableInfo;
 
-  /** The Metadata server uri */
-  private final String serverUri;
-
-  /** If the hcat server is configured to work with hadoop security, this
-   * variable will hold the principal name of the server - this will be used
-   * in the authentication to the hcat server using kerberos
-   */
-  private final String serverKerberosPrincipal;
-
   /** The partition filter */
   private String filter;
 
@@ -65,32 +56,21 @@ public class InputJobInfo implements Ser
    * @param databaseName the db name
    * @param tableName the table name
    * @param filter the partition filter
-   * @param serverUri the Metadata server uri
-   * @param serverKerberosPrincipal If the hcat server is configured to
-   * work with hadoop security, the kerberos principal name of the server - else null
-   * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
-   * The special string _HOST will be replaced automatically with the correct host name
    */
+
   public static InputJobInfo create(String databaseName,
-                                    String tableName,
-                                    String filter,
-                                    String serverUri,
-                                    String serverKerberosPrincipal) {
-    return new InputJobInfo(databaseName, tableName, filter, 
-                            serverUri, serverKerberosPrincipal);
+      String tableName,
+      String filter) {
+    return new InputJobInfo(databaseName, tableName, filter);
   }
 
+  
   private InputJobInfo(String databaseName,
                        String tableName,
-                       String filter,
-                       String serverUri,
-                       String serverKerberosPrincipal) {
+                       String filter) {
     this.databaseName = (databaseName == null) ? 
                         MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
-    this.serverUri = serverUri;
-    this.serverKerberosPrincipal = serverKerberosPrincipal;
     this.filter = filter;
     this.properties = new Properties();
   }
@@ -130,21 +110,6 @@ public class InputJobInfo implements Ser
   }
 
   /**
-   * @return the serverKerberosPrincipal
-   */
-  public String getServerKerberosPrincipal() {
-    return serverKerberosPrincipal;
-  }
-
-  /**
-   * Gets the value of serverUri
-   * @return the serverUri
-   */
-  public String getServerUri() {
-    return serverUri;
-  }
-
-  /**
    * Gets the value of partition filter
    * @return the filter string
    */

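InputJobInfo now describes only what to read (database, table, partition filter); where to read it from comes from the job configuration. A minimal read-side sketch using the new three-argument factory (database and table names are placeholders):

    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hcatalog.mapreduce.InputJobInfo;

    public class InputJobInfoExample {
      public static void setUpInput(Job job) throws Exception {
        // The serverUri and serverKerberosPrincipal arguments are gone; the metastore
        // location is taken from hive.metastore.uris in job.getConfiguration().
        InputJobInfo info = InputJobInfo.create("default", "mytable", null);
        HCatInputFormat.setInput(job, info);
        job.setInputFormatClass(HCatInputFormat.class);
      }
    }
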
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java Tue Mar 13 00:23:43 2012
@@ -90,10 +90,7 @@ public class HCatLoader extends HCatBase
       HCatInputFormat.setInput(job,
                                InputJobInfo.create(dbName,
                                                    tableName,
-                                                   getPartitionFilterString(),
-                 hcatServerUri != null ? hcatServerUri : 
-                  (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
-                                    PigHCatUtil.getHCatServerPrincipal(job)));
+                                                   getPartitionFilterString()));
     }
 
     // Need to also push projections by calling setOutputSchema on

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/PigHCatUtil.java Tue Mar 13 00:23:43 2012
@@ -29,6 +29,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -82,7 +83,7 @@ public class PigHCatUtil {
 
   static public String getHCatServerUri(Job job) {
 
-    return job.getConfiguration().get(HCatConstants.HCAT_METASTORE_URI);
+    return job.getConfiguration().get(HiveConf.ConfVars.METASTOREURIS.varname);
   }
 
   static public String getHCatServerPrincipal(Job job) {

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm Tue Mar 13 00:23:43 2012
@@ -353,8 +353,8 @@ sub runHadoop
     }
 
     if (defined($testCmd->{'metastore.principal'}) && ($testCmd->{'metastore.principal'} =~ m/\S+/)) {
-        $ENV{'HADOOP_OPTS'} = "-Dhcat.metastore.principal=" . $testCmd->{'metastore.principal'};
-        $ENV{'HADOOP_CLIENT_OPTS'} = "-Dhcat.metastore.principal=" . $testCmd->{'metastore.principal'};
+        $ENV{'HADOOP_OPTS'} = "-Dhive.metastore.kerberos.principal=" . $testCmd->{'metastore.principal'};
+        $ENV{'HADOOP_CLIENT_OPTS'} = "-Dhive.metastore.kerberos.principal=" . $testCmd->{'metastore.principal'};
     }
 
     # Add su user if provided
@@ -650,7 +650,7 @@ sub getPigCmd($$$)
         push(@pigCmd, ("-x", "local"));
     }
 
-    my $opts .= "-Dhcat.metastore.uri=$testCmd->{'thriftserver'}";
+    my $opts .= "-Dhive.metastore.uris=$testCmd->{'thriftserver'}";
     if (defined($testCmd->{'java_params'})) {
         $opts = $opts . " " . join(" ", @{$testCmd->{'java_params'}});
     }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverPig.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverPig.pm?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverPig.pm (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverPig.pm Tue Mar 13 00:23:43 2012
@@ -392,7 +392,7 @@ sub getPigCmd($$$)
         push(@pigCmd, ("-x", "local"));
     }
 
-    my $opts .= "-Dhcat.metastore.uri=$testCmd->{'thriftserver'}";
+    my $opts .= "-Dhive.metastore.uris=$testCmd->{'thriftserver'}";
     if (defined($testCmd->{'java_params'})) {
         $opts = $opts . " " . join(" ", @{$testCmd->{'java_params'}});
     }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java Tue Mar 13 00:23:43 2012
@@ -106,7 +106,7 @@ public class GroupByAge extends Configur
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "GroupByAge");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

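The remaining e2e utilities in this commit follow the same pattern: the trailing serverUri/principalID arguments are dropped from InputJobInfo.create, and the metastore connection comes from the configuration (the test drivers above now pass -Dhive.metastore.uris). A hedged sketch of the resulting pattern; the conf.set of the URI is shown only as an option for callers that hold it in code, not as part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.common.HCatConstants;
    import org.apache.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hcatalog.mapreduce.InputJobInfo;

    public class MigrationPatternExample {
      public static Job createJob(Configuration conf, String dbName, String inputTableName,
          String serverUri, String principalID) throws Exception {
        // Optional: connection details go into the Configuration (the constants now
        // map to hive.metastore.uris and hive.metastore.kerberos.principal).
        if (serverUri != null)
          conf.set(HCatConstants.HCAT_METASTORE_URI, serverUri);
        if (principalID != null)
          conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
        Job job = new Job(conf, "GroupByAge");
        // InputJobInfo.create no longer takes serverUri or principalID.
        HCatInputFormat.setInput(job, InputJobInfo.create(dbName, inputTableName, null));
        job.setInputFormatClass(HCatInputFormat.class);
        return job;
      }
    }
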
Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java Tue Mar 13 00:23:43 2012
@@ -167,7 +167,7 @@ public class HBaseReadWrite extends Conf
         
         job = new Job(conf, "HBaseRead");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName, tableName,
-                null, serverUri, principalID));
+                null));
         
         job.setInputFormatClass(HCatInputFormat.class);
         job.setOutputFormatClass(TextOutputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java Tue Mar 13 00:23:43 2012
@@ -94,7 +94,7 @@ public class ReadJson extends Configured
     conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadJson");
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     
     job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java Tue Mar 13 00:23:43 2012
@@ -94,7 +94,7 @@ public class ReadRC extends Configured i
     conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadRC");
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     
     job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java Tue Mar 13 00:23:43 2012
@@ -104,7 +104,7 @@ public class ReadText extends Configured
     conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "ReadText");
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     
     job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java Tue Mar 13 00:23:43 2012
@@ -86,7 +86,7 @@ public class ReadWrite extends Configure
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadWrite");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java Tue Mar 13 00:23:43 2012
@@ -88,7 +88,7 @@ System.out.println(name);
     conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "SimpleRead");
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     
     job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java Tue Mar 13 00:23:43 2012
@@ -103,7 +103,7 @@ public class StoreComplex {
     // initialize HCatInputFormat
 
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
             dbName, outputTableName, outputPartitionKvps));

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java Tue Mar 13 00:23:43 2012
@@ -114,7 +114,7 @@ public class StoreDemo {
     Job job = new Job(conf, "storedemo");
     // initialize HCatInputFormat
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
             dbName, outputTableName, outputPartitionKvps));

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java Tue Mar 13 00:23:43 2012
@@ -178,7 +178,7 @@ public class StoreNumbers {
     
     // initialize HCatInputFormat
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
             dbName, outputTableName, outputPartitionKvps));

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java Tue Mar 13 00:23:43 2012
@@ -160,7 +160,7 @@ public class SumNumbers {
     conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
     Job job = new Job(conf, "sumnumbers");
     HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null, serverUri, principalID));
+    		dbName, tableName, null));
     // initialize HCatOutputFormat
     
     job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java Tue Mar 13 00:23:43 2012
@@ -150,7 +150,7 @@ public class TypeDataCheck implements To
 			Job job = new Job(conf, "typedatacheck");
 			// initialize HCatInputFormat
 			HCatInputFormat.setInput(job, InputJobInfo.create(
-					dbName, tableName, null, serverUri, principalID));
+					dbName, tableName, null));
 			HCatSchema s = HCatInputFormat.getTableSchema(job);
 			job.getConfiguration().set(SCHEMA_KEY, schemaStr);
 			job.getConfiguration().set(DELIM, outputdelim);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java Tue Mar 13 00:23:43 2012
@@ -93,7 +93,7 @@ public class WriteJson extends Configure
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteJson");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java Tue Mar 13 00:23:43 2012
@@ -95,7 +95,7 @@ public class WriteRC extends Configured 
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteRC");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java Tue Mar 13 00:23:43 2012
@@ -104,7 +104,7 @@ public class WriteText extends Configure
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteText");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java Tue Mar 13 00:23:43 2012
@@ -95,7 +95,7 @@ public class WriteTextPartitioned extend
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteTextPartitioned");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, filter, serverUri, principalID));
+                inputTableName, filter));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java Tue Mar 13 00:23:43 2012
@@ -293,7 +293,7 @@ public abstract class HCatMapReduceTest 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter,thriftUri,null);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter);
     HCatInputFormat.setInput(job, inputJobInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -325,7 +325,7 @@ public abstract class HCatMapReduceTest 
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null,thriftUri,null);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null);
     HCatInputFormat.setInput(job, inputJobInfo);
 
     return HCatInputFormat.getTableSchema(job);

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java Tue Mar 13 00:23:43 2012
@@ -589,7 +589,7 @@ public class TestHBaseBulkOutputFormat e
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null, null, null);
+                tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java Tue Mar 13 00:23:43 2012
@@ -354,7 +354,7 @@ public class TestHBaseDirectOutputFormat
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null, null, null);
+                tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java Tue Mar 13 00:23:43 2012
@@ -212,7 +212,7 @@ public class TestHBaseInputFormat extend
 
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
-                null, null, null);
+                null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -275,8 +275,7 @@ public class TestHBaseInputFormat extend
         job.setMapperClass(MapReadProjHTable.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setOutputSchema(job, getProjectionSchema());
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
@@ -336,8 +335,7 @@ public class TestHBaseInputFormat extend
         job.setInputFormat(HBaseInputFormat.class);
 
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         //Configure projection schema
         job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
         Job newJob = new Job(job);
@@ -409,8 +407,7 @@ public class TestHBaseInputFormat extend
         MapReadHTable.resetCounters();
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -472,8 +469,7 @@ public class TestHBaseInputFormat extend
         job.setMapperClass(MapReadHTableRunningAbort.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java?rev=1299933&r1=1299932&r2=1299933&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java Tue Mar 13 00:23:43 2012
@@ -87,7 +87,7 @@ public class TestSnapshots extends Skele
         cmdResponse = hcatDriver.run(tableQuery);
         assertEquals(0, cmdResponse.getResponseCode());
 
-        InputJobInfo inputInfo = InputJobInfo.create(databaseName, tableName, null, null, null);
+        InputJobInfo inputInfo = InputJobInfo.create(databaseName, tableName, null);
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
                 HCatUtil.serialize(getHiveConf().getAllProperties()));
@@ -121,7 +121,7 @@ public class TestSnapshots extends Skele
         revMap.clear();
         revMap.put("cf1", 3L);
         hbaseSnapshot = new TableSnapshot(fullyQualTableName, revMap, -1);
-        inputInfo = InputJobInfo.create(databaseName, tableName, null, null, null);
+        inputInfo = InputJobInfo.create(databaseName, tableName, null);
         inputInfo.getProperties().setProperty(HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY, "dummysnapshot");
         InitializeInput.setInput(job, inputInfo);
         modifiedInputInfo = job.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);