Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/19 06:43:00 UTC

svn commit: r1626122 - in /hive/branches/cbo: ./ data/files/ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/ itests/hive-unit/src/test/java/org/apache...

Author: gunther
Date: Fri Sep 19 04:42:59 2014
New Revision: 1626122

URL: http://svn.apache.org/r1626122
Log:
Merge latest trunk into cbo branch. (Gunther Hagleitner)

Added:
    hive/branches/cbo/data/files/data_with_escape.txt
      - copied unchanged from r1626121, hive/trunk/data/files/data_with_escape.txt
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
      - copied unchanged from r1626121, hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
    hive/branches/cbo/ql/src/test/queries/clientnegative/update_no_such_table.q
      - copied unchanged from r1626121, hive/trunk/ql/src/test/queries/clientnegative/update_no_such_table.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/escape3.q
      - copied unchanged from r1626121, hive/trunk/ql/src/test/queries/clientpositive/escape3.q
    hive/branches/cbo/ql/src/test/results/clientnegative/update_no_such_table.q.out
      - copied unchanged from r1626121, hive/trunk/ql/src/test/results/clientnegative/update_no_such_table.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/escape3.q.out
      - copied unchanged from r1626121, hive/trunk/ql/src/test/results/clientpositive/escape3.q.out
Removed:
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatPigStorer.java
Modified:
    hive/branches/cbo/   (props changed)
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
    hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java
    hive/branches/cbo/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
    hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
    hive/branches/cbo/ql/src/test/queries/clientpositive/drop_index.q
    hive/branches/cbo/ql/src/test/results/clientpositive/drop_index.q.out
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java

Propchange: hive/branches/cbo/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1626047-1626121

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java Fri Sep 19 04:42:59 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
 import org.apache.pig.LoadFunc;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
 import org.apache.pig.data.Tuple;

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java Fri Sep 19 04:42:59 2014
@@ -23,9 +23,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
@@ -42,6 +41,7 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatContext;
@@ -51,12 +51,16 @@ import org.apache.hive.hcatalog.mapreduc
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.HCatMapRedUtil;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
 
-public class TestE2EScenarios extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
+public class TestE2EScenarios {
   private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
       + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
@@ -69,9 +73,8 @@ public class TestE2EScenarios extends Te
     return "orc";
   }
 
-  @Override
-  protected void setUp() throws Exception {
-
+  @Before
+  public void setUp() throws Exception {
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
@@ -90,8 +93,8 @@ public class TestE2EScenarios extends Te
 
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     try {
       dropTable("inpy");
       dropTable("rc5318");
@@ -146,16 +149,13 @@ public class TestE2EScenarios extends Te
     System.err.println("===");
   }
 
-
   private void copyTable(String in, String out) throws IOException, InterruptedException {
     Job ijob = new Job();
     Job ojob = new Job();
     HCatInputFormat inpy = new HCatInputFormat();
     inpy.setInput(ijob , null, in);
     HCatOutputFormat oupy = new HCatOutputFormat();
-    oupy.setOutput(ojob,
-      OutputJobInfo.create(null, out, new HashMap<String,String>()
-      ));
+    oupy.setOutput(ojob, OutputJobInfo.create(null, out, new HashMap<String,String>()));
 
     // Test HCatContext
 
@@ -207,6 +207,7 @@ public class TestE2EScenarios extends Te
   }
 
 
+  @Test
   public void testReadOrcAndRCFromPig() throws Exception {
     String tableSchema = "ti tinyint, si smallint,i int, bi bigint, f float, d double, b boolean";
 
@@ -224,15 +225,14 @@ public class TestE2EScenarios extends Te
     driverRun("LOAD DATA LOCAL INPATH '"+TEXTFILE_LOCN+"' OVERWRITE INTO TABLE inpy");
 
     // write it out from hive to an rcfile table, and to an orc table
-//        driverRun("insert overwrite table rc5318 select * from inpy");
+    //driverRun("insert overwrite table rc5318 select * from inpy");
     copyTable("inpy","rc5318");
-//        driverRun("insert overwrite table orc5318 select * from inpy");
+    //driverRun("insert overwrite table orc5318 select * from inpy");
     copyTable("inpy","orc5318");
 
     pigDump("inpy");
     pigDump("rc5318");
     pigDump("orc5318");
-
   }
 
 }

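Not part of the commit; for orientation only: the hunks above (and the similar ones in TestHCatStorerMulti below) move test classes from JUnit 3 (extends TestCase with setUp/tearDown overrides) to JUnit 4 annotations. A minimal sketch of the target shape, with a made-up class name:

  import org.junit.After;
  import org.junit.Before;
  import org.junit.Test;

  import static org.junit.Assert.assertEquals;

  // Hypothetical example class; mirrors the JUnit 4 shape the migrated tests use:
  // public methods annotated with @Before/@After/@Test instead of TestCase overrides.
  public class ExampleJUnit4Style {
    @Before
    public void setUp() throws Exception { /* e.g. create warehouse dir, init driver */ }

    @After
    public void tearDown() throws Exception { /* e.g. drop tables, clean up */ }

    @Test
    public void testSomething() throws Exception {
      assertEquals(2, 1 + 1);
    }
  }
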
Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java Fri Sep 19 04:42:59 2014
@@ -34,6 +34,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -45,11 +46,13 @@ import org.apache.hadoop.hive.ql.process
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.Pair;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.ResourceStatistics;
@@ -60,10 +63,13 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.PigRunner;
 import org.apache.pig.tools.pigstats.OutputStats;
 import org.apache.pig.tools.pigstats.PigStats;
+
 import org.joda.time.DateTime;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -72,7 +78,7 @@ import static org.junit.Assert.*;
 public class TestHCatLoader {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoader.class);
   private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("java.io.tmpdir") +
-          File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
+      File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
@@ -93,6 +99,7 @@ public class TestHCatLoader {
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
     dropTable(tablename, driver);
   }
+
   static void dropTable(String tablename, Driver driver) throws IOException, CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
@@ -100,7 +107,8 @@ public class TestHCatLoader {
   private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
     createTable(tablename, schema, partitionedBy, driver, storageFormat());
   }
-  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat) 
+
+  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat)
       throws IOException, CommandNeedRetryException {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
@@ -114,6 +122,7 @@ public class TestHCatLoader {
   private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
     createTable(tablename, schema, null);
   }
+
   /**
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
@@ -125,20 +134,20 @@ public class TestHCatLoader {
       throw new IOException("Failed to execute \"" + cmd + "\". Driver returned " + cpr.getResponseCode() + " Error: " + cpr.getErrorMessage());
     }
   }
+
   private static void checkProjection(FieldSchema fs, String expectedName, byte expectedPigType) {
     assertEquals(fs.alias, expectedName);
     assertEquals("Expected " + DataType.findTypeName(expectedPigType) + "; got " +
       DataType.findTypeName(fs.type), expectedPigType, fs.type);
   }
-  
+
   @Before
   public void setup() throws Exception {
-
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+    if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
       throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
     }
 
@@ -192,7 +201,7 @@ public class TestHCatLoader {
     server.registerQuery("B = foreach A generate a,b;", ++i);
     server.registerQuery("B2 = filter B by a < 2;", ++i);
     server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');", ++i);
-    
+
     server.registerQuery("C = foreach A generate a,b;", ++i);
     server.registerQuery("C2 = filter C by a >= 2;", ++i);
     server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');", ++i);
@@ -470,7 +479,7 @@ public class TestHCatLoader {
       {
         fs.delete(new Path(PIGOUTPUT_DIR), true);
       }
-    }finally {
+    } finally {
       new File(PIG_FILE).delete();
     }
   }
@@ -534,7 +543,7 @@ public class TestHCatLoader {
   }
 
   /**
-   * basic tests that cover each scalar type 
+   * basic tests that cover each scalar type
    * https://issues.apache.org/jira/browse/HIVE-5814
    */
   private static final class AllTypesTable {

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Fri Sep 19 04:42:59 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.Command
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -44,8 +45,10 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -225,7 +228,7 @@ public class TestHCatLoaderComplexSchema
       dropTable(tablename);
     }
   }
-  
+
   private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
     Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
     for (int i = 0; i < t1.size(); i++) {
@@ -237,7 +240,7 @@ public class TestHCatLoaderComplexSchema
       Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
     }
   }
-  
+
   private String noOrder(String s) {
     char[] chars = s.toCharArray();
     Arrays.sort(chars);

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java Fri Sep 19 04:42:59 2014
@@ -31,8 +31,10 @@ import java.util.Properties;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.EvalFunc;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigException;
@@ -41,10 +43,13 @@ import org.apache.pig.data.DataByteArray
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.util.LogUtils;
+
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
+
 import org.junit.Assert;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -63,6 +68,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(300));
   }
+
   @Test
   public void testWriteSmallint() throws Exception {
     pigValueRangeTest("junitTypeTest1", "smallint", "int", null, Integer.toString(Short.MIN_VALUE),
@@ -72,6 +78,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(Short.MAX_VALUE + 1));
   }
+
   @Test
   public void testWriteChar() throws Exception {
     pigValueRangeTest("junitTypeTest1", "char(5)", "chararray", null, "xxx", "xxx  ");
@@ -81,6 +88,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteVarchar() throws Exception {
     pigValueRangeTest("junitTypeTest1", "varchar(5)", "chararray", null, "xxx", "xxx");
@@ -90,6 +98,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteDecimalXY() throws Exception {
     pigValueRangeTest("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(1.2).toString(),
@@ -100,6 +109,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(5,2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(500.123).toString());
   }
+
   @Test
   public void testWriteDecimalX() throws Exception {
     //interestingly decimal(2) means decimal(2,0)
@@ -110,6 +120,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(50.123).toString());
   }
+
   @Test
   public void testWriteDecimal() throws Exception {
     //decimal means decimal(10,0)
@@ -120,9 +131,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(12345678900L).toString());
   }
+
   /**
    * because we want to ignore TZ which is included in toString()
-   * include time to make sure it's 0 
+   * include time to make sure it's 0
    */
   private static final String FORMAT_4_DATE = "yyyy-MM-dd HH:mm:ss";
   @Test
@@ -142,6 +154,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);//date out of range due to time!=0
   }
+
   @Test
   public void testWriteDate3() throws Exception {
     DateTime d = new DateTime(1991,10,11,23,10,DateTimeZone.forOffsetHours(-11));
@@ -154,6 +167,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   @Test
   public void testWriteDate2() throws Exception {
     DateTime d = new DateTime(1991,11,12,0,0, DateTimeZone.forID("US/Eastern"));
@@ -168,46 +182,48 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   /**
-   * Note that the value that comes back from Hive will have local TZ on it.  Using local is 
+   * Note that the value that comes back from Hive will have local TZ on it.  Using local is
    * arbitrary but DateTime needs TZ (or will assume default) and Hive does not have TZ.
    * So if you start with Pig value in TZ=x and write to Hive, when you read it back the TZ may
    * be different.  The millis value should match, of course.
-   * 
+   *
    * @throws Exception
    */
   @Test
   public void testWriteTimestamp() throws Exception {
     DateTime d = new DateTime(1991,10,11,14,23,30, 10);//uses default TZ
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.plusHours(2);
     pigValueRangeTest("junitTypeTest2", "timestamp", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
       d.toString(), d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
 
     d = new DateTime(1991,10,11,23,24,25, 26);
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
   }
   //End: tests that check values from Pig that are out of range for target column
 
-
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
     HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String format) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, format);
   }
+
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, null);
   }
+
   private void pigValueRangeTest(String tblName, String hiveType, String pigType,
-                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, 
+                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue,
                                  String expectedValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, expectedValue, null);
   }
@@ -218,6 +234,7 @@ public class TestHCatStorer extends HCat
   String getStorageFormat() {
     return "RCFILE";
   }
+
   /**
    * This is used to test how Pig values of various data types which are out of range for Hive target
    * column are handled.  Currently the options are to raise an error or write NULL.
@@ -236,7 +253,7 @@ public class TestHCatStorer extends HCat
    * @param format date format to use for comparison of values since default DateTime.toString()
    *               includes TZ which is meaningless for Hive DATE type
    */
-  private void pigValueRangeTest(String tblName, String hiveType, String pigType, 
+  private void pigValueRangeTest(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String expectedValue, String format)
     throws Exception {
     TestHCatLoader.dropTable(tblName, driver);
@@ -309,6 +326,7 @@ public class TestHCatStorer extends HCat
     Unfortunately Timestamp.toString() adjusts the value for local TZ and 't' is a String
     thus the timestamp in 't' doesn't match rawData*/
   }
+
   /**
    * Create a data file with datatypes added in 0.13.  Read it with Pig and use
    * Pig + HCatStorer to write to a Hive table.  Then read it using Pig and Hive
@@ -365,6 +383,7 @@ public class TestHCatStorer extends HCat
     }
     Assert.assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
   }
+
   static void dumpFile(String fileName) throws Exception {
     File file = new File(fileName);
     BufferedReader reader = new BufferedReader(new FileReader(file));
@@ -375,6 +394,7 @@ public class TestHCatStorer extends HCat
     }
     reader.close();
   }
+
   @Test
   public void testPartColsInData() throws IOException, CommandNeedRetryException {
 

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java Fri Sep 19 04:42:59 2014
@@ -25,20 +25,26 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.Pair;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 
-public class TestHCatStorerMulti extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestHCatStorerMulti {
   public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(
           System.getProperty("user.dir") + "/build/test/data/" +
                   TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
@@ -77,8 +83,8 @@ public class TestHCatStorerMulti extends
     createTable(tablename, schema, null);
   }
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     if (driver == null) {
       HiveConf hiveConf = new HiveConf(this.getClass());
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -92,14 +98,13 @@ public class TestHCatStorerMulti extends
     cleanup();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     cleanup();
   }
 
+  @Test
   public void testStoreBasicTable() throws Exception {
-
-
     createTable(BASIC_TABLE, "a int, b string");
 
     populateBasicFile();
@@ -117,6 +122,7 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), unpartitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStorePartitionedTable() throws Exception {
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 
@@ -139,9 +145,8 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), partitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStoreTableMulti() throws Exception {
-
-
     createTable(BASIC_TABLE, "a int, b string");
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java Fri Sep 19 04:42:59 2014
@@ -25,10 +25,13 @@ import java.util.Iterator;
 import java.util.UUID;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
+
 import org.junit.Assert;
 import org.junit.Test;
 

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java Fri Sep 19 04:42:59 2014
@@ -18,11 +18,15 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-public class TestOrcHCatStorer extends TestHCatStorerMulti {
+import java.io.IOException;
 
-  @Override
-  protected String storageFormat() {
-    return "orc";
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestOrcHCatStorer extends TestHCatStorer {
+  @Override String getStorageFormat() {
+    return "ORC";
   }
 }
-

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java Fri Sep 19 04:42:59 2014
@@ -20,14 +20,18 @@
 package org.apache.hive.hcatalog.pig;
 
 import com.google.common.collect.Lists;
+
 import junit.framework.Assert;
+
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
+
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.util.UDFContext;
+
 import org.junit.Test;
 
 public class TestPigHCatUtil {

Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Fri Sep 19 04:42:59 2014
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
@@ -314,9 +315,9 @@ public class TestTempletonUtils {
 
   @Test
   public void testFindContainingJar() throws Exception {
-    String result = TempletonUtils.findContainingJar(ShimLoader.class, ".*hive-shims.*");
+    String result = TempletonUtils.findContainingJar(Configuration.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
-    result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*hive-shims.*");
+    result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
     result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
     Assert.assertNull(result);

Modified: hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Fri Sep 19 04:42:59 2014
@@ -735,7 +735,7 @@ public class TestJdbcDriver2 {
     assertTrue(res.next());
     // skip the last (partitioning) column since it is always non-null
     for (int i = 1; i < meta.getColumnCount(); i++) {
-      assertNull(res.getObject(i));
+      assertNull("Column " + i + " should be null", res.getObject(i));
     }
     // getXXX returns 0 for numeric types, false for boolean and null for other
     assertEquals(0, res.getInt(1));

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Sep 19 04:42:59 2014
@@ -1145,7 +1145,10 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
-    inputs.add(new ReadEntity(getTable(tableName)));
+    Table tbl = getTable(tableName, false);
+    if (tbl != null) {
+      inputs.add(new ReadEntity(getTable(tableName)));
+    }
 
     DropIndexDesc dropIdxDesc = new DropIndexDesc(indexName, tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java Fri Sep 19 04:42:59 2014
@@ -129,7 +129,9 @@ public class UpdateDeleteSemanticAnalyze
     try {
       mTable = db.getTable(tableName[0], tableName[1]);
     } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.UPDATEDELETE_PARSE_ERROR.getMsg(), e);
+      LOG.error("Failed to find table " + getDotName(tableName) + " got exception " +
+          e.getMessage());
+      throw new SemanticException(ErrorMsg.INVALID_TABLE, getDotName(tableName));
     }
     List<FieldSchema> partCols = mTable.getPartCols();
 

Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/drop_index.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/drop_index.q?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/drop_index.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/drop_index.q Fri Sep 19 04:42:59 2014
@@ -1,2 +1,3 @@
 SET hive.exec.drop.ignorenonexistent=false;
 DROP INDEX IF EXISTS UnknownIndex ON src;
+DROP INDEX IF EXISTS UnknownIndex ON UnknownTable;

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/drop_index.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/drop_index.q.out?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/drop_index.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/drop_index.q.out Fri Sep 19 04:42:59 2014
@@ -4,3 +4,7 @@ PREHOOK: Input: default@src
 POSTHOOK: query: DROP INDEX IF EXISTS UnknownIndex ON src
 POSTHOOK: type: DROPINDEX
 POSTHOOK: Input: default@src
+PREHOOK: query: DROP INDEX IF EXISTS UnknownIndex ON UnknownTable
+PREHOOK: type: DROPINDEX
+POSTHOOK: query: DROP INDEX IF EXISTS UnknownIndex ON UnknownTable
+POSTHOOK: type: DROPINDEX

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java Fri Sep 19 04:42:59 2014
@@ -55,14 +55,24 @@ public class LazyHiveChar extends
 
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    String byteData = null;
-    try {
-      byteData = Text.decode(bytes.getData(), start, length);
-      data.set(byteData, maxLength);
+    if (oi.isEscaped()) {
+      Text textData =  data.getTextValue();
+      // This is doing a lot of copying here, this could be improved by enforcing length
+      // at the same time as escaping rather than as separate steps.
+      LazyUtils.copyAndEscapeStringDataToText(bytes.getData(), start, length,
+          oi.getEscapeChar(),textData);
+      data.set(textData.toString(), maxLength);
       isNull = false;
-    } catch (CharacterCodingException e) {
-      isNull = true;
-      LOG.debug("Data not in the HiveChar data type range so converted to null.", e);
+    } else {
+      String byteData = null;
+      try {
+        byteData = Text.decode(bytes.getData(), start, length);
+        data.set(byteData, maxLength);
+        isNull = false;
+      } catch (CharacterCodingException e) {
+        isNull = true;
+        LOG.debug("Data not in the HiveChar data type range so converted to null.", e);
+      }
     }
   }
 

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java Fri Sep 19 04:42:59 2014
@@ -55,14 +55,24 @@ public class LazyHiveVarchar extends
 
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    String byteData = null;
-    try {
-      byteData = Text.decode(bytes.getData(), start, length);
-      data.set(byteData, maxLength);
+    if (oi.isEscaped()) {
+      Text textData =  data.getTextValue();
+      // This is doing a lot of copying here, this could be improved by enforcing length
+      // at the same time as escaping rather than as separate steps.
+      LazyUtils.copyAndEscapeStringDataToText(bytes.getData(), start, length,
+          oi.getEscapeChar(),textData);
+      data.set(textData.toString(), maxLength);
       isNull = false;
-    } catch (CharacterCodingException e) {
-      isNull = true;
-      LOG.debug("Data not in the HiveVarchar data type range so converted to null.", e);
+    } else {
+      try {
+        String byteData = null;
+        byteData = Text.decode(bytes.getData(), start, length);
+        data.set(byteData, maxLength);
+        isNull = false;
+      } catch (CharacterCodingException e) {
+        isNull = true;
+        LOG.debug("Data not in the HiveVarchar data type range so converted to null.", e);
+      }
     }
   }
 

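Not part of the commit; a minimal sketch of the second step LazyHiveChar and LazyHiveVarchar perform after unescaping: the recovered string is handed to the writable together with the declared maximum length, which for char pads with spaces to the fixed length (varchar instead truncates). The "ab\,c" input and char(5) column below are made up for the example.

  import org.apache.hadoop.hive.serde2.io.HiveCharWritable;

  public class CharLengthSketch {
    public static void main(String[] args) {
      // "ab\,c" unescapes to "ab,c"; set(..., 5) then applies the char(5) length,
      // which for HiveChar is expected to pad the value with spaces to 5 characters.
      HiveCharWritable w = new HiveCharWritable();
      w.set("ab,c", 5);
      System.out.println("[" + w.getHiveChar() + "]");   // expected: [ab,c ]
    }
  }
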
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java Fri Sep 19 04:42:59 2014
@@ -40,40 +40,7 @@ public class LazyString extends LazyPrim
     if (oi.isEscaped()) {
       byte escapeChar = oi.getEscapeChar();
       byte[] inputBytes = bytes.getData();
-
-      // First calculate the length of the output string
-      int outputLength = 0;
-      for (int i = 0; i < length; i++) {
-        if (inputBytes[start + i] != escapeChar) {
-          outputLength++;
-        } else {
-          outputLength++;
-          i++;
-        }
-      }
-
-      // Copy the data over, so that the internal state of Text will be set to
-      // the required outputLength.
-      data.set(bytes.getData(), start, outputLength);
-
-      // We need to copy the data byte by byte only in case the
-      // "outputLength < length" (which means there is at least one escaped
-      // byte.
-      if (outputLength < length) {
-        int k = 0;
-        byte[] outputBytes = data.getBytes();
-        for (int i = 0; i < length; i++) {
-          byte b = inputBytes[start + i];
-          if (b != escapeChar || i == length - 1) {
-            outputBytes[k++] = b;
-          } else {
-            // get the next byte
-            i++;
-            outputBytes[k++] = inputBytes[start + i];
-          }
-        }
-        assert (k == outputLength);
-      }
+      LazyUtils.copyAndEscapeStringDataToText(inputBytes, start, length, escapeChar, data);
     } else {
       // if the data is not escaped, simply copy the data.
       data.set(bytes.getData(), start, length);

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Fri Sep 19 04:42:59 2014
@@ -412,6 +412,44 @@ public final class LazyUtils {
     }
   }
 
+  public static void copyAndEscapeStringDataToText(byte[] inputBytes, int start, int length,
+      byte escapeChar, Text data) {
+
+    // First calculate the length of the output string
+    int outputLength = 0;
+    for (int i = 0; i < length; i++) {
+      if (inputBytes[start + i] != escapeChar) {
+        outputLength++;
+      } else {
+        outputLength++;
+        i++;
+      }
+    }
+
+    // Copy the data over, so that the internal state of Text will be set to
+    // the required outputLength.
+    data.set(inputBytes, start, outputLength);
+
+    // We need to copy the data byte by byte only in case the
+    // "outputLength < length" (which means there is at least one escaped
+    // byte.
+    if (outputLength < length) {
+      int k = 0;
+      byte[] outputBytes = data.getBytes();
+      for (int i = 0; i < length; i++) {
+        byte b = inputBytes[start + i];
+        if (b != escapeChar || i == length - 1) {
+          outputBytes[k++] = b;
+        } else {
+          // get the next byte
+          i++;
+          outputBytes[k++] = inputBytes[start + i];
+        }
+      }
+      assert (k == outputLength);
+    }
+  }
+
   private LazyUtils() {
     // prevent instantiation
   }

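Not part of the diff; a small sketch of what the new copyAndEscapeStringDataToText helper does: an escape byte is dropped and the byte that follows it is copied through verbatim, so the output is at most as long as the input. The sample bytes and escape character below are made up for the example.

  import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
  import org.apache.hadoop.io.Text;

  public class UnescapeSketch {
    public static void main(String[] args) {
      byte[] raw = "a\\,b".getBytes();   // four bytes: 'a', '\', ',', 'b'
      Text out = new Text();
      // Unescape with '\' as the escape character; the '\' is dropped and the
      // ',' that follows it is copied through, giving three output bytes.
      LazyUtils.copyAndEscapeStringDataToText(raw, 0, raw.length, (byte) '\\', out);
      System.out.println(out);           // expected: a,b
    }
  }
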
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java Fri Sep 19 04:42:59 2014
@@ -29,12 +29,21 @@ public class LazyHiveCharObjectInspector
     extends AbstractPrimitiveLazyObjectInspector<HiveCharWritable>
     implements HiveCharObjectInspector {
 
+  private boolean escaped;
+  private byte escapeChar;
+
   // no-arg ctor required for Kyro
   public LazyHiveCharObjectInspector() {
   }
 
   public LazyHiveCharObjectInspector(CharTypeInfo typeInfo) {
+    this(typeInfo, false, (byte)0);
+  }
+
+  public LazyHiveCharObjectInspector(CharTypeInfo typeInfo, boolean escaped, byte escapeChar) {
     super(typeInfo);
+    this.escaped = escaped;
+    this.escapeChar = escapeChar;
   }
 
   @Override
@@ -63,6 +72,14 @@ public class LazyHiveCharObjectInspector
     return ret;
   }
 
+  public boolean isEscaped() {
+    return escaped;
+  }
+
+  public byte getEscapeChar() {
+    return escapeChar;
+  }
+
   @Override
   public String toString() {
     return getTypeName();

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java Fri Sep 19 04:42:59 2014
@@ -29,12 +29,21 @@ public class LazyHiveVarcharObjectInspec
     extends AbstractPrimitiveLazyObjectInspector<HiveVarcharWritable>
     implements HiveVarcharObjectInspector {
 
+  private boolean escaped;
+  private byte escapeChar;
+
   // no-arg ctor required for Kyro
   public LazyHiveVarcharObjectInspector() {
   }
 
   public LazyHiveVarcharObjectInspector(VarcharTypeInfo typeInfo) {
+    this(typeInfo, false, (byte)0);
+  }
+
+  public LazyHiveVarcharObjectInspector(VarcharTypeInfo typeInfo, boolean escaped, byte escapeChar) {
     super(typeInfo);
+    this.escaped = escaped;
+    this.escapeChar = escapeChar;
   }
 
   @Override
@@ -63,6 +72,14 @@ public class LazyHiveVarcharObjectInspec
     return ret;
   }
 
+  public boolean isEscaped() {
+    return escaped;
+  }
+
+  public byte getEscapeChar() {
+    return escapeChar;
+  }
+
   @Override
   public String toString() {
     return getTypeName();

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java Fri Sep 19 04:42:59 2014
@@ -78,8 +78,10 @@ public final class LazyPrimitiveObjectIn
     // prevent instantiation
   }
 
-  private static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector =
-      new HashMap<ArrayList<Object>, LazyStringObjectInspector>();
+  // Lazy object inspectors for string/char/varchar will all be cached in the same map.
+  // Map key will be list of [typeInfo, isEscaped, escapeChar]
+  private static HashMap<ArrayList<Object>, AbstractPrimitiveLazyObjectInspector> cachedLazyStringTypeOIs =
+      new HashMap<ArrayList<Object>, AbstractPrimitiveLazyObjectInspector>();
 
   private static Map<PrimitiveTypeInfo, AbstractPrimitiveLazyObjectInspector<?>>
      cachedPrimitiveLazyObjectInspectors =
@@ -121,6 +123,10 @@ public final class LazyPrimitiveObjectIn
     switch(primitiveCategory) {
     case STRING:
       return getLazyStringObjectInspector(escaped, escapeChar);
+    case CHAR:
+      return getLazyHiveCharObjectInspector((CharTypeInfo)typeInfo, escaped, escapeChar);
+    case VARCHAR:
+      return getLazyHiveVarcharObjectInspector((VarcharTypeInfo)typeInfo, escaped, escapeChar);
     case BOOLEAN:
       return getLazyBooleanObjectInspector(extBoolean);
     default:
@@ -157,13 +163,44 @@ public final class LazyPrimitiveObjectIn
 
   public static LazyStringObjectInspector getLazyStringObjectInspector(boolean escaped, byte escapeChar) {
     ArrayList<Object> signature = new ArrayList<Object>();
+    signature.add(TypeInfoFactory.stringTypeInfo);
     signature.add(Boolean.valueOf(escaped));
     signature.add(Byte.valueOf(escapeChar));
-    LazyStringObjectInspector result = cachedLazyStringObjectInspector
+    LazyStringObjectInspector result = (LazyStringObjectInspector) cachedLazyStringTypeOIs
         .get(signature);
     if (result == null) {
       result = new LazyStringObjectInspector(escaped, escapeChar);
-      cachedLazyStringObjectInspector.put(signature, result);
+      cachedLazyStringTypeOIs.put(signature, result);
+    }
+    return result;
+  }
+
+  public static LazyHiveCharObjectInspector getLazyHiveCharObjectInspector(
+      CharTypeInfo typeInfo, boolean escaped, byte escapeChar) {
+    ArrayList<Object> signature = new ArrayList<Object>();
+    signature.add(typeInfo);
+    signature.add(Boolean.valueOf(escaped));
+    signature.add(Byte.valueOf(escapeChar));
+    LazyHiveCharObjectInspector result = (LazyHiveCharObjectInspector) cachedLazyStringTypeOIs
+        .get(signature);
+    if (result == null) {
+      result = new LazyHiveCharObjectInspector(typeInfo, escaped, escapeChar);
+      cachedLazyStringTypeOIs.put(signature, result);
+    }
+    return result;
+  }
+
+  public static LazyHiveVarcharObjectInspector getLazyHiveVarcharObjectInspector(
+      VarcharTypeInfo typeInfo, boolean escaped, byte escapeChar) {
+    ArrayList<Object> signature = new ArrayList<Object>();
+    signature.add(typeInfo);
+    signature.add(Boolean.valueOf(escaped));
+    signature.add(Byte.valueOf(escapeChar));
+    LazyHiveVarcharObjectInspector result = (LazyHiveVarcharObjectInspector) cachedLazyStringTypeOIs
+        .get(signature);
+    if (result == null) {
+      result = new LazyHiveVarcharObjectInspector(typeInfo, escaped, escapeChar);
+      cachedLazyStringTypeOIs.put(signature, result);
     }
     return result;
   }

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java Fri Sep 19 04:42:59 2014
@@ -91,6 +91,9 @@ public class WritableHiveCharObjectInspe
 
   @Override
   public Object set(Object o, HiveChar value) {
+    if (value == null) {
+      return null;
+    }
     HiveCharWritable writable = (HiveCharWritable) o;
     writable.set(value, getMaxLength());
     return o;
@@ -98,6 +101,9 @@ public class WritableHiveCharObjectInspe
 
   @Override
   public Object set(Object o, String value) {
+    if (value == null) {
+      return null;
+    }
     HiveCharWritable writable = (HiveCharWritable) o;
     writable.set(value, getMaxLength());
     return o;

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java?rev=1626122&r1=1626121&r2=1626122&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java Fri Sep 19 04:42:59 2014
@@ -96,6 +96,9 @@ implements SettableHiveVarcharObjectInsp
 
   @Override
   public Object set(Object o, HiveVarchar value) {
+    if (value == null) {
+      return null;
+    }
     HiveVarcharWritable writable = (HiveVarcharWritable)o;
     writable.set(value, getMaxLength());
     return o;
@@ -103,6 +106,9 @@ implements SettableHiveVarcharObjectInsp
 
   @Override
   public Object set(Object o, String value) {
+    if (value == null) {
+      return null;
+    }
     HiveVarcharWritable writable = (HiveVarcharWritable)o;
     writable.set(value, getMaxLength());
     return o;
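
Not part of the diff; the user-visible effect of the new null guards in the writable char/varchar inspectors is that set() now returns null for a null value instead of dereferencing it. A minimal sketch, assuming the single-argument WritableHiveVarcharObjectInspector(VarcharTypeInfo) constructor used elsewhere in Hive:

  import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

  public class NullSetSketch {
    public static void main(String[] args) {
      WritableHiveVarcharObjectInspector oi =
          new WritableHiveVarcharObjectInspector(TypeInfoFactory.getVarcharTypeInfo(10));
      HiveVarcharWritable w = new HiveVarcharWritable();
      // With the null check above, a null value short-circuits to null instead of
      // calling writable.set(null, maxLength).
      System.out.println(oi.set(w, (String) null));   // expected: null
    }
  }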