Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 00:26:58 UTC

svn commit: r1629544 [33/33] - in /hive/branches/spark-new: ./ accumulo-handler/ beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/common/type/ c...

Modified: hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java (original)
+++ hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java Sun Oct  5 22:26:43 2014
@@ -19,10 +19,15 @@ package org.apache.hadoop.hive.serde2.ob
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveStringUtils;
 
 public class WritableHiveVarcharObjectInspector extends AbstractPrimitiveWritableObjectInspector
 implements SettableHiveVarcharObjectInspector {
@@ -43,6 +48,12 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
+
+    if (o instanceof Text) {
+      String str = ((Text)o).toString();
+      return new HiveVarchar(str, ((VarcharTypeInfo)typeInfo).getLength());
+    }
+
     HiveVarcharWritable writable = ((HiveVarcharWritable)o);
     if (doesWritableMatchTypeParams(writable)) {
       return writable.getHiveVarchar();
@@ -57,6 +68,14 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
+
+    if (o instanceof Text) {
+      String str = ((Text)o).toString();
+      HiveVarcharWritable hcw = new HiveVarcharWritable();
+      hcw.set(str, ((VarcharTypeInfo)typeInfo).getLength());
+      return hcw;
+    }
+
     HiveVarcharWritable writable = ((HiveVarcharWritable)o);
     if (doesWritableMatchTypeParams((HiveVarcharWritable)o)) {
       return writable;

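Note on the two new branches above: the writable varchar inspector can now be handed a raw Text value and will apply the declared varchar length itself. A minimal sketch of the effect, assuming a VarcharTypeInfo of length 5 (the values are illustrative, not from the patch):

    Text raw = new Text("hello world");
    // Mirrors the new getPrimitiveJavaObject() branch: the string is truncated to the type's length.
    HiveVarchar vc = new HiveVarchar(raw.toString(), 5);      // holds "hello"
    // Mirrors the new getPrimitiveWritableObject() branch.
    HiveVarcharWritable hvw = new HiveVarcharWritable();
    hvw.set(raw.toString(), 5);
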
Modified: hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java (original)
+++ hive/branches/spark-new/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java Sun Oct  5 22:26:43 2014
@@ -22,6 +22,7 @@ import java.math.BigDecimal;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 
 public class HiveDecimalUtils {
 
@@ -134,4 +135,25 @@ public class HiveDecimalUtils {
     }
   }
 
+  public static TypeInfo getDecimalTypeForPrimitiveCategories(
+      PrimitiveTypeInfo a, PrimitiveTypeInfo b) {
+    int prec1 = HiveDecimalUtils.getPrecisionForType(a);
+    int prec2 = HiveDecimalUtils.getPrecisionForType(b);
+    int scale1 = HiveDecimalUtils.getScaleForType(a);
+    int scale2 = HiveDecimalUtils.getScaleForType(b);
+    int intPart = Math.max(prec1 - scale1, prec2 - scale2);
+    int decPart = Math.max(scale1, scale2);
+    int prec =  Math.min(intPart + decPart, HiveDecimal.MAX_PRECISION);
+    int scale = Math.min(decPart, HiveDecimal.MAX_PRECISION - intPart);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+  public static DecimalTypeInfo getDecimalTypeForPrimitiveCategory(PrimitiveTypeInfo a) {
+    if (a instanceof DecimalTypeInfo) return (DecimalTypeInfo)a;
+    int prec = HiveDecimalUtils.getPrecisionForType(a);
+    int scale = HiveDecimalUtils.getScaleForType(a);
+    prec =  Math.min(prec, HiveDecimal.MAX_PRECISION);
+    scale = Math.min(scale, HiveDecimal.MAX_PRECISION - (prec - scale));
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
 }

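A worked example of the widening rule in getDecimalTypeForPrimitiveCategories, assuming HiveDecimal.MAX_PRECISION is 38 and using illustrative operand types:

    // a = decimal(10,2): 8 integer digits, scale 2
    // b = decimal(5,4):  1 integer digit,  scale 4
    // intPart = max(8, 1) = 8, decPart = max(2, 4) = 4
    // prec = min(8 + 4, 38) = 12, scale = min(4, 38 - 8) = 4
    TypeInfo widened = HiveDecimalUtils.getDecimalTypeForPrimitiveCategories(
        TypeInfoFactory.getDecimalTypeInfo(10, 2), TypeInfoFactory.getDecimalTypeInfo(5, 4));
    // widened is decimal(12,4)
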
Modified: hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java (original)
+++ hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java Sun Oct  5 22:26:43 2014
@@ -19,9 +19,11 @@
 package org.apache.hadoop.hive.serde2.avro;
 
 import com.google.common.io.Resources;
+
 import org.junit.Assert;
 import org.apache.avro.Schema;
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -44,16 +46,28 @@ public class TestTypeInfoToSchema {
 
   private static Logger LOGGER = Logger.getLogger(TestTypeInfoToSchema.class);
   private static final List<String> COLUMN_NAMES = Arrays.asList("testCol");
-  private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo("string");
-  private static final TypeInfo INT = TypeInfoFactory.getPrimitiveTypeInfo("int");
-  private static final TypeInfo BOOLEAN = TypeInfoFactory.getPrimitiveTypeInfo("boolean");
-  private static final TypeInfo LONG = TypeInfoFactory.getPrimitiveTypeInfo("bigint");
-  private static final TypeInfo FLOAT = TypeInfoFactory.getPrimitiveTypeInfo("float");
-  private static final TypeInfo DOUBLE = TypeInfoFactory.getPrimitiveTypeInfo("double");
-  private static final TypeInfo BINARY = TypeInfoFactory.getPrimitiveTypeInfo("binary");
-  private static final TypeInfo BYTE = TypeInfoFactory.getPrimitiveTypeInfo("tinyint");
-  private static final TypeInfo SHORT = TypeInfoFactory.getPrimitiveTypeInfo("smallint");
-  private static final TypeInfo VOID = TypeInfoFactory.getPrimitiveTypeInfo("void");
+  private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.STRING_TYPE_NAME);
+  private static final TypeInfo INT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.INT_TYPE_NAME);
+  private static final TypeInfo BOOLEAN = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BOOLEAN_TYPE_NAME);
+  private static final TypeInfo LONG = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BIGINT_TYPE_NAME);
+  private static final TypeInfo FLOAT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.FLOAT_TYPE_NAME);
+  private static final TypeInfo DOUBLE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.DOUBLE_TYPE_NAME);
+  private static final TypeInfo BINARY = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BINARY_TYPE_NAME);
+  private static final TypeInfo BYTE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.TINYINT_TYPE_NAME);
+  private static final TypeInfo SHORT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.SMALLINT_TYPE_NAME);
+  private static final TypeInfo VOID = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.VOID_TYPE_NAME);
+  private static final TypeInfo DATE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.DATE_TYPE_NAME);
   private static final int PRECISION = 4;
   private static final int SCALE = 2;
   private static final TypeInfo DECIMAL = TypeInfoFactory.getPrimitiveTypeInfo(
@@ -205,6 +219,41 @@ public class TestTypeInfoToSchema {
   }
 
   @Test
+  public void createAvroCharSchema() {
+    final String specificSchema = "{" +
+        "\"type\":\"string\"," +
+        "\"logicalType\":\"char\"," +
+        "\"maxLength\":" + CHAR_LEN + "}";
+    String expectedSchema = genSchema(specificSchema);
+
+    Assert.assertEquals("Test for char's avro schema failed",
+        expectedSchema, getAvroSchemaString(CHAR));
+  }
+
+  @Test
+  public void createAvroVarcharSchema() {
+    final String specificSchema = "{" +
+        "\"type\":\"string\"," +
+        "\"logicalType\":\"varchar\"," +
+        "\"maxLength\":" + CHAR_LEN + "}";
+    String expectedSchema = genSchema(specificSchema);
+
+    Assert.assertEquals("Test for varchar's avro schema failed",
+        expectedSchema, getAvroSchemaString(VARCHAR));
+  }
+
+  @Test
+  public void createAvroDateSchema() {
+    final String specificSchema = "{" +
+        "\"type\":\"int\"," +
+        "\"logicalType\":\"date\"}";
+    String expectedSchema = genSchema(specificSchema);
+
+    Assert.assertEquals("Test for date in avro schema failed",
+        expectedSchema, getAvroSchemaString(DATE));
+  }
+
+  @Test
   public void createAvroListSchema() {
     ListTypeInfo listTypeInfo = new ListTypeInfo();
     listTypeInfo.setListElementTypeInfo(STRING);
@@ -313,6 +362,7 @@ public class TestTypeInfoToSchema {
     names.add("field11");
     names.add("field12");
     names.add("field13");
+    names.add("field14");
     structTypeInfo.setAllStructFieldNames(names);
     ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
     typeInfos.add(STRING);
@@ -327,6 +377,7 @@ public class TestTypeInfoToSchema {
     typeInfos.add(DOUBLE);
     typeInfos.add(BOOLEAN);
     typeInfos.add(DECIMAL);
+    typeInfos.add(DATE);
     typeInfos.add(VOID);
     structTypeInfo.setAllStructFieldTypeInfos(typeInfos);
     LOGGER.info("structTypeInfo is " + structTypeInfo);

Modified: hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java (original)
+++ hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java Sun Oct  5 22:26:43 2014
@@ -18,10 +18,20 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+
+import static org.junit.Assert.*;
 import org.apache.hadoop.hive.common.type.HiveChar;
 
-public class TestHiveCharWritable extends TestCase {
+public class TestHiveCharWritable {
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testConstructor() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable(new HiveChar("abc", 5));
     assertEquals("abc  ", hcw1.toString());
@@ -30,6 +40,9 @@ public class TestHiveCharWritable extend
     assertEquals("abc  ", hcw2.toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testSet() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
 
@@ -70,18 +83,27 @@ public class TestHiveCharWritable extend
     assertEquals("ab", hcw1.getTextValue().toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testGetHiveChar() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals("abcd      ", hcw.getHiveChar().toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testGetCharacterLength() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals(4, hcw.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testEnforceMaxLength() {
     HiveCharWritable hcw1 = new HiveCharWritable();
     hcw1.set("abcdefghij", 10);
@@ -92,6 +114,9 @@ public class TestHiveCharWritable extend
     assertEquals("abcde", hcw1.toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
     HiveCharWritable hcw2 = new HiveCharWritable();

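The TestCase-to-JUnit4 conversions in this commit all follow the tempus-fugit pattern shown above. A self-contained sketch, assuming the tempus-fugit library is on the test classpath (the class and test names below are hypothetical):

    import static org.junit.Assert.assertEquals;

    import com.google.code.tempusfugit.concurrency.ConcurrentRule;
    import com.google.code.tempusfugit.concurrency.RepeatingRule;
    import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
    import com.google.code.tempusfugit.concurrency.annotations.Repeating;
    import org.junit.Rule;
    import org.junit.Test;

    public class ExampleConcurrentTest {
      // ConcurrentRule runs each annotated test on several threads at once;
      // RepeatingRule reruns it the requested number of times.
      @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
      @Rule public RepeatingRule repeatingRule = new RepeatingRule();

      @Test
      @Concurrent(count = 4)        // 4 concurrent threads
      @Repeating(repetition = 100)  // 100 repetitions
      public void testSomethingThreadSafe() {
        assertEquals(4, 2 + 2);
      }
    }
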
Modified: hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java (original)
+++ hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java Sun Oct  5 22:26:43 2014
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.Assert;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -29,8 +32,6 @@ import java.util.ArrayList;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hive.common.util.Decimal128FastBuffer;
-import org.junit.Before;
-import org.junit.Test;
 
 /**
  * Unit tests for testing the fast allocation-free conversion
@@ -38,14 +39,15 @@ import org.junit.Test;
  */
 public class TestHiveDecimalWritable {
 
-    private Decimal128FastBuffer scratch;
+    @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+    @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
     @Before
     public void setUp() throws Exception {
-      scratch = new Decimal128FastBuffer();
     }
 
     private void doTestFastStreamForHiveDecimal(String valueString) {
+      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       BigDecimal value = new BigDecimal(valueString);
       Decimal128 dec = new Decimal128();
       dec.update(value);
@@ -61,21 +63,23 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      Assert.assertEquals(value, readValue);
+      assertEquals(value, readValue);
 
       // Now test fastUpdate from the same serialized HiveDecimal
       Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
               witness.getInternalStorage(), (short) witness.getScale());
 
-      Assert.assertEquals(dec, decRead);
+      assertEquals(dec, decRead);
 
       // Test fastUpdate from its own (not fully compacted) serialized output
       Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
               hdw.getInternalStorage(), (short) hdw.getScale());
-      Assert.assertEquals(dec, decReadSelf);
+      assertEquals(dec, decReadSelf);
     }
 
     @Test
+    @Concurrent(count=4)
+    @Repeating(repetition=100)
     public void testFastStreamForHiveDecimal() {
 
       doTestFastStreamForHiveDecimal("0");
@@ -217,7 +221,10 @@ public class TestHiveDecimalWritable {
     }
 
     @Test
+    @Concurrent(count=4)
+    @Repeating(repetition=100)
     public void testHive6594() {
+      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       String[] vs = new String[] {
           "-4033.445769230769",
           "6984454.211097692"};
@@ -236,7 +243,7 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      Assert.assertEquals(d.toBigDecimal().stripTrailingZeros(),
+      assertEquals(d.toBigDecimal().stripTrailingZeros(),
           readValue.stripTrailingZeros());
     }
 }

Modified: hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java (original)
+++ hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java Sun Oct  5 22:26:43 2014
@@ -17,11 +17,21 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import java.io.*;
 
-public class TestHiveVarcharWritable extends TestCase {
+public class TestHiveVarcharWritable {
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -54,6 +64,9 @@ public class TestHiveVarcharWritable ext
     assertEquals(6, vc1.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testEnforceLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -66,8 +79,11 @@ public class TestHiveVarcharWritable ext
 
     vc1.enforceMaxLength(8);
     assertEquals(8, vc1.getCharacterLength());
-}
+  }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveVarcharWritable hc1 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
     HiveVarcharWritable hc2 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
@@ -101,6 +117,9 @@ public class TestHiveVarcharWritable ext
     assertFalse(0 == hc2.compareTo(hc1));
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringValue() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("abcde", 20));
     assertEquals("abcde", vc1.toString());

Modified: hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java (original)
+++ hive/branches/spark-new/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java Sun Oct  5 22:26:43 2014
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -32,15 +35,25 @@ import java.util.List;
 import java.util.Random;
 import java.util.TimeZone;
 
-import junit.framework.TestCase;
+import org.junit.*;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
-public class TestTimestampWritable extends TestCase {
+public class TestTimestampWritable {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
-  private static DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  private static ThreadLocal<DateFormat> DATE_FORMAT =
+      new ThreadLocal<DateFormat>() {
+        @Override
+        protected synchronized DateFormat initialValue() {
+          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        }
+      };
 
   private static final int HAS_DECIMAL_MASK = 0x80000000;
 
@@ -64,14 +77,14 @@ public class TestTimestampWritable exten
 
   private static long parseToMillis(String s) {
     try {
-      return DATE_FORMAT.parse(s).getTime();
+      return DATE_FORMAT.get().parse(s).getTime();
     } catch (ParseException ex) {
       throw new RuntimeException(ex);
     }
   }
 
-  @Override
-  protected void setUp() {
+  @Before
+  public void setUp() {
     TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
   }
 
@@ -252,6 +265,9 @@ public class TestTimestampWritable exten
     return tsw;
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testReverseNanos() {
     assertEquals(0, reverseNanos(0));
     assertEquals(120000000, reverseNanos(21));
@@ -265,6 +281,8 @@ public class TestTimestampWritable exten
    * Test serializing and deserializing timestamps that can be represented by a number of seconds
    * from 0 to 2147483647 since the UNIX epoch.
    */
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsWithinPositiveIntRange() throws IOException {
     Random rand = new Random(294722773L);
     for (int i = 0; i < 10000; ++i) {
@@ -281,6 +299,8 @@ public class TestTimestampWritable exten
    * Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
    * being fixed.
    */
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsOutsidePositiveIntRange() throws IOException {
     Random rand = new Random(789149717L);
     for (int i = 0; i < 10000; ++i) {
@@ -289,6 +309,8 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsInFullRange() throws IOException {
     Random rand = new Random(2904974913L);
     for (int i = 0; i < 10000; ++i) {
@@ -296,6 +318,8 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testToFromDouble() {
     Random rand = new Random(294729777L);
     for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
@@ -326,6 +350,8 @@ public class TestTimestampWritable exten
     return HiveDecimal.create(d);
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testDecimalToTimestampRandomly() {
     Random rand = new Random(294729777L);
     for (int i = 0; i < 10000; ++i) {
@@ -336,6 +362,9 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testDecimalToTimestampCornerCases() {
     Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
     assertEquals(0, ts.getTime() % 1000);
@@ -347,6 +376,9 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testSerializationFormatDirectly() throws IOException {
     assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
     assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
@@ -374,6 +406,9 @@ public class TestTimestampWritable exten
                       -3210 - 1, seconds >> 31).toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testMaxSize() {
     // This many bytes are necessary to store the reversed nanoseconds.
     assertEquals(5, WritableUtils.getVIntSize(999999999));
@@ -396,6 +431,9 @@ public class TestTimestampWritable exten
     // Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testMillisToSeconds() {
     assertEquals(0, TimestampWritable.millisToSeconds(0));
     assertEquals(-1, TimestampWritable.millisToSeconds(-1));
@@ -427,6 +465,9 @@ public class TestTimestampWritable exten
     return result < 0 ? -1 : (result > 0 ? 1 : 0);
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testBinarySortable() {
     Random rand = new Random(5972977L);
     List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();

Modified: hive/branches/spark-new/serde/src/test/resources/avro-struct.avsc
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/serde/src/test/resources/avro-struct.avsc?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/serde/src/test/resources/avro-struct.avsc (original)
+++ hive/branches/spark-new/serde/src/test/resources/avro-struct.avsc Sun Oct  5 22:26:43 2014
@@ -4,11 +4,11 @@
 "namespace":"",
 "doc":"struct<field1:string,field2:char(5),field3:varchar(5),field4:binary,field5:tinyint,
 field6:smallint,field7:int,field8:bigint,field9:float,field10:double,field11:boolean,
-field12:decimal(4,2),field13:void>",
+field12:decimal(4,2),field13:date,field14:void>",
 "fields":[
 {"name":"field1","type":["null","string"],"doc":"string","default":null},
-{"name":"field2","type":["null","string"],"doc":"char(5)","default":null},
-{"name":"field3","type":["null","string"],"doc":"varchar(5)","default":null},
+{"name":"field2","type":["null",{"type":"string","logicalType":"char","maxLength":5}],"doc":"char(5)","default":null},
+{"name":"field3","type":["null",{"type":"string","logicalType":"varchar","maxLength":5}],"doc":"varchar(5)","default":null},
 {"name":"field4","type":["null","bytes"],"doc":"binary","default":null},
 {"name":"field5","type":["null","int"],"doc":"tinyint","default":null},
 {"name":"field6","type":["null","int"],"doc":"smallint","default":null},
@@ -17,8 +17,8 @@ field12:decimal(4,2),field13:void>",
 {"name":"field9","type":["null","float"],"doc":"float","default":null},
 {"name":"field10","type":["null","double"],"doc":"double","default":null},
 {"name":"field11","type":["null","boolean"],"doc":"boolean","default":null},
-{"name":"field12","type":["null",{"type":"bytes","logicalType":"decimal","precision":4,
-"scale":2}],"doc":"decimal(4,2)","default":null},
-{"name":"field13","type":"null","doc":"void","default":null}
+{"name":"field12","type":["null",{"type":"bytes","logicalType":"decimal","precision":4,"scale":2}],"doc":"decimal(4,2)","default":null},
+{"name":"field13","type":["null",{"type":"int","logicalType":"date"}],"doc":"date","default":null},
+{"name":"field14","type":"null","doc":"void","default":null}
 ]
-}
\ No newline at end of file
+}

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Sun Oct  5 22:26:43 2014
@@ -23,6 +23,7 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;
+
 import javax.security.auth.login.LoginException;
 import javax.security.sasl.Sasl;
 
@@ -31,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.thrift.TProcessorFactory;
@@ -136,15 +138,17 @@ public class HiveAuthFactory {
     return transportFactory;
   }
 
+  /**
+   * Returns the Thrift processor factory for HiveServer2 running in binary mode.
+   * @param service
+   * @return
+   * @throws LoginException
+   */
   public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
-    if ("http".equalsIgnoreCase(transportMode)) {
-      return HttpAuthUtils.getAuthProcFactory(service);
+    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+      return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
     } else {
-      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
-        return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
-      } else {
-        return PlainSaslHelper.getPlainProcessorFactory(service);
-      }
+      return PlainSaslHelper.getPlainProcessorFactory(service);
     }
   }
 
@@ -287,7 +291,9 @@ public class HiveAuthFactory {
     try {
       UserGroupInformation sessionUgi;
       if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
-        sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
+    	KerberosName kerbName = new KerberosName(realUser);
+    	String shortPrincipalName = kerbName.getServiceName();
+        sessionUgi = ShimLoader.getHadoopShims().createProxyUser(shortPrincipalName);
       } else {
         sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(realUser, null);
       }
@@ -300,5 +306,5 @@ public class HiveAuthFactory {
         "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, e);
     }
   }
-
+  
 }

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java Sun Oct  5 22:26:43 2014
@@ -22,17 +22,10 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hive.service.cli.thrift.TCLIService;
-import org.apache.hive.service.cli.thrift.TCLIService.Iface;
-import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.http.protocol.BasicHttpContext;
 import org.apache.http.protocol.HttpContext;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.transport.TTransport;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
 import org.ietf.jgss.GSSManager;
@@ -48,11 +41,7 @@ public final class HttpAuthUtils {
   public static final String AUTHORIZATION = "Authorization";
   public static final String BASIC = "Basic";
   public static final String NEGOTIATE = "Negotiate";
-
-  public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) {
-    return new HttpCLIServiceProcessorFactory(service);
-  }
-
+  
   /**
    * @return Stringified Base64 encoded kerberosAuthHeader on success
    */
@@ -62,7 +51,7 @@ public final class HttpAuthUtils {
     String serverPrincipal = getServerPrincipal(principal, host);
     // Uses the Ticket Granting Ticket in the UserGroupInformation
     return clientUGI.doAs(
-      new HttpKerberosClientAction(serverPrincipal, clientUGI.getShortUserName(), serverHttpUrl));
+      new HttpKerberosClientAction(serverPrincipal, clientUGI.getUserName(), serverHttpUrl));
   }
 
   /**
@@ -87,26 +76,6 @@ public final class HttpAuthUtils {
     throw new UnsupportedOperationException("Can't initialize class");
   }
 
-  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
-
-    private final ThriftCLIService service;
-    private final HiveConf hiveConf;
-    private final boolean isDoAsEnabled;
-
-    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      hiveConf = service.getHiveConf();
-      isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
-      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) : baseProcessor;
-    }
-  }
-
   public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
 
     public static final String HTTP_RESPONSE = "HTTP_RESPONSE";

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java Sun Oct  5 22:26:43 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hive.service.auth;
 
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Processor;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCLIService.Iface;
 import org.apache.thrift.TException;
@@ -43,7 +42,7 @@ import org.slf4j.LoggerFactory;
  */
 public class TSetIpAddressProcessor<I extends Iface> extends TCLIService.Processor<Iface> {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+  private static final Logger LOGGER = LoggerFactory.getLogger(TSetIpAddressProcessor.class.getName());
 
   public TSetIpAddressProcessor(Iface iface) {
     super(iface);
@@ -75,7 +74,7 @@ public class TSetIpAddressProcessor<I ex
     if (tSocket == null) {
       LOGGER.warn("Unknown Transport, cannot determine ipAddress");
     } else {
-      THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().toString());
+      THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().getHostAddress());
     }
   }
 

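The logging fix above swaps InetAddress.toString() for getHostAddress() so only the bare IP is stored. A small, hedged illustration (the address is illustrative):

    import java.net.InetAddress;

    public class IpAddressFormatDemo {
      public static void main(String[] args) throws Exception {
        InetAddress addr = InetAddress.getByName("10.0.0.5");
        System.out.println(addr.toString());        // typically "/10.0.0.5" (or "host/10.0.0.5")
        System.out.println(addr.getHostAddress());  // "10.0.0.5"
      }
    }
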
Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Sun Oct  5 22:26:43 2014
@@ -166,15 +166,20 @@ public class HiveSessionImpl implements 
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      if (hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION) != null) {
-        String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION)
-            + File.separator + SessionManager.HIVERCFILE;
-        if (new File(hiverc).exists()) {
-          LOG.info("Running global init file: " + hiverc);
-          int rc = processor.processFile(hiverc);
+      String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
+      if (hiverc != null) {
+        File hivercFile = new File(hiverc);
+        if (hivercFile.isDirectory()) {
+          hivercFile = new File(hivercFile, SessionManager.HIVERCFILE);
+        }
+        if (hivercFile.isFile()) {
+          LOG.info("Running global init file: " + hivercFile);
+          int rc = processor.processFile(hivercFile.getAbsolutePath());
           if (rc != 0) {
-            LOG.warn("Failed on initializing global .hiverc file");
+            LOG.error("Failed on initializing global .hiverc file");
           }
+        } else {
+          LOG.debug("Global init file " + hivercFile + " does not exist");
         }
       }
     } catch (IOException e) {

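With the rewritten block, HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION may point either at a directory containing the .hiverc file or directly at the init file itself. A minimal sketch of both forms, using the same HiveConf API the tests in this commit use (the paths are hypothetical):

    HiveConf conf = new HiveConf();
    // Directory form: HiveServer2 resolves <dir>/.hiverc
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION, "/etc/hive/conf");
    // File form: the configured path is used as-is
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION, "/etc/hive/conf/global.hiverc");
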
Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/SessionManager.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/SessionManager.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/session/SessionManager.java Sun Oct  5 22:26:43 2014
@@ -229,6 +229,23 @@ public class SessionManager extends Comp
     return openSession(protocol, username, password, ipAddress, sessionConf, false, null);
   }
 
+  /**
+   * Opens a new session and creates a session handle.
+   * The username passed to this method is the effective username.
+   * If withImpersonation is true (i.e. doAs is enabled), all calls on the HiveSession
+   * are wrapped in a UGI.doAs, where the UGI corresponds to the effective user.
+   * @see org.apache.hive.service.cli.thrift.ThriftCLIService#getUserName()
+   * 
+   * @param protocol
+   * @param username
+   * @param password
+   * @param ipAddress
+   * @param sessionConf
+   * @param withImpersonation
+   * @param delegationToken
+   * @return
+   * @throws HiveSQLException
+   */
   public SessionHandle openSession(TProtocolVersion protocol, String username, String password, String ipAddress,
       Map<String, String> sessionConf, boolean withImpersonation, String delegationToken)
           throws HiveSQLException {

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java Sun Oct  5 22:26:43 2014
@@ -262,6 +262,16 @@ public abstract class ThriftCLIService e
     return clientIpAddress;
   }
 
+  /**
+   * Returns the effective username.
+   * 1. If hive.server2.allow.user.substitution = false: the username of the connecting user.
+   * 2. If hive.server2.allow.user.substitution = true: the username of the end user that
+   * the connecting user is proxying for.
+   * This includes a check that the connecting user is allowed to proxy for the end user.
+   * @param req
+   * @return
+   * @throws HiveSQLException
+   */
   private String getUserName(TOpenSessionReq req) throws HiveSQLException {
     String userName = null;
     // Kerberos

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java Sun Oct  5 22:26:43 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
 import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
 import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
 import org.apache.thrift.TProcessor;
 import org.apache.thrift.TProcessorFactory;
@@ -102,8 +103,7 @@ public class ThriftHttpCLIService extend
 
       // Thrift configs
       hiveAuthFactory = new HiveAuthFactory(hiveConf);
-      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-      TProcessor processor = processorFactory.getProcessor(null);
+      TProcessor processor = new TCLIService.Processor<Iface>(this);
       TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
       // Set during the init phase of HiveServer2 if auth mode is kerberos
       // UGI for the hive/_HOST (kerberos) principal

Modified: hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java (original)
+++ hive/branches/spark-new/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java Sun Oct  5 22:26:43 2014
@@ -32,6 +32,7 @@ import org.apache.commons.codec.binary.S
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hive.service.auth.AuthenticationProviderFactory;
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
 import org.apache.hive.service.auth.HiveAuthFactory;
@@ -219,7 +220,7 @@ public class ThriftHttpServlet extends T
               "provided by the client.");
         }
         else {
-          return getPrincipalWithoutRealm(gssContext.getSrcName().toString());
+          return getPrincipalWithoutRealmAndHost(gssContext.getSrcName().toString());
         }
       }
       catch (GSSException e) {
@@ -237,8 +238,19 @@ public class ThriftHttpServlet extends T
     }
 
     private String getPrincipalWithoutRealm(String fullPrincipal) {
-      String names[] = fullPrincipal.split("[@]");
-      return names[0];
+      KerberosName fullKerberosName = new KerberosName(fullPrincipal);
+      String serviceName = fullKerberosName.getServiceName();
+      String hostName =  fullKerberosName.getHostName();
+      String principalWithoutRealm = serviceName;
+      if (hostName != null) {
+        principalWithoutRealm = serviceName + "/" + hostName;
+      }
+      return principalWithoutRealm;
+    }
+    
+    private String getPrincipalWithoutRealmAndHost(String fullPrincipal) {
+      KerberosName fullKerberosName = new KerberosName(fullPrincipal);
+      return fullKerberosName.getServiceName();
     }
   }
 

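Both helpers above now delegate to Hadoop's KerberosName for principal parsing. A small sketch of how a three-part principal is decomposed (the principal is illustrative):

    import org.apache.hadoop.security.authentication.util.KerberosName;

    KerberosName kn = new KerberosName("hive/host1.example.com@EXAMPLE.COM");
    kn.getServiceName();   // "hive"                -> what getPrincipalWithoutRealmAndHost() returns
    kn.getHostName();      // "host1.example.com"
    // getPrincipalWithoutRealm() recombines the two as "hive/host1.example.com"
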
Modified: hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (original)
+++ hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java Sun Oct  5 22:26:43 2014
@@ -150,7 +150,7 @@ public abstract class CLIServiceTest {
     client.closeOperation(opHandle);
 
     // Blocking execute
-    queryString = "SELECT ID FROM TEST_EXEC";
+    queryString = "SELECT ID+1 FROM TEST_EXEC";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     // Expect query to be completed now
     assertEquals("Query should be finished",
@@ -225,27 +225,27 @@ public abstract class CLIServiceTest {
     /**
      * Execute an async query with default config
      */
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 0
      */
     longPollingTimeout = 0;
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 500 millis
      */
     longPollingTimeout = 500;
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Cancellation test
      */
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     opHandle = client.executeStatementAsync(sessionHandle, queryString, confOverlay);
     System.out.println("Cancelling " + opHandle);
     client.cancelOperation(opHandle);

Modified: hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java (original)
+++ hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java Sun Oct  5 22:26:43 2014
@@ -44,6 +44,7 @@ public class TestSessionGlobalInitFile e
   private ThriftCLIServiceClient client;
   private File initFile;
   private String tmpDir;
+  private HiveConf hiveConf;
 
   /**
    * This class is almost the same as EmbeddedThriftBinaryCLIService,
@@ -86,7 +87,7 @@ public class TestSessionGlobalInitFile e
     FileUtils.writeLines(initFile, Arrays.asList(fileContent));
 
     // set up service and client
-    HiveConf hiveConf = new HiveConf();
+    hiveConf = new HiveConf();
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
         initFile.getParentFile().getAbsolutePath());
     service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
@@ -102,11 +103,26 @@ public class TestSessionGlobalInitFile e
 
   @Test
   public void testSessionGlobalInitFile() throws Exception {
-    /**
-     * create session, and fetch the property set in global init file. Test if
-     * the global init file .hiverc is loaded correctly by checking the expected
-     * setting property.
-     */
+    File tmpInitFile = new File(initFile.getParent(), "hiverc");
+    Assert.assertTrue("Failed to rename " + initFile + " to " + tmpInitFile,
+      initFile.renameTo(tmpInitFile));
+    initFile = tmpInitFile;
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
+        initFile.getAbsolutePath());
+    doTestSessionGlobalInitFile();
+  }
+
+  @Test
+  public void testSessionGlobalInitDir() throws Exception {
+    doTestSessionGlobalInitFile();
+  }
+
+  /**
+   * create session, and fetch the property set in global init file. Test if
+   * the global init file .hiverc is loaded correctly by checking the expected
+   * setting property.
+   */
+  private void doTestSessionGlobalInitFile() throws Exception {
     SessionHandle sessionHandle = client.openSession(null, null, null);
 
     verifyInitProperty("a", "1", sessionHandle);

Modified: hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java (original)
+++ hive/branches/spark-new/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java Sun Oct  5 22:26:43 2014
@@ -177,7 +177,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID FROM TEST_EXEC_THRIFT";
+    queryString = "SELECT ID+1 FROM TEST_EXEC_THRIFT";
     OperationHandle opHandle = client.executeStatement(sessHandle,
         queryString, opConf);
     assertNotNull(opHandle);
@@ -227,7 +227,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID FROM TEST_EXEC_ASYNC_THRIFT";
+    queryString = "SELECT ID+1 FROM TEST_EXEC_ASYNC_THRIFT";
     System.out.println("Will attempt to execute: " + queryString);
     opHandle = client.executeStatementAsync(sessHandle,
         queryString, opConf);

Modified: hive/branches/spark-new/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/spark-new/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Sun Oct  5 22:26:43 2014
@@ -918,4 +918,14 @@ public class Hadoop20Shims implements Ha
   public boolean hasStickyBit(FsPermission permission) {
     return false;   // not supported
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
+  }
 }

Modified: hive/branches/spark-new/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/spark-new/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Sun Oct  5 22:26:43 2014
@@ -534,6 +534,16 @@ public class Hadoop20SShims extends Hado
 
   @Override
   public boolean hasStickyBit(FsPermission permission) {
-    return false;   // not supported
+    return false;
+  }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
   }
 }

Modified: hive/branches/spark-new/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/spark-new/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Sun Oct  5 22:26:43 2014
@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
@@ -835,4 +836,15 @@ public class Hadoop23Shims extends Hadoo
   public boolean hasStickyBit(FsPermission permission) {
     return permission.getStickyBit();
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return true;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    TrashPolicy tp = TrashPolicy.getInstance(conf, fs, fs.getHomeDirectory());
+    return tp.getCurrentTrashDir();
+  }
 }

Modified: hive/branches/spark-new/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark-new/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1629544&r1=1629543&r2=1629544&view=diff
==============================================================================
--- hive/branches/spark-new/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/spark-new/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Sun Oct  5 22:26:43 2014
@@ -721,4 +721,14 @@ public interface HadoopShims {
    * @return sticky bit
    */
   boolean hasStickyBit(FsPermission permission);
+
+  /**
+   * @return true if the current Hadoop version supports the trash feature.
+   */
+  boolean supportTrashFeature();
+
+  /**
+   * @return the path to the HDFS trash directory if the current Hadoop version supports the trash feature, null otherwise.
+   */
+  Path getCurrentTrashPath(Configuration conf, FileSystem fs);
 }
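
A hedged sketch of how a caller might consume the two new shim methods; conf and fs are assumed to be an existing Configuration and FileSystem, and the log line is illustrative:

    HadoopShims shims = ShimLoader.getHadoopShims();
    if (shims.supportTrashFeature()) {
      // The Hadoop 0.23 shim returns the per-user trash directory; the 0.20 shims return null.
      Path trashDir = shims.getCurrentTrashPath(conf, fs);
      LOG.info("Trash directory for moved files: " + trashDir);
    }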