Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 05:44:26 UTC

svn commit: r1629562 [38/38] - in /hive/branches/spark: ./ accumulo-handler/ beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/common/type/ contrib/sr...

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java Mon Oct  6 03:44:13 2014
@@ -22,7 +22,6 @@ import java.math.BigDecimal;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 
 public class HiveDecimalUtils {
 
@@ -135,25 +134,4 @@ public class HiveDecimalUtils {
     }
   }
 
-  public static TypeInfo getDecimalTypeForPrimitiveCategories(
-      PrimitiveTypeInfo a, PrimitiveTypeInfo b) {
-    int prec1 = HiveDecimalUtils.getPrecisionForType(a);
-    int prec2 = HiveDecimalUtils.getPrecisionForType(b);
-    int scale1 = HiveDecimalUtils.getScaleForType(a);
-    int scale2 = HiveDecimalUtils.getScaleForType(b);
-    int intPart = Math.max(prec1 - scale1, prec2 - scale2);
-    int decPart = Math.max(scale1, scale2);
-    int prec =  Math.min(intPart + decPart, HiveDecimal.MAX_PRECISION);
-    int scale = Math.min(decPart, HiveDecimal.MAX_PRECISION - intPart);
-    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
-  }
-
-  public static DecimalTypeInfo getDecimalTypeForPrimitiveCategory(PrimitiveTypeInfo a) {
-    if (a instanceof DecimalTypeInfo) return (DecimalTypeInfo)a;
-    int prec = HiveDecimalUtils.getPrecisionForType(a);
-    int scale = HiveDecimalUtils.getScaleForType(a);
-    prec =  Math.min(prec, HiveDecimal.MAX_PRECISION);
-    scale = Math.min(scale, HiveDecimal.MAX_PRECISION - (prec - scale));
-    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
-  }
 }
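
The two helpers removed above derived a decimal type wide enough to hold either input: the integer-digit and fraction-digit counts are maximized separately, then capped at HiveDecimal.MAX_PRECISION (38). A minimal standalone sketch of the same rule, with a hypothetical class name and a worked example:

    public class DecimalWidening {
      static final int MAX_PRECISION = 38; // HiveDecimal.MAX_PRECISION

      // Combine decimal(p1,s1) with decimal(p2,s2) as the removed helper did.
      static int[] combine(int p1, int s1, int p2, int s2) {
        int intPart = Math.max(p1 - s1, p2 - s2); // widest integer part
        int decPart = Math.max(s1, s2);           // widest fractional part
        int prec = Math.min(intPart + decPart, MAX_PRECISION);
        int scale = Math.min(decPart, MAX_PRECISION - intPart);
        return new int[] { prec, scale };
      }

      public static void main(String[] args) {
        // decimal(10,2) + decimal(5,4): intPart = max(8,1) = 8,
        // decPart = max(2,4) = 4, so the result is decimal(12,4).
        int[] t = combine(10, 2, 5, 4);
        System.out.println("decimal(" + t[0] + "," + t[1] + ")");
      }
    }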

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java Mon Oct  6 03:44:13 2014
@@ -205,30 +205,6 @@ public class TestTypeInfoToSchema {
   }
 
   @Test
-  public void createAvroCharSchema() {
-    final String specificSchema = "{" +
-        "\"type\":\"string\"," +
-        "\"logicalType\":\"char\"," +
-        "\"maxLength\":" + CHAR_LEN + "}";
-    String expectedSchema = genSchema(specificSchema);
-
-    Assert.assertEquals("Test for char's avro schema failed",
-        expectedSchema, getAvroSchemaString(CHAR));
-  }
-
-  @Test
-  public void createAvroVarcharSchema() {
-    final String specificSchema = "{" +
-        "\"type\":\"string\"," +
-        "\"logicalType\":\"varchar\"," +
-        "\"maxLength\":" + CHAR_LEN + "}";
-    String expectedSchema = genSchema(specificSchema);
-
-    Assert.assertEquals("Test for varchar's avro schema failed",
-        expectedSchema, getAvroSchemaString(VARCHAR));
-  }
-
-  @Test
   public void createAvroListSchema() {
     ListTypeInfo listTypeInfo = new ListTypeInfo();
     listTypeInfo.setListElementTypeInfo(STRING);

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java Mon Oct  6 03:44:13 2014
@@ -18,20 +18,10 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-
-import static org.junit.Assert.*;
+import junit.framework.TestCase;
 import org.apache.hadoop.hive.common.type.HiveChar;
 
-public class TestHiveCharWritable {
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
+public class TestHiveCharWritable extends TestCase {
   public void testConstructor() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable(new HiveChar("abc", 5));
     assertEquals("abc  ", hcw1.toString());
@@ -40,9 +30,6 @@ public class TestHiveCharWritable {
     assertEquals("abc  ", hcw2.toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testSet() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
 
@@ -83,27 +70,18 @@ public class TestHiveCharWritable {
     assertEquals("ab", hcw1.getTextValue().toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testGetHiveChar() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals("abcd      ", hcw.getHiveChar().toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testGetCharacterLength() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals(4, hcw.getCharacterLength());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testEnforceMaxLength() {
     HiveCharWritable hcw1 = new HiveCharWritable();
     hcw1.set("abcdefghij", 10);
@@ -114,9 +92,6 @@ public class TestHiveCharWritable {
     assertEquals("abcde", hcw1.toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
     HiveCharWritable hcw2 = new HiveCharWritable();
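
This and the sibling writable tests below move from JUnit 4 with tempus-fugit's @Concurrent/@Repeating rules back to JUnit 3 style: the class extends junit.framework.TestCase, methods are discovered by the test* naming convention, and assertEquals and friends are inherited rather than statically imported, which is why the method bodies compile unchanged. A minimal sketch of the resulting style (hypothetical class):

    import junit.framework.TestCase;

    public class ExampleWritableTest extends TestCase {
      // No @Test annotation: JUnit 3 runs any public void test* method.
      public void testPadding() {
        String padded = String.format("%-5s", "abc");
        assertEquals("abc  ", padded); // inherited from junit.framework.Assert
      }
    }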

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java Mon Oct  6 03:44:13 2014
@@ -18,10 +18,7 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
+import junit.framework.Assert;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -32,6 +29,8 @@ import java.util.ArrayList;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hive.common.util.Decimal128FastBuffer;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Unit tests for testing the fast allocation-free conversion
@@ -39,15 +38,14 @@ import org.apache.hive.common.util.Decim
  */
 public class TestHiveDecimalWritable {
 
-    @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-    @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+    private Decimal128FastBuffer scratch;
 
     @Before
     public void setUp() throws Exception {
+      scratch = new Decimal128FastBuffer();
     }
 
     private void doTestFastStreamForHiveDecimal(String valueString) {
-      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       BigDecimal value = new BigDecimal(valueString);
       Decimal128 dec = new Decimal128();
       dec.update(value);
@@ -63,23 +61,21 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      assertEquals(value, readValue);
+      Assert.assertEquals(value, readValue);
 
       // Now test fastUpdate from the same serialized HiveDecimal
       Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
               witness.getInternalStorage(), (short) witness.getScale());
 
-      assertEquals(dec, decRead);
+      Assert.assertEquals(dec, decRead);
 
       // Test fastUpdate from its own (not fully compacted) serialized output
       Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
               hdw.getInternalStorage(), (short) hdw.getScale());
-      assertEquals(dec, decReadSelf);
+      Assert.assertEquals(dec, decReadSelf);
     }
 
     @Test
-    @Concurrent(count=4)
-    @Repeating(repetition=100)
     public void testFastStreamForHiveDecimal() {
 
       doTestFastStreamForHiveDecimal("0");
@@ -221,10 +217,7 @@ public class TestHiveDecimalWritable {
     }
 
     @Test
-    @Concurrent(count=4)
-    @Repeating(repetition=100)
     public void testHive6594() {
-      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       String[] vs = new String[] {
           "-4033.445769230769",
           "6984454.211097692"};
@@ -243,7 +236,7 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      assertEquals(d.toBigDecimal().stripTrailingZeros(),
+      Assert.assertEquals(d.toBigDecimal().stripTrailingZeros(),
           readValue.stripTrailingZeros());
     }
 }

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java Mon Oct  6 03:44:13 2014
@@ -17,21 +17,11 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
-
+import junit.framework.TestCase;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import java.io.*;
 
-public class TestHiveVarcharWritable {
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
+public class TestHiveVarcharWritable extends TestCase {
   public void testStringLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -64,9 +54,6 @@ public class TestHiveVarcharWritable {
     assertEquals(6, vc1.getCharacterLength());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testEnforceLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -79,11 +66,8 @@ public class TestHiveVarcharWritable {
 
     vc1.enforceMaxLength(8);
     assertEquals(8, vc1.getCharacterLength());
-  }
+}
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveVarcharWritable hc1 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
     HiveVarcharWritable hc2 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
@@ -117,9 +101,6 @@ public class TestHiveVarcharWritable {
     assertFalse(0 == hc2.compareTo(hc1));
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testStringValue() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("abcde", 20));
     assertEquals("abcde", vc1.toString());

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java Mon Oct  6 03:44:13 2014
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -35,25 +32,15 @@ import java.util.List;
 import java.util.Random;
 import java.util.TimeZone;
 
-import org.junit.*;
-import static org.junit.Assert.*;
+import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
-public class TestTimestampWritable {
-
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+public class TestTimestampWritable extends TestCase {
 
-  private static ThreadLocal<DateFormat> DATE_FORMAT =
-      new ThreadLocal<DateFormat>() {
-        @Override
-        protected synchronized DateFormat initialValue() {
-          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-        }
-      };
+  private static DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
   private static final int HAS_DECIMAL_MASK = 0x80000000;
 
@@ -77,14 +64,14 @@ public class TestTimestampWritable {
 
   private static long parseToMillis(String s) {
     try {
-      return DATE_FORMAT.get().parse(s).getTime();
+      return DATE_FORMAT.parse(s).getTime();
     } catch (ParseException ex) {
       throw new RuntimeException(ex);
     }
   }
 
-  @Before
-  public void setUp() {
+  @Override
+  protected void setUp() {
     TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
   }
 
@@ -265,9 +252,6 @@ public class TestTimestampWritable {
     return tsw;
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testReverseNanos() {
     assertEquals(0, reverseNanos(0));
     assertEquals(120000000, reverseNanos(21));
@@ -281,8 +265,6 @@ public class TestTimestampWritable {
    * Test serializing and deserializing timestamps that can be represented by a number of seconds
    * from 0 to 2147483647 since the UNIX epoch.
    */
-  @Test
-  @Concurrent(count=4)
   public void testTimestampsWithinPositiveIntRange() throws IOException {
     Random rand = new Random(294722773L);
     for (int i = 0; i < 10000; ++i) {
@@ -299,8 +281,6 @@ public class TestTimestampWritable {
    * Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
    * being fixed.
    */
-  @Test
-  @Concurrent(count=4)
   public void testTimestampsOutsidePositiveIntRange() throws IOException {
     Random rand = new Random(789149717L);
     for (int i = 0; i < 10000; ++i) {
@@ -309,8 +289,6 @@ public class TestTimestampWritable {
     }
   }
 
-  @Test
-  @Concurrent(count=4)
   public void testTimestampsInFullRange() throws IOException {
     Random rand = new Random(2904974913L);
     for (int i = 0; i < 10000; ++i) {
@@ -318,8 +296,6 @@ public class TestTimestampWritable {
     }
   }
 
-  @Test
-  @Concurrent(count=4)
   public void testToFromDouble() {
     Random rand = new Random(294729777L);
     for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
@@ -350,8 +326,6 @@ public class TestTimestampWritable {
     return HiveDecimal.create(d);
   }
 
-  @Test
-  @Concurrent(count=4)
   public void testDecimalToTimestampRandomly() {
     Random rand = new Random(294729777L);
     for (int i = 0; i < 10000; ++i) {
@@ -362,9 +336,6 @@ public class TestTimestampWritable {
     }
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testDecimalToTimestampCornerCases() {
     Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
     assertEquals(0, ts.getTime() % 1000);
@@ -376,9 +347,6 @@ public class TestTimestampWritable {
     }
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testSerializationFormatDirectly() throws IOException {
     assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
     assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
@@ -406,9 +374,6 @@ public class TestTimestampWritable {
                       -3210 - 1, seconds >> 31).toString());
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testMaxSize() {
     // This many bytes are necessary to store the reversed nanoseconds.
     assertEquals(5, WritableUtils.getVIntSize(999999999));
@@ -431,9 +396,6 @@ public class TestTimestampWritable {
     // Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testMillisToSeconds() {
     assertEquals(0, TimestampWritable.millisToSeconds(0));
     assertEquals(-1, TimestampWritable.millisToSeconds(-1));
@@ -465,9 +427,6 @@ public class TestTimestampWritable {
     return result < 0 ? -1 : (result > 0 ? 1 : 0);
   }
 
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
   public void testBinarySortable() {
     Random rand = new Random(5972977L);
     List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();
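
One consequence of dropping the concurrency rules shows up in the DATE_FORMAT field above: SimpleDateFormat is not thread-safe, so the ThreadLocal wrapper was needed while tests ran on multiple threads; under serial TestCase execution a single shared instance is safe. The removed pattern, as a standalone sketch (ThreadLocal.withInitial is a Java 8 convenience for the anonymous-subclass form used here):

    import java.text.DateFormat;
    import java.text.SimpleDateFormat;

    public class PerThreadFormat {
      // One formatter per thread, since SimpleDateFormat keeps mutable state.
      private static final ThreadLocal<DateFormat> FMT =
          ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));

      public static void main(String[] args) throws Exception {
        System.out.println(FMT.get().parse("2014-10-06 03:44:13").getTime());
      }
    }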

Modified: hive/branches/spark/serde/src/test/resources/avro-struct.avsc
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/resources/avro-struct.avsc?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/resources/avro-struct.avsc (original)
+++ hive/branches/spark/serde/src/test/resources/avro-struct.avsc Mon Oct  6 03:44:13 2014
@@ -7,8 +7,8 @@ field6:smallint,field7:int,field8:bigint
 field12:decimal(4,2),field13:void>",
 "fields":[
 {"name":"field1","type":["null","string"],"doc":"string","default":null},
-{"name":"field2","type":["null",{"type":"string","logicalType":"char","maxLength":5}],"doc":"char(5)","default":null},
-{"name":"field3","type":["null",{"type":"string","logicalType":"varchar","maxLength":5}],"doc":"varchar(5)","default":null},
+{"name":"field2","type":["null","string"],"doc":"char(5)","default":null},
+{"name":"field3","type":["null","string"],"doc":"varchar(5)","default":null},
 {"name":"field4","type":["null","bytes"],"doc":"binary","default":null},
 {"name":"field5","type":["null","int"],"doc":"tinyint","default":null},
 {"name":"field6","type":["null","int"],"doc":"smallint","default":null},
@@ -21,4 +21,4 @@ field12:decimal(4,2),field13:void>",
 "scale":2}],"doc":"decimal(4,2)","default":null},
 {"name":"field13","type":"null","doc":"void","default":null}
 ]
-}
+}
\ No newline at end of file

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java Mon Oct  6 03:44:13 2014
@@ -62,7 +62,7 @@ public final class HttpAuthUtils {
     String serverPrincipal = getServerPrincipal(principal, host);
     // Uses the Ticket Granting Ticket in the UserGroupInformation
     return clientUGI.doAs(
-      new HttpKerberosClientAction(serverPrincipal, clientUGI.getUserName(), serverHttpUrl));
+      new HttpKerberosClientAction(serverPrincipal, clientUGI.getShortUserName(), serverHttpUrl));
   }
 
   /**
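
The change above passes the UGI's short user name instead of the full principal to the Kerberos client action. In Hadoop's UserGroupInformation, getUserName() returns the full name (for Kerberos, typically user@REALM), while getShortUserName() returns the local name produced by the auth_to_local mapping rules. A small sketch of the distinction (assumes hadoop-common on the classpath):

    import org.apache.hadoop.security.UserGroupInformation;

    public class UgiNameDemo {
      public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        // e.g. "hive@EXAMPLE.COM" when logged in via Kerberos
        System.out.println("full:  " + ugi.getUserName());
        // e.g. "hive" after the auth_to_local rules are applied
        System.out.println("short: " + ugi.getShortUserName());
      }
    }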

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java Mon Oct  6 03:44:13 2014
@@ -75,7 +75,7 @@ public class TSetIpAddressProcessor<I ex
     if (tSocket == null) {
       LOGGER.warn("Unknown Transport, cannot determine ipAddress");
     } else {
-      THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().getHostAddress());
+      THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().toString());
     }
   }
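
Note the format difference here: InetAddress.getHostAddress() returns only the textual IP (e.g. "10.1.2.3"), whereas InetAddress.toString() returns "hostname/IP", with an empty host part (a leading "/") when no hostname is associated. A quick illustration:

    import java.net.InetAddress;

    public class InetAddressForms {
      public static void main(String[] args) throws Exception {
        InetAddress a = InetAddress.getByName("127.0.0.1");
        System.out.println(a.getHostAddress()); // 127.0.0.1
        System.out.println(a);                  // typically /127.0.0.1 for a literal
      }
    }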
 

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Mon Oct  6 03:44:13 2014
@@ -166,20 +166,15 @@ public class HiveSessionImpl implements 
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
-      if (hiverc != null) {
-        File hivercFile = new File(hiverc);
-        if (hivercFile.isDirectory()) {
-          hivercFile = new File(hivercFile, SessionManager.HIVERCFILE);
-        }
-        if (hivercFile.isFile()) {
-          LOG.info("Running global init file: " + hivercFile);
-          int rc = processor.processFile(hivercFile.getAbsolutePath());
+      if (hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION) != null) {
+        String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION)
+            + File.separator + SessionManager.HIVERCFILE;
+        if (new File(hiverc).exists()) {
+          LOG.info("Running global init file: " + hiverc);
+          int rc = processor.processFile(hiverc);
           if (rc != 0) {
-            LOG.error("Failed on initializing global .hiverc file");
+            LOG.warn("Failed on initializing global .hiverc file");
           }
-        } else {
-          LOG.debug("Global init file " + hivercFile + " does not exist");
         }
       }
     } catch (IOException e) {
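
The rewritten block always treats HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION as a directory and appends SessionManager.HIVERCFILE, dropping the earlier branch that also accepted a direct path to the file; the failure log level likewise moves from error to warn. A minimal sketch of the new resolution rule, with a hypothetical helper name (assuming HIVERCFILE is ".hiverc"):

    import java.io.File;

    public class HivercPath {
      // Resolve the global init file from a configured directory,
      // mirroring the logic added above.
      static File resolve(String configuredDir) {
        if (configuredDir == null) {
          return null;
        }
        return new File(configuredDir + File.separator + ".hiverc");
      }

      public static void main(String[] args) {
        System.out.println(resolve("/etc/hive/conf")); // /etc/hive/conf/.hiverc
      }
    }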

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java Mon Oct  6 03:44:13 2014
@@ -32,7 +32,6 @@ import org.apache.commons.codec.binary.S
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hive.service.auth.AuthenticationProviderFactory;
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
 import org.apache.hive.service.auth.HiveAuthFactory;
@@ -220,7 +219,7 @@ public class ThriftHttpServlet extends T
               "provided by the client.");
         }
         else {
-          return getPrincipalWithoutRealmAndHost(gssContext.getSrcName().toString());
+          return getPrincipalWithoutRealm(gssContext.getSrcName().toString());
         }
       }
       catch (GSSException e) {
@@ -238,19 +237,8 @@ public class ThriftHttpServlet extends T
     }
 
     private String getPrincipalWithoutRealm(String fullPrincipal) {
-      KerberosName fullKerberosName = new KerberosName(fullPrincipal);
-      String serviceName = fullKerberosName.getServiceName();
-      String hostName =  fullKerberosName.getHostName();
-      String principalWithoutRealm = serviceName;
-      if (hostName != null) {
-        principalWithoutRealm = serviceName + "/" + hostName;
-      }
-      return principalWithoutRealm;
-    }
-    
-    private String getPrincipalWithoutRealmAndHost(String fullPrincipal) {
-      KerberosName fullKerberosName = new KerberosName(fullPrincipal);
-      return fullKerberosName.getServiceName();
+      String names[] = fullPrincipal.split("[@]");
+      return names[0];
     }
   }
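
A Kerberos principal has the form primary/instance@REALM. The new single-method version splits on '@' and keeps everything before it, so unlike the removed KerberosName-based pair it always retains the instance (host) part while dropping only the realm. Sketched standalone:

    public class PrincipalName {
      // Strip the realm from "primary/instance@REALM", keeping any host part.
      static String withoutRealm(String fullPrincipal) {
        return fullPrincipal.split("[@]")[0];
      }

      public static void main(String[] args) {
        // prints "hive/node1.example.com"
        System.out.println(withoutRealm("hive/node1.example.com@EXAMPLE.COM"));
      }
    }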
 

Modified: hive/branches/spark/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (original)
+++ hive/branches/spark/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java Mon Oct  6 03:44:13 2014
@@ -150,7 +150,7 @@ public abstract class CLIServiceTest {
     client.closeOperation(opHandle);
 
     // Blocking execute
-    queryString = "SELECT ID+1 FROM TEST_EXEC";
+    queryString = "SELECT ID FROM TEST_EXEC";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     // Expect query to be completed now
     assertEquals("Query should be finished",
@@ -225,27 +225,27 @@ public abstract class CLIServiceTest {
     /**
      * Execute an async query with default config
      */
-    queryString = "SELECT ID+1 FROM " + tableName;
+    queryString = "SELECT ID FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 0
      */
     longPollingTimeout = 0;
-    queryString = "SELECT ID+1 FROM " + tableName;
+    queryString = "SELECT ID FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 500 millis
      */
     longPollingTimeout = 500;
-    queryString = "SELECT ID+1 FROM " + tableName;
+    queryString = "SELECT ID FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Cancellation test
      */
-    queryString = "SELECT ID+1 FROM " + tableName;
+    queryString = "SELECT ID FROM " + tableName;
     opHandle = client.executeStatementAsync(sessionHandle, queryString, confOverlay);
     System.out.println("Cancelling " + opHandle);
     client.cancelOperation(opHandle);

Modified: hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java (original)
+++ hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java Mon Oct  6 03:44:13 2014
@@ -44,7 +44,6 @@ public class TestSessionGlobalInitFile e
   private ThriftCLIServiceClient client;
   private File initFile;
   private String tmpDir;
-  private HiveConf hiveConf;
 
   /**
    * This class is almost the same as EmbeddedThriftBinaryCLIService,
@@ -87,7 +86,7 @@ public class TestSessionGlobalInitFile e
     FileUtils.writeLines(initFile, Arrays.asList(fileContent));
 
     // set up service and client
-    hiveConf = new HiveConf();
+    HiveConf hiveConf = new HiveConf();
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
         initFile.getParentFile().getAbsolutePath());
     service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
@@ -103,26 +102,11 @@ public class TestSessionGlobalInitFile e
 
   @Test
   public void testSessionGlobalInitFile() throws Exception {
-    File tmpInitFile = new File(initFile.getParent(), "hiverc");
-    Assert.assertTrue("Failed to rename " + initFile + " to " + tmpInitFile,
-      initFile.renameTo(tmpInitFile));
-    initFile = tmpInitFile;
-    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
-        initFile.getAbsolutePath());
-    doTestSessionGlobalInitFile();
-  }
-
-  @Test
-  public void testSessionGlobalInitDir() throws Exception {
-    doTestSessionGlobalInitFile();
-  }
-
-  /**
-   * create session, and fetch the property set in global init file. Test if
-   * the global init file .hiverc is loaded correctly by checking the expected
-   * setting property.
-   */
-  private void doTestSessionGlobalInitFile() throws Exception {
+    /**
+     * create session, and fetch the property set in global init file. Test if
+     * the global init file .hiverc is loaded correctly by checking the expected
+     * setting property.
+     */
     SessionHandle sessionHandle = client.openSession(null, null, null);
 
     verifyInitProperty("a", "1", sessionHandle);

Modified: hive/branches/spark/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java (original)
+++ hive/branches/spark/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java Mon Oct  6 03:44:13 2014
@@ -177,7 +177,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID+1 FROM TEST_EXEC_THRIFT";
+    queryString = "SELECT ID FROM TEST_EXEC_THRIFT";
     OperationHandle opHandle = client.executeStatement(sessHandle,
         queryString, opConf);
     assertNotNull(opHandle);
@@ -227,7 +227,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID+1 FROM TEST_EXEC_ASYNC_THRIFT";
+    queryString = "SELECT ID FROM TEST_EXEC_ASYNC_THRIFT";
     System.out.println("Will attempt to execute: " + queryString);
     opHandle = client.executeStatementAsync(sessHandle,
         queryString, opConf);