You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jd...@apache.org on 2014/09/26 04:10:58 UTC

svn commit: r1627700 - in /hive/trunk: common/ common/src/test/org/apache/hadoop/hive/common/type/ serde/ serde/src/test/org/apache/hadoop/hive/serde2/io/

Author: jdere
Date: Fri Sep 26 02:10:58 2014
New Revision: 1627700

URL: http://svn.apache.org/r1627700
Log:
HIVE-8229: Add multithreaded tests for the Hive Writable data types (Jason Dere, reviewed by Thejas Nair)

Added:
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
Modified:
    hive/trunk/common/pom.xml
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
    hive/trunk/serde/pom.xml
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java

Modified: hive/trunk/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/common/pom.xml?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/common/pom.xml (original)
+++ hive/trunk/common/pom.xml Fri Sep 26 02:10:58 2014
@@ -72,6 +72,12 @@
     </dependency>
     <!-- test inter-project -->
     <dependency>
+      <groupId>com.google.code.tempus-fugit</groupId>
+      <artifactId>tempus-fugit</artifactId>
+      <version>${tempus-fugit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java Fri Sep 26 02:10:58 2014
@@ -18,10 +18,19 @@
 
 package org.apache.hadoop.hive.common.type;
 
-import junit.framework.TestCase;
-
-public class TestHiveChar extends TestCase {
-
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
+public class TestHiveChar {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testBasic() {
     HiveChar hc = new HiveChar("abc", 10);
     assertEquals("abc       ", hc.toString());
@@ -47,6 +56,9 @@ public class TestHiveChar extends TestCa
     assertEquals(3, hc.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringLength() {
     HiveChar hc = new HiveChar();
 
@@ -60,6 +72,9 @@ public class TestHiveChar extends TestCa
     assertEquals("0123456789     ", hc.toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() {
     HiveChar hc1 = new HiveChar();
     HiveChar hc2 = new HiveChar();

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Fri Sep 26 02:10:58 2014
@@ -20,12 +20,19 @@ package org.apache.hadoop.hive.common.ty
 import java.math.BigDecimal;
 import java.math.BigInteger;
 
-import org.junit.Assert;
-import org.junit.Test;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
 
 public class TestHiveDecimal {
 
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testPrecisionScaleEnforcement() {
     String decStr = "1786135888657847525803324040144343378.09799306448796128931113691624";
     HiveDecimal dec = HiveDecimal.create(decStr);
@@ -82,6 +89,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testMultiply() {
     HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
     HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
@@ -105,6 +114,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testPow() {
     HiveDecimal dec = HiveDecimal.create("3.00001415926");
     Assert.assertEquals(dec.pow(2), dec.multiply(dec));
@@ -118,6 +129,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testDivide() {
     HiveDecimal dec1 = HiveDecimal.create("3.14");
     HiveDecimal dec2 = HiveDecimal.create("3");
@@ -133,6 +146,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testPlus() {
     HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
     HiveDecimal dec2 = HiveDecimal.create("1");
@@ -145,6 +160,8 @@ public class TestHiveDecimal {
 
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testSubtract() {
       HiveDecimal dec1 = HiveDecimal.create("3.140");
       HiveDecimal dec2 = HiveDecimal.create("1.00");
@@ -152,6 +169,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testPosMod() {
     HiveDecimal hd1 = HiveDecimal.create("-100.91");
     HiveDecimal hd2 = HiveDecimal.create("9.8");
@@ -160,12 +179,16 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testHashCode() {
       Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
       Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testException() {
     HiveDecimal dec = HiveDecimal.create("3.1415.926");
     Assert.assertNull(dec);
@@ -174,6 +197,8 @@ public class TestHiveDecimal {
   }
 
   @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testBinaryConversion() {
     testBinaryConversion("0.00");
     testBinaryConversion("-12.25");

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java Fri Sep 26 02:10:58 2014
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.common.type;
 
-import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.LogUtils;
@@ -28,8 +27,15 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.Random;
 
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
+public class TestHiveVarchar {
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
-public class TestHiveVarchar extends TestCase {
   public TestHiveVarchar() {
     super();
   }
@@ -65,6 +71,9 @@ public class TestHiveVarchar extends Tes
     }
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringLength() throws Exception {
     int strLen = 20;
     int[] lengths = { 15, 20, 25 };
@@ -124,6 +133,9 @@ public class TestHiveVarchar extends Tes
     assertEquals(5, vc1.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveVarchar hc1 = new HiveVarchar("abcd", 20);
     HiveVarchar hc2 = new HiveVarchar("abcd", 20);

Modified: hive/trunk/serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/serde/pom.xml?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/serde/pom.xml (original)
+++ hive/trunk/serde/pom.xml Fri Sep 26 02:10:58 2014
@@ -78,6 +78,12 @@
 
       <!-- test inter-project -->
     <dependency>
+      <groupId>com.google.code.tempus-fugit</groupId>
+      <artifactId>tempus-fugit</artifactId>
+      <version>${tempus-fugit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>

Added: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java?rev=1627700&view=auto
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java (added)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java Fri Sep 26 02:10:58 2014
@@ -0,0 +1,138 @@
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+
+import static org.junit.Assert.*;
+import java.io.*;
+import java.sql.Date;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+
+public class TestDateWritable {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testConstructor() {
+    Date date = Date.valueOf(getRandomDateString());
+    DateWritable dw1 = new DateWritable(date);
+    DateWritable dw2 = new DateWritable(dw1);
+    DateWritable dw3 = new DateWritable(dw1.getDays());
+
+    assertEquals(dw1, dw1);
+    assertEquals(dw1, dw2);
+    assertEquals(dw2, dw3);
+    assertEquals(date, dw1.get());
+    assertEquals(date, dw2.get());
+    assertEquals(date, dw3.get());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testComparison() {
+    // Get 2 different dates
+    Date date1 = Date.valueOf(getRandomDateString());
+    Date date2 = Date.valueOf(getRandomDateString());
+    while (date1.equals(date2)) {
+      date2 = Date.valueOf(getRandomDateString());
+    }
+
+    DateWritable dw1 = new DateWritable(date1);
+    DateWritable dw2 = new DateWritable(date2);
+    DateWritable dw3 = new DateWritable(date1);
+
+    assertTrue("Dates should be equal", dw1.equals(dw1));
+    assertTrue("Dates should be equal", dw1.equals(dw3));
+    assertTrue("Dates should be equal", dw3.equals(dw1));
+    assertEquals("Dates should be equal", 0, dw1.compareTo(dw1));
+    assertEquals("Dates should be equal", 0, dw1.compareTo(dw3));
+    assertEquals("Dates should be equal", 0, dw3.compareTo(dw1));
+
+    assertFalse("Dates should not be equal", dw1.equals(dw2));
+    assertFalse("Dates should not be equal", dw2.equals(dw1));
+    assertTrue("Dates should not be equal", 0 != dw1.compareTo(dw2));
+    assertTrue("Dates should not be equal", 0 != dw2.compareTo(dw1));
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGettersSetters() {
+    Date date1 = Date.valueOf(getRandomDateString());
+    Date date2 = Date.valueOf(getRandomDateString());
+    Date date3 = Date.valueOf(getRandomDateString());
+    DateWritable dw1 = new DateWritable(date1);
+    DateWritable dw2 = new DateWritable(date2);
+    DateWritable dw3 = new DateWritable(date3);
+    DateWritable dw4 = new DateWritable();
+
+    // Getters
+    assertEquals(date1, dw1.get());
+    assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds());
+
+    dw4.set(Date.valueOf("1970-01-02"));
+    assertEquals(1, dw4.getDays());
+    dw4.set(Date.valueOf("1971-01-01"));
+    assertEquals(365, dw4.getDays());
+
+    // Setters
+    dw4.set(dw1.getDays());
+    assertEquals(dw1, dw4);
+
+    dw4.set(dw2.get());
+    assertEquals(dw2, dw4);
+
+    dw4.set(dw3);
+    assertEquals(dw3, dw4);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testWritableMethods() throws Throwable {
+    DateWritable dw1 = new DateWritable(Date.valueOf(getRandomDateString()));
+    DateWritable dw2 = new DateWritable();
+    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+    DataOutput out = new DataOutputStream(byteStream);
+
+    dw1.write(out);
+    dw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray())));
+
+    assertEquals("Dates should be equal", dw1, dw2);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testDateValueOf() {
+    // Just making sure Date.valueOf() works ok
+    String dateStr = getRandomDateString();
+    Date date = Date.valueOf(dateStr);
+    assertEquals(dateStr, date.toString());
+  }
+
+  private static String[] dateStrings = new String[365];
+
+  @BeforeClass
+  public static void setupDateStrings() {
+    DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
+    Date initialDate = Date.valueOf("2014-01-01");
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(initialDate);
+    for (int idx = 0; idx < 365; ++idx) {
+      dateStrings[idx] = format.format(cal.getTime());
+      cal.add(Calendar.DAY_OF_YEAR, 1);
+    }
+  }
+
+  private static String getRandomDateString() {
+    return dateStrings[(int) (Math.random() * 365)];
+  }
+}

Modified: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java (original)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveCharWritable.java Fri Sep 26 02:10:58 2014
@@ -18,10 +18,20 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+
+import static org.junit.Assert.*;
 import org.apache.hadoop.hive.common.type.HiveChar;
 
-public class TestHiveCharWritable extends TestCase {
+public class TestHiveCharWritable {
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testConstructor() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable(new HiveChar("abc", 5));
     assertEquals("abc  ", hcw1.toString());
@@ -30,6 +40,9 @@ public class TestHiveCharWritable extend
     assertEquals("abc  ", hcw2.toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testSet() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
 
@@ -70,18 +83,27 @@ public class TestHiveCharWritable extend
     assertEquals("ab", hcw1.getTextValue().toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testGetHiveChar() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals("abcd      ", hcw.getHiveChar().toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testGetCharacterLength() throws Exception {
     HiveCharWritable hcw = new HiveCharWritable();
     hcw.set("abcd", 10);
     assertEquals(4, hcw.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testEnforceMaxLength() {
     HiveCharWritable hcw1 = new HiveCharWritable();
     hcw1.set("abcdefghij", 10);
@@ -92,6 +114,9 @@ public class TestHiveCharWritable extend
     assertEquals("abcde", hcw1.toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveCharWritable hcw1 = new HiveCharWritable();
     HiveCharWritable hcw2 = new HiveCharWritable();

Modified: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java (original)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java Fri Sep 26 02:10:58 2014
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.Assert;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -29,8 +32,6 @@ import java.util.ArrayList;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hive.common.util.Decimal128FastBuffer;
-import org.junit.Before;
-import org.junit.Test;
 
 /**
  * Unit tests for testing the fast allocation-free conversion
@@ -38,14 +39,15 @@ import org.junit.Test;
  */
 public class TestHiveDecimalWritable {
 
-    private Decimal128FastBuffer scratch;
+    @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+    @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
     @Before
     public void setUp() throws Exception {
-      scratch = new Decimal128FastBuffer();
     }
 
     private void doTestFastStreamForHiveDecimal(String valueString) {
+      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       BigDecimal value = new BigDecimal(valueString);
       Decimal128 dec = new Decimal128();
       dec.update(value);
@@ -61,21 +63,23 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      Assert.assertEquals(value, readValue);
+      assertEquals(value, readValue);
 
       // Now test fastUpdate from the same serialized HiveDecimal
       Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
               witness.getInternalStorage(), (short) witness.getScale());
 
-      Assert.assertEquals(dec, decRead);
+      assertEquals(dec, decRead);
 
       // Test fastUpdate from it's own (not fully compacted) serialized output
       Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
               hdw.getInternalStorage(), (short) hdw.getScale());
-      Assert.assertEquals(dec, decReadSelf);
+      assertEquals(dec, decReadSelf);
     }
 
     @Test
+    @Concurrent(count=4)
+    @Repeating(repetition=100)
     public void testFastStreamForHiveDecimal() {
 
       doTestFastStreamForHiveDecimal("0");
@@ -217,7 +221,10 @@ public class TestHiveDecimalWritable {
     }
 
     @Test
+    @Concurrent(count=4)
+    @Repeating(repetition=100)
     public void testHive6594() {
+      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
       String[] vs = new String[] {
           "-4033.445769230769",
           "6984454.211097692"};
@@ -236,7 +243,7 @@ public class TestHiveDecimalWritable {
 
       BigDecimal readValue = hd.bigDecimalValue();
 
-      Assert.assertEquals(d.toBigDecimal().stripTrailingZeros(),
+      assertEquals(d.toBigDecimal().stripTrailingZeros(),
           readValue.stripTrailingZeros());
     }
 }

Modified: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java (original)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveVarcharWritable.java Fri Sep 26 02:10:58 2014
@@ -17,11 +17,21 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
-import junit.framework.TestCase;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
+import static org.junit.Assert.*;
+
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import java.io.*;
 
-public class TestHiveVarcharWritable extends TestCase {
+public class TestHiveVarcharWritable {
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -54,6 +64,9 @@ public class TestHiveVarcharWritable ext
     assertEquals(6, vc1.getCharacterLength());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testEnforceLength() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
     assertEquals(10, vc1.getCharacterLength());
@@ -66,8 +79,11 @@ public class TestHiveVarcharWritable ext
 
     vc1.enforceMaxLength(8);
     assertEquals(8, vc1.getCharacterLength());
-}
+  }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testComparison() throws Exception {
     HiveVarcharWritable hc1 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
     HiveVarcharWritable hc2 = new HiveVarcharWritable(new HiveVarchar("abcd", 20));
@@ -101,6 +117,9 @@ public class TestHiveVarcharWritable ext
     assertFalse(0 == hc2.compareTo(hc1));
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testStringValue() throws Exception {
     HiveVarcharWritable vc1 = new HiveVarcharWritable(new HiveVarchar("abcde", 20));
     assertEquals("abcde", vc1.toString());

Modified: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java?rev=1627700&r1=1627699&r2=1627700&view=diff
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java (original)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java Fri Sep 26 02:10:58 2014
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.io;
 
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -32,15 +35,25 @@ import java.util.List;
 import java.util.Random;
 import java.util.TimeZone;
 
-import junit.framework.TestCase;
+import org.junit.*;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
-public class TestTimestampWritable extends TestCase {
+public class TestTimestampWritable {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
 
-  private static DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  private static ThreadLocal<DateFormat> DATE_FORMAT =
+      new ThreadLocal<DateFormat>() {
+        @Override
+        protected synchronized DateFormat initialValue() {
+          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        }
+      };
 
   private static final int HAS_DECIMAL_MASK = 0x80000000;
 
@@ -64,14 +77,14 @@ public class TestTimestampWritable exten
 
   private static long parseToMillis(String s) {
     try {
-      return DATE_FORMAT.parse(s).getTime();
+      return DATE_FORMAT.get().parse(s).getTime();
     } catch (ParseException ex) {
       throw new RuntimeException(ex);
     }
   }
 
-  @Override
-  protected void setUp() {
+  @Before
+  public void setUp() {
     TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
   }
 
@@ -252,6 +265,9 @@ public class TestTimestampWritable exten
     return tsw;
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testReverseNanos() {
     assertEquals(0, reverseNanos(0));
     assertEquals(120000000, reverseNanos(21));
@@ -265,6 +281,8 @@ public class TestTimestampWritable exten
    * Test serializing and deserializing timestamps that can be represented by a number of seconds
    * from 0 to 2147483647 since the UNIX epoch.
    */
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsWithinPositiveIntRange() throws IOException {
     Random rand = new Random(294722773L);
     for (int i = 0; i < 10000; ++i) {
@@ -281,6 +299,8 @@ public class TestTimestampWritable exten
    * Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
    * being fixed.
    */
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsOutsidePositiveIntRange() throws IOException {
     Random rand = new Random(789149717L);
     for (int i = 0; i < 10000; ++i) {
@@ -289,6 +309,8 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testTimestampsInFullRange() throws IOException {
     Random rand = new Random(2904974913L);
     for (int i = 0; i < 10000; ++i) {
@@ -296,6 +318,8 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testToFromDouble() {
     Random rand = new Random(294729777L);
     for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
@@ -326,6 +350,8 @@ public class TestTimestampWritable exten
     return HiveDecimal.create(d);
   }
 
+  @Test
+  @Concurrent(count=4)
   public void testDecimalToTimestampRandomly() {
     Random rand = new Random(294729777L);
     for (int i = 0; i < 10000; ++i) {
@@ -336,6 +362,9 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testDecimalToTimestampCornerCases() {
     Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
     assertEquals(0, ts.getTime() % 1000);
@@ -347,6 +376,9 @@ public class TestTimestampWritable exten
     }
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testSerializationFormatDirectly() throws IOException {
     assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
     assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
@@ -374,6 +406,9 @@ public class TestTimestampWritable exten
                       -3210 - 1, seconds >> 31).toString());
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testMaxSize() {
     // This many bytes are necessary to store the reversed nanoseconds.
     assertEquals(5, WritableUtils.getVIntSize(999999999));
@@ -396,6 +431,9 @@ public class TestTimestampWritable exten
     // Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testMillisToSeconds() {
     assertEquals(0, TimestampWritable.millisToSeconds(0));
     assertEquals(-1, TimestampWritable.millisToSeconds(-1));
@@ -427,6 +465,9 @@ public class TestTimestampWritable exten
     return result < 0 ? -1 : (result > 0 ? 1 : 0);
   }
 
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
   public void testBinarySortable() {
     Random rand = new Random(5972977L);
     List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();