You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hcatalog-commits@incubator.apache.org by tr...@apache.org on 2012/09/08 15:27:47 UTC

svn commit: r1382319 - in /incubator/hcatalog/trunk: ./ src/java/org/apache/hcatalog/common/ src/java/org/apache/hcatalog/data/ src/test/org/apache/hcatalog/data/

Author: travis
Date: Sat Sep  8 15:27:47 2012
New Revision: 1382319

URL: http://svn.apache.org/viewvc?rev=1382319&view=rev
Log:
HCATALOG-489 HCatalog style cleanups and re-add javac debug option

Modified:
    incubator/hcatalog/trunk/CHANGES.txt
    incubator/hcatalog/trunk/build-common.xml
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java

Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Sat Sep  8 15:27:47 2012
@@ -38,6 +38,8 @@ Trunk (unreleased changes)
   HCAT-427 Document storage-based authorization (lefty via gates)
 
   IMPROVEMENTS
+  HCAT-489 HCatalog style cleanups and re-add javac debug option (traviscrawford)
+
   HCAT-431 document hcat type to java class/pig type mapping (lefty via khorgath)
 
   HCAT-492 Document CTAS workaround for Hive with JSON serde (lefty via khorgath)

Modified: incubator/hcatalog/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/build-common.xml?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/build-common.xml (original)
+++ incubator/hcatalog/trunk/build-common.xml Sat Sep  8 15:27:47 2012
@@ -40,6 +40,7 @@
           deprecation="false"
           sourcepath=""
           includes="**/*.java"
+          debug="${javac.debug}"
           encoding="utf-8"
           srcdir="@{srcDir}"
           destdir="@{destDir}"

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java Sat Sep  8 15:27:47 2012
@@ -208,8 +208,7 @@ public class HCatUtil {
      * @return HCatSchema instance which contains the partition columns
      * @throws IOException
      */
-    public static HCatSchema getPartitionColumns(Table table)
-            throws IOException {
+    public static HCatSchema getPartitionColumns(Table table) throws IOException {
         HCatSchema cols = new HCatSchema(new LinkedList<HCatFieldSchema>());
         if (table.getPartitionKeys().size() != 0) {
             for (FieldSchema fs : table.getPartitionKeys()) {

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/LazyHCatRecord.java Sat Sep  8 15:27:47 2012
@@ -42,7 +42,7 @@ public class LazyHCatRecord extends HCat
 
   public static final Logger LOG = LoggerFactory.getLogger(LazyHCatRecord.class.getName());
 
-  private Object o;
+  private Object wrappedObject;
   private StructObjectInspector soi;
   
   @Override
@@ -50,7 +50,7 @@ public class LazyHCatRecord extends HCat
     try {
       StructField fref = soi.getAllStructFieldRefs().get(fieldNum);
       return HCatRecordSerDe.serializeField(
-          soi.getStructFieldData(o, fref),
+          soi.getStructFieldData(wrappedObject, fref),
           fref.getFieldObjectInspector());
     } catch (SerDeException e) {
       throw new IllegalStateException("SerDe Exception deserializing",e);
@@ -115,18 +115,14 @@ public class LazyHCatRecord extends HCat
     throw new UnsupportedOperationException("not allowed to run copy() on LazyHCatRecord");
   }
   
-  public LazyHCatRecord(Object o, ObjectInspector oi)
-  throws Exception {
-
+  public LazyHCatRecord(Object wrappedObject, ObjectInspector oi) throws Exception {
     if (oi.getCategory() != Category.STRUCT) {
-      throw new SerDeException(getClass().toString()
-          + " can only make a lazy hcat record from objects of " + 
-          "struct types, but we got: "
-          + oi.getTypeName());
+      throw new SerDeException(getClass().toString() + " can only make a lazy hcat record from " +
+          "objects of struct types, but we got: " + oi.getTypeName());
     }
 
     this.soi = (StructObjectInspector)oi;
-    this.o = o;
+    this.wrappedObject = wrappedObject;
   }
 
   @Override

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java Sat Sep  8 15:27:47 2012
@@ -24,48 +24,50 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.data.schema.HCatSchemaUtils;
+import org.junit.Assert;
+import org.junit.Test;
 
-import junit.framework.TestCase;
-
-public class TestLazyHCatRecord extends TestCase{
+public class TestLazyHCatRecord {
 
   private final int INT_CONST = 789;
   private final long LONG_CONST = 5000000000L;
   private final double DOUBLE_CONST = 3.141592654;
   private final String STRING_CONST = "hello world";
 
-
+  @Test
   public void testGet() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
-    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
-    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
-    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
-    assertEquals(STRING_CONST, (String)r.get(3));
+    Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue());
+    Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue());
+    Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0);
+    Assert.assertEquals(STRING_CONST, (String) r.get(3));
   }
 
+  @Test
   public void testGetWithName() throws Exception {
     TypeInfo ti = getTypeInfo();
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(ti));
     HCatSchema schema = HCatSchemaUtils.getHCatSchema(ti)
                                           .get(0).getStructSubSchema();
-    assertEquals(INT_CONST, ((Integer)r.get("an_int", schema)).intValue());
-    assertEquals(LONG_CONST, ((Long)r.get("a_long", schema)).longValue());
-    assertEquals(DOUBLE_CONST, ((Double)r.get("a_double", schema)).doubleValue());
-    assertEquals(STRING_CONST, (String)r.get("a_string", schema));
+    Assert.assertEquals(INT_CONST, ((Integer) r.get("an_int", schema)).intValue());
+    Assert.assertEquals(LONG_CONST, ((Long) r.get("a_long", schema)).longValue());
+    Assert.assertEquals(DOUBLE_CONST, ((Double) r.get("a_double", schema)).doubleValue(), 0);
+    Assert.assertEquals(STRING_CONST, (String) r.get("a_string", schema));
   }
 
+  @Test
   public void testGetAll() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     List<Object> list = r.getAll();
-    assertEquals(INT_CONST, ((Integer)list.get(0)).intValue());
-    assertEquals(LONG_CONST, ((Long)list.get(1)).longValue());
-    assertEquals(DOUBLE_CONST, ((Double)list.get(2)).doubleValue());
-    assertEquals(STRING_CONST, (String)list.get(3));
+    Assert.assertEquals(INT_CONST, ((Integer) list.get(0)).intValue());
+    Assert.assertEquals(LONG_CONST, ((Long) list.get(1)).longValue());
+    Assert.assertEquals(DOUBLE_CONST, ((Double) list.get(2)).doubleValue(), 0);
+    Assert.assertEquals(STRING_CONST, (String) list.get(3));
   }
 
+  @Test
   public void testSet() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -74,14 +76,16 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testSize() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
-    assertEquals(4, r.size());
+    Assert.assertEquals(4, r.size());
   }
 
+  @Test
   public void testReadFields() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -90,9 +94,10 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testWrite() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -101,9 +106,10 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testSetWithName() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -112,9 +118,10 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testRemove() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -123,9 +130,10 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testCopy() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
     boolean sawException = false;
@@ -134,19 +142,19 @@ public class TestLazyHCatRecord extends 
     } catch (UnsupportedOperationException uoe) {
       sawException = true;
     }
-    assertTrue(sawException);
+    Assert.assertTrue(sawException);
   }
 
+  @Test
   public void testGetWritable() throws Exception {
     HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector()).getWritable();
-    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
-    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
-    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
-    assertEquals(STRING_CONST, (String)r.get(3));
-    assertEquals("org.apache.hcatalog.data.DefaultHCatRecord", r.getClass().getName());
+    Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue());
+    Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue());
+    Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0);
+    Assert.assertEquals(STRING_CONST, (String) r.get(3));
+    Assert.assertEquals("org.apache.hcatalog.data.DefaultHCatRecord", r.getClass().getName());
   }
 
-
   private HCatRecord getHCatRecord() throws Exception {
 
     List<Object> rec_1 = new ArrayList<Object>(4);

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java?rev=1382319&r1=1382318&r2=1382319&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestReaderWriter.java Sat Sep  8 15:27:47 2012
@@ -32,12 +32,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.transfer.DataTransferFactory;
@@ -47,21 +43,19 @@ import org.apache.hcatalog.data.transfer
 import org.apache.hcatalog.data.transfer.ReaderContext;
 import org.apache.hcatalog.data.transfer.WriteEntity;
 import org.apache.hcatalog.data.transfer.WriterContext;
+import org.apache.hcatalog.mapreduce.HCatBaseTest;
 import org.junit.Assert;
 import org.junit.Test;
 
-public class TestReaderWriter {
+public class TestReaderWriter extends HCatBaseTest {
 
   @Test
   public void test() throws MetaException, CommandNeedRetryException,
       IOException, ClassNotFoundException {
 
-    HiveConf conf = new HiveConf(getClass());
-    Driver driver = new Driver(conf);
-    SessionState.start(new CliSessionState(conf));
     driver.run("drop table mytbl");
     driver.run("create table mytbl (a string, b int)");
-    Iterator<Entry<String, String>> itr = conf.iterator();
+    Iterator<Entry<String, String>> itr = hiveConf.iterator();
     Map<String, String> map = new HashMap<String, String>();
     while (itr.hasNext()) {
       Entry<String, String> kv = itr.next();