Posted to commits@parquet.apache.org by ti...@apache.org on 2014/09/22 20:11:18 UTC

git commit: PARQUET-94: Fix bug in ParquetScroogeScheme constructor, minor cleanup

Repository: incubator-parquet-mr
Updated Branches:
  refs/heads/master 501e8feac -> 9cdcf3bbd


PARQUET-94: Fix bug in ParquetScroogeScheme constructor, minor cleanup

I noticed that ParquetScroogeScheme's (FilterPredicate, Class<T>) constructor ignores the provided klass argument.
I also added the missing type parameters where the Config object was used as a raw type.
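
For reference, a minimal sketch of the fixed behavior from a caller's point of
view; MyScroogeRecord is a hypothetical Scrooge-generated class used only for
illustration and is not part of this commit:

    import parquet.filter2.predicate.FilterApi;
    import parquet.filter2.predicate.FilterPredicate;
    import parquet.scrooge.ParquetScroogeScheme;

    public class ScroogeSchemeSketch {
      public static void main(String[] args) {
        FilterPredicate pred = FilterApi.eq(FilterApi.intColumn("id"), 1);

        // Before this commit the two-argument constructor built its Config with
        // only the filter predicate, silently dropping MyScroogeRecord.class.
        // It now also calls withRecordClass(klass), so the record class is kept.
        ParquetScroogeScheme<MyScroogeRecord> scheme =
            new ParquetScroogeScheme<MyScroogeRecord>(pred, MyScroogeRecord.class);
      }
    }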

Author: Alex Levenson <al...@twitter.com>

Closes #61 from isnotinvain/alexlevenson/parquet-scrooge-cleanup and squashes the following commits:

2b16007 [Alex Levenson] Fix bug in ParquetScroogeScheme constructor, minor cleanup


Project: http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/commit/9cdcf3bb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/tree/9cdcf3bb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/diff/9cdcf3bb

Branch: refs/heads/master
Commit: 9cdcf3bbdf8f772d3fadf388b2db048598c155e9
Parents: 501e8fe
Author: Alex Levenson <al...@twitter.com>
Authored: Mon Sep 22 11:11:08 2014 -0700
Committer: Tianshuo Deng <td...@twitter.com>
Committed: Mon Sep 22 11:11:08 2014 -0700

----------------------------------------------------------------------
 .../parquet/cascading/ParquetTBaseScheme.java   | 13 +++++------
 .../parquet/cascading/ParquetValueScheme.java   | 23 ++++++++++----------
 .../parquet/scrooge/ParquetScroogeScheme.java   | 10 +++------
 3 files changed, 19 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/blob/9cdcf3bb/parquet-cascading/src/main/java/parquet/cascading/ParquetTBaseScheme.java
----------------------------------------------------------------------
diff --git a/parquet-cascading/src/main/java/parquet/cascading/ParquetTBaseScheme.java b/parquet-cascading/src/main/java/parquet/cascading/ParquetTBaseScheme.java
index 40817af..c84addc 100644
--- a/parquet-cascading/src/main/java/parquet/cascading/ParquetTBaseScheme.java
+++ b/parquet-cascading/src/main/java/parquet/cascading/ParquetTBaseScheme.java
@@ -26,7 +26,6 @@ import parquet.filter2.predicate.FilterPredicate;
 import parquet.hadoop.ParquetInputFormat;
 import parquet.hadoop.mapred.DeprecatedParquetInputFormat;
 import parquet.hadoop.mapred.DeprecatedParquetOutputFormat;
-import parquet.hadoop.thrift.ParquetThriftInputFormat;
 import parquet.hadoop.thrift.ThriftReadSupport;
 import parquet.hadoop.thrift.ThriftWriteSupport;
 import parquet.thrift.TBaseRecordConverter;
@@ -35,26 +34,25 @@ public class ParquetTBaseScheme<T extends TBase<?,?>> extends ParquetValueScheme
 
   // In the case of reads, we can read the thrift class from the file metadata
   public ParquetTBaseScheme() {
-    this(new Config());
+    this(new Config<T>());
   }
 
   public ParquetTBaseScheme(Class<T> thriftClass) {
-    this(new Config().withRecordClass(thriftClass));
+    this(new Config<T>().withRecordClass(thriftClass));
   }
 
   public ParquetTBaseScheme(FilterPredicate filterPredicate) {
-    this(new Config().withFilterPredicate(filterPredicate));
+    this(new Config<T>().withFilterPredicate(filterPredicate));
   }
 
   public ParquetTBaseScheme(FilterPredicate filterPredicate, Class<T> thriftClass) {
-    this(new Config().withRecordClass(thriftClass).withFilterPredicate(filterPredicate));
+    this(new Config<T>().withRecordClass(thriftClass).withFilterPredicate(filterPredicate));
   }
 
-  public ParquetTBaseScheme(Config config) {
+  public ParquetTBaseScheme(Config<T> config) {
     super(config);
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public void sourceConfInit(FlowProcess<JobConf> fp,
       Tap<JobConf, RecordReader, OutputCollector> tap, JobConf jobConf) {
@@ -64,7 +62,6 @@ public class ParquetTBaseScheme<T extends TBase<?,?>> extends ParquetValueScheme
     ThriftReadSupport.setRecordConverterClass(jobConf, TBaseRecordConverter.class);
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public void sinkConfInit(FlowProcess<JobConf> fp,
       Tap<JobConf, RecordReader, OutputCollector> tap, JobConf jobConf) {
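
A usage sketch of the now-parameterized ParquetTBaseScheme constructors;
MyThriftRecord stands in for any Thrift/TBase-generated class and is not part
of this change:

    import parquet.cascading.ParquetTBaseScheme;
    import parquet.cascading.ParquetValueScheme.Config;

    // Hypothetical TBase-generated class, for illustration only.
    Config<MyThriftRecord> config =
        new Config<MyThriftRecord>().withRecordClass(MyThriftRecord.class);
    ParquetTBaseScheme<MyThriftRecord> scheme =
        new ParquetTBaseScheme<MyThriftRecord>(config);

    // Equivalent shortcut via the convenience constructor:
    ParquetTBaseScheme<MyThriftRecord> same =
        new ParquetTBaseScheme<MyThriftRecord>(MyThriftRecord.class);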

http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/blob/9cdcf3bb/parquet-cascading/src/main/java/parquet/cascading/ParquetValueScheme.java
----------------------------------------------------------------------
diff --git a/parquet-cascading/src/main/java/parquet/cascading/ParquetValueScheme.java b/parquet-cascading/src/main/java/parquet/cascading/ParquetValueScheme.java
index 6e8c13a..b994ff6 100644
--- a/parquet-cascading/src/main/java/parquet/cascading/ParquetValueScheme.java
+++ b/parquet-cascading/src/main/java/parquet/cascading/ParquetValueScheme.java
@@ -32,10 +32,8 @@ import cascading.tuple.TupleEntry;
 import parquet.filter2.predicate.FilterPredicate;
 import parquet.hadoop.ParquetInputFormat;
 import parquet.hadoop.mapred.Container;
-import parquet.hadoop.mapred.DeprecatedParquetOutputFormat;
 import parquet.hadoop.thrift.ParquetThriftInputFormat;
 import parquet.hadoop.thrift.ThriftReadSupport;
-import parquet.hadoop.thrift.ThriftWriteSupport;
 
 import static parquet.Preconditions.checkNotNull;
 
@@ -76,31 +74,31 @@ public abstract class ParquetValueScheme<T> extends Scheme<JobConf, RecordReader
       return klass;
     }
 
-    public Config withFilterPredicate(FilterPredicate f) {
-      return new Config(this.klass, checkNotNull(f, "filterPredicate"), this.projectionString);
+    public Config<T> withFilterPredicate(FilterPredicate f) {
+      return new Config<T>(this.klass, checkNotNull(f, "filterPredicate"), this.projectionString);
     }
 
-    public Config withProjectionString(String p) {
-      return new Config(this.klass, this.filterPredicate, checkNotNull(p, "projectionFilter"));
+    public Config<T> withProjectionString(String p) {
+      return new Config<T>(this.klass, this.filterPredicate, checkNotNull(p, "projectionFilter"));
     }
 
-    public Config withRecordClass(Class<T> klass) {
-      return new Config(checkNotNull(klass, "recordClass"), this.filterPredicate, this.projectionString);
+    public Config<T> withRecordClass(Class<T> klass) {
+      return new Config<T>(checkNotNull(klass, "recordClass"), this.filterPredicate, this.projectionString);
     }
   }
 
   private static final long serialVersionUID = 157560846420730043L;
-  protected final Config config;
+  protected final Config<T> config;
 
   public ParquetValueScheme() {
-    this(new Config());
+    this(new Config<T>());
   }
 
   public ParquetValueScheme(FilterPredicate filterPredicate) {
-    this(new Config().withFilterPredicate(filterPredicate));
+    this(new Config<T>().withFilterPredicate(filterPredicate));
   }
 
-  public ParquetValueScheme(Config config) {
+  public ParquetValueScheme(Config<T> config) {
     this.config = config;
   }
 
@@ -143,6 +141,7 @@ public abstract class ParquetValueScheme<T> extends Scheme<JobConf, RecordReader
     return true;
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public void sink(FlowProcess<JobConf> fp, SinkCall<Object[], OutputCollector> sc)
       throws IOException {
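
A sketch of how the generified Config builder reads on the caller side;
MyRecord and the projection pattern are placeholders for illustration:

    import parquet.cascading.ParquetValueScheme.Config;
    import parquet.filter2.predicate.FilterApi;
    import parquet.filter2.predicate.FilterPredicate;

    FilterPredicate pred = FilterApi.gt(FilterApi.longColumn("timestamp"), 0L);

    // Each with* call returns a new Config<MyRecord>, so the type parameter is
    // preserved across the chain instead of decaying to a raw Config.
    Config<MyRecord> config = new Config<MyRecord>()
        .withRecordClass(MyRecord.class)
        .withFilterPredicate(pred)
        .withProjectionString("id;name");  // illustrative projection pattern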

http://git-wip-us.apache.org/repos/asf/incubator-parquet-mr/blob/9cdcf3bb/parquet-scrooge/src/main/java/parquet/scrooge/ParquetScroogeScheme.java
----------------------------------------------------------------------
diff --git a/parquet-scrooge/src/main/java/parquet/scrooge/ParquetScroogeScheme.java b/parquet-scrooge/src/main/java/parquet/scrooge/ParquetScroogeScheme.java
index 0f46f8f..1ca9cc6 100644
--- a/parquet-scrooge/src/main/java/parquet/scrooge/ParquetScroogeScheme.java
+++ b/parquet-scrooge/src/main/java/parquet/scrooge/ParquetScroogeScheme.java
@@ -31,7 +31,6 @@ import parquet.cascading.ParquetValueScheme;
 import parquet.filter2.predicate.FilterPredicate;
 import parquet.hadoop.ParquetInputFormat;
 import parquet.hadoop.mapred.DeprecatedParquetInputFormat;
-import parquet.hadoop.thrift.ParquetThriftInputFormat;
 import parquet.hadoop.thrift.ThriftReadSupport;
 
 public class ParquetScroogeScheme<T extends ThriftStruct> extends ParquetValueScheme<T> {
@@ -39,18 +38,17 @@ public class ParquetScroogeScheme<T extends ThriftStruct> extends ParquetValueSc
   private static final long serialVersionUID = -8332274507341448397L;
 
   public ParquetScroogeScheme(Class<T> klass) {
-    this(new Config().withRecordClass(klass));
+    this(new Config<T>().withRecordClass(klass));
   }
 
   public ParquetScroogeScheme(FilterPredicate filterPredicate, Class<T> klass) {
-    this(new Config().withFilterPredicate(filterPredicate));
+    this(new Config<T>().withFilterPredicate(filterPredicate).withRecordClass(klass));
   }
 
-  public ParquetScroogeScheme(Config config) {
+  public ParquetScroogeScheme(Config<T> config) {
     super(config);
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public void sinkConfInit(FlowProcess<JobConf> arg0,
       Tap<JobConf, RecordReader, OutputCollector> arg1, JobConf arg2) {
@@ -63,8 +61,6 @@ public class ParquetScroogeScheme<T extends ThriftStruct> extends ParquetValueSc
   @Override
   public boolean isSink() { return false; }
 
-
-  @SuppressWarnings("rawtypes")
   @Override
   public void sourceConfInit(FlowProcess<JobConf> fp,
       Tap<JobConf, RecordReader, OutputCollector> tap, JobConf jobConf) {
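
For completeness, a rough sketch of wiring the Scrooge scheme into a Cascading
source tap, assuming Cascading's Hfs tap and the same hypothetical
MyScroogeRecord class as above; since isSink() returns false, the scheme can
only be used on the source side:

    import cascading.tap.hadoop.Hfs;
    import parquet.scrooge.ParquetScroogeScheme;

    // Read-only: ParquetScroogeScheme.isSink() returns false, so this tap is a source.
    ParquetScroogeScheme<MyScroogeRecord> scheme =
        new ParquetScroogeScheme<MyScroogeRecord>(MyScroogeRecord.class);
    Hfs source = new Hfs(scheme, "hdfs:///path/to/parquet-data");  // illustrative path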