Posted to commits@tinkerpop.apache.org by ok...@apache.org on 2016/02/03 22:28:58 UTC

[3/5] incubator-tinkerpop git commit: Added a protected no-arg constructor to LP_O_OB_S_SE_SL_Traverser, thus enabling it to be serialized. Fixed a configuration check issue in InputFormatRDD. TINKERPOP-1119 and TINKERPOP-1117.

Added a protected no-arg constructor to LP_O_OB_S_SE_SL_Traverser, thus enabling it to be serialized. Fixed a configuration check issue in InputFormatRDD. TINKERPOP-1119 and TINKERPOP-1117.


Project: http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/commit/580c683a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/tree/580c683a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/diff/580c683a

Branch: refs/heads/TINKERPOP-962
Commit: 580c683a3d3d5e67b8321ae436cb4e806f682c0e
Parents: 94ca074
Author: Marko A. Rodriguez <ok...@gmail.com>
Authored: Wed Feb 3 14:00:23 2016 -0700
Committer: Marko A. Rodriguez <ok...@gmail.com>
Committed: Wed Feb 3 14:00:23 2016 -0700

----------------------------------------------------------------------
 CHANGELOG.asciidoc                                             | 1 +
 .../process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java | 3 +++
 .../tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java   | 6 +-----
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/blob/580c683a/CHANGELOG.asciidoc
----------------------------------------------------------------------
diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc
index a61153e..eea65c1 100644
--- a/CHANGELOG.asciidoc
+++ b/CHANGELOG.asciidoc
@@ -26,6 +26,7 @@ image::https://raw.githubusercontent.com/apache/incubator-tinkerpop/master/docs/
 TinkerPop 3.1.1 (NOT OFFICIALLY RELEASED YET)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+* Fixed a constructor/serialization bug in `LP_O_OB_S_SE_SL_Traverser`.
 * Added a lazy iterator, memory safe implementation of MapReduce to `SparkGraphComputer`.
 * Added `MapReduce.combine()` support to `SparkGraphComputer`.
 * Bumped to Neo4j 2.3.2.

http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/blob/580c683a/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java
----------------------------------------------------------------------
diff --git a/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java b/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java
index 9781f84..8ec5de5 100644
--- a/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java
+++ b/gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/traverser/LP_O_OB_S_SE_SL_Traverser.java
@@ -35,6 +35,9 @@ public class LP_O_OB_S_SE_SL_Traverser<T> extends O_OB_S_SE_SL_Traverser<T> {
 
     protected Path path;
 
+    protected LP_O_OB_S_SE_SL_Traverser() {
+    }
+
     public LP_O_OB_S_SE_SL_Traverser(final T t, final Step<T, ?> step) {
         super(t, step);
         this.path = ImmutablePath.make();

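Note: the value of the new no-arg constructor is easiest to see through reflection. The sketch below is not TinkerPop's serialization path itself; it only illustrates that reflection-based serializers, which typically materialize a "blank" instance through a declared no-arg constructor and populate its fields afterwards, could not previously instantiate LP_O_OB_S_SE_SL_Traverser that way. The class name NoArgConstructorCheck is made up for the example, which assumes gremlin-core is on the classpath.

    import java.lang.reflect.Constructor;

    import org.apache.tinkerpop.gremlin.process.traversal.traverser.LP_O_OB_S_SE_SL_Traverser;

    public class NoArgConstructorCheck {
        public static void main(final String[] args) throws Exception {
            // Before this commit the lookup below threw NoSuchMethodException;
            // with the new protected constructor it succeeds.
            final Constructor<?> ctor = LP_O_OB_S_SE_SL_Traverser.class.getDeclaredConstructor();
            ctor.setAccessible(true); // the constructor is protected, not public
            final Object blankTraverser = ctor.newInstance();
            System.out.println("instantiated: " + blankTraverser.getClass().getSimpleName());
        }
    }
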
http://git-wip-us.apache.org/repos/asf/incubator-tinkerpop/blob/580c683a/spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java
----------------------------------------------------------------------
diff --git a/spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java b/spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java
index 57d7080..a7bfbc1 100644
--- a/spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java
+++ b/spark-gremlin/src/main/java/org/apache/tinkerpop/gremlin/spark/structure/io/InputFormatRDD.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.tinkerpop.gremlin.hadoop.Constants;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.FileSystemStorage;
 import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;
 import org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable;
 import org.apache.tinkerpop.gremlin.hadoop.structure.util.ConfUtil;
@@ -40,7 +39,6 @@ public final class InputFormatRDD implements InputRDD {
     @Override
     public JavaPairRDD<Object, VertexWritable> readGraphRDD(final Configuration configuration, final JavaSparkContext sparkContext) {
         final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration);
-        hadoopConfiguration.set(configuration.getString(Constants.GREMLIN_HADOOP_INPUT_LOCATION), Constants.getSearchGraphLocation(configuration.getString(Constants.GREMLIN_HADOOP_INPUT_LOCATION), FileSystemStorage.open(hadoopConfiguration)).get());
         return sparkContext.newAPIHadoopRDD(hadoopConfiguration,
                 (Class<InputFormat<NullWritable, VertexWritable>>) hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_INPUT_FORMAT, InputFormat.class),
                 NullWritable.class,
@@ -50,9 +48,7 @@ public final class InputFormatRDD implements InputRDD {
 
     @Override
     public <K, V> JavaPairRDD<K, V> readMemoryRDD(final Configuration configuration, final String memoryKey, final JavaSparkContext sparkContext) {
-        final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration);
-        hadoopConfiguration.set(configuration.getString(Constants.GREMLIN_HADOOP_INPUT_LOCATION), Constants.getMemoryLocation(configuration.getString(Constants.GREMLIN_HADOOP_INPUT_LOCATION), memoryKey));
-        return sparkContext.newAPIHadoopRDD(hadoopConfiguration,
+        return sparkContext.newAPIHadoopRDD(ConfUtil.makeHadoopConfiguration(configuration),
                 SequenceFileInputFormat.class,
                 ObjectWritable.class,
                 ObjectWritable.class)
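
Note: on the configuration check issue, the lines removed from readGraphRDD and readMemoryRDD pass the value of the gremlin.hadoop.inputLocation property as the key to hadoopConfiguration.set(...), rather than the key itself. The sketch below only illustrates that key/value mix-up against a plain Hadoop Configuration, not InputFormatRDD itself; the property name and sample paths are assumed for the example.

    import org.apache.hadoop.conf.Configuration;

    public class ConfigKeyVsValue {
        public static void main(final String[] args) {
            final Configuration conf = new Configuration(false);
            // Assumed property name, standing in for Constants.GREMLIN_HADOOP_INPUT_LOCATION.
            final String key = "gremlin.hadoop.inputLocation";
            conf.set(key, "/tmp/tinkerpop/input"); // illustrative value

            // The removed code effectively did conf.set(conf.get(key), resolvedLocation):
            // it writes a property literally named after the old value instead of
            // updating the intended key, and would fail if the property were unset,
            // since conf.get(key) would return null.
            conf.set(conf.get(key), "/tmp/tinkerpop/resolved");

            System.out.println(conf.get(key));                  // still the original value
            System.out.println(conf.get("/tmp/tinkerpop/input")); // the stray entry
        }
    }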