Posted to commits@geode.apache.org by kl...@apache.org on 2017/05/31 17:01:14 UTC

[09/32] geode git commit: Undoing spark connector changes related to geode 1.2

Undoing spark connector changes related to geode 1.2

The spark connector builds against geode 1.0.0-incubating, so it was no
longer compiling with these changes.
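
The incompatibility is in the Execution API: geode 1.2 added
Execution.setArguments in place of withArgs, but 1.0.0-incubating has only
withArgs, so the setArguments calls did not compile. A minimal Java sketch of
the difference; the class name and argument value are illustrative, not from
the commit:

    import org.apache.geode.cache.Region;
    import org.apache.geode.cache.execute.Execution;
    import org.apache.geode.cache.execute.FunctionService;

    public class WithArgsSketch {
      static Execution prepare(Region<String, String> region, Object args) {
        // Compiles against geode 1.0.0-incubating, the connector's build target:
        return FunctionService.onRegion(region).withArgs(args);
        // Against geode 1.2 the same call would be spelled:
        //   FunctionService.onRegion(region).setArguments(args);
      }
    }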


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/0dae918d
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/0dae918d
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/0dae918d

Branch: refs/heads/feature/GEODE-1279
Commit: 0dae918df3b4c7bc53abdbf57c92dddba8e814f2
Parents: e79d27d
Author: Lynn Hughes-Godfrey <lh...@pivotal.io>
Authored: Wed May 24 15:31:09 2017 -0700
Committer: Dan Smith <up...@apache.org>
Committed: Thu May 25 10:36:36 2017 -0700

----------------------------------------------------------------------
 .../geodefunctions/RetrieveRegionFunction.java  | 20 ++++++++++----------
 .../internal/DefaultGeodeConnection.scala       |  4 ++--
 2 files changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/0dae918d/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
index 7407cc8..096e4d5 100644
--- a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
+++ b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
@@ -16,24 +16,25 @@
  */
 package org.apache.geode.spark.connector.internal.geodefunctions;
 
+import java.util.Iterator;
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
-import org.apache.geode.cache.Region;
-import org.apache.geode.cache.execute.Function;
-import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.cache.execute.FunctionException;
-import org.apache.geode.cache.partition.PartitionRegionHelper;
 import org.apache.geode.cache.query.Query;
 import org.apache.geode.cache.query.QueryService;
 import org.apache.geode.cache.query.SelectResults;
 import org.apache.geode.cache.query.Struct;
+import org.apache.geode.internal.cache.*;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.cache.partition.PartitionRegionHelper;
 import org.apache.geode.internal.cache.execute.InternalRegionFunctionContext;
 import org.apache.geode.internal.cache.execute.InternalResultSender;
 import org.apache.geode.internal.cache.partitioned.PREntriesIterator;
 import org.apache.geode.internal.logging.LogService;
-import org.apache.logging.log4j.Logger;
-
-import java.util.Iterator;
 
 /**
  * GemFire function that is used by `SparkContext.geodeRegion(regionPath, whereClause)`
@@ -84,11 +85,10 @@ public class RetrieveRegionFunction implements Function {
     InternalRegionFunctionContext irfc = (InternalRegionFunctionContext) context;
     LocalRegion localRegion = (LocalRegion) irfc.getDataSet();
     boolean partitioned = localRegion.getDataPolicy().withPartitioning();
-    if (StringUtils.isBlank(where)) {
+    if (where.trim().isEmpty())
       retrieveFullRegion(irfc, partitioned, taskDesc);
-    } else {
+    else
       retrieveRegionWithWhereClause(irfc, localRegion, partitioned, where, taskDesc);
-    }
   }
 
   /** ------------------------------------------ */

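A behavioral note on the hunk above: StringUtils.isBlank treats null as
blank, while the restored where.trim().isEmpty() assumes the where clause is
non-null. A standalone Java sketch of the difference; the isBlank helper
below is an illustrative stand-in for the commons-lang-style method, not the
connector's code:

    public class BlankCheckSketch {
      // Illustrative stand-in for StringUtils.isBlank.
      static boolean isBlank(String s) {
        return s == null || s.trim().isEmpty();
      }

      public static void main(String[] args) {
        String where = "   ";
        System.out.println(isBlank(where));         // true
        System.out.println(where.trim().isEmpty()); // true
        // The two diverge on null: isBlank(null) is true, whereas calling
        // trim() on a null where clause throws a NullPointerException.
      }
    }
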
http://git-wip-us.apache.org/repos/asf/geode/blob/0dae918d/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
index b5dcf1d..670a3f8 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
@@ -129,7 +129,7 @@ private[connector] class DefaultGeodeConnection (
     val collector = new StructStreamingResultCollector(desc)
         // RetrieveRegionResultCollector[(K, V)]
     import scala.collection.JavaConversions.setAsJavaSet
-    val exec = FunctionService.onRegion(region).setArguments(args).withCollector(collector).asInstanceOf[InternalExecution]
+    val exec = FunctionService.onRegion(region).withArgs(args).withCollector(collector).asInstanceOf[InternalExecution]
       .withBucketFilter(split.bucketSet.map(Integer.valueOf))
     exec.setWaitOnExceptionFlag(true)
     exec.execute(RetrieveRegionFunction.ID)
@@ -144,7 +144,7 @@ private[connector] class DefaultGeodeConnection (
     val args: Array[String] = Array[String](queryString, bucketSet.toString)
     val exec = FunctionService.onRegion(region).withCollector(collector).asInstanceOf[InternalExecution]
       .withBucketFilter(bucketSet.map(Integer.valueOf))
-      .setArguments(args)
+      .withArgs(args)
     exec.execute(QueryFunction.ID)
     collector.getResult
   }
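
For context, a Java sketch of the call pattern the Scala hunks revert to,
written against the 1.0.0-incubating API; the function id, argument values,
and bucket set below are placeholders, not values from the connector:

    import java.util.Set;
    import org.apache.geode.cache.Region;
    import org.apache.geode.cache.execute.FunctionService;
    import org.apache.geode.cache.execute.ResultCollector;
    import org.apache.geode.internal.cache.execute.InternalExecution;

    public class RegionFunctionCallSketch {
      static Object call(Region<?, ?> region, ResultCollector<?, ?> collector,
                         Set<Integer> buckets, String functionId) {
        // withArgs, not setArguments, is the spelling 1.0.0-incubating provides.
        InternalExecution exec = (InternalExecution) FunctionService.onRegion(region)
            .withArgs(new String[] {"query-placeholder", "task-desc-placeholder"})
            .withCollector(collector);
        exec = exec.withBucketFilter(buckets); // limit execution to this split's buckets
        exec.setWaitOnExceptionFlag(true);     // surface remote exceptions to the caller
        exec.execute(functionId);
        return collector.getResult();
      }
    }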