Posted to commits@geode.apache.org by hi...@apache.org on 2016/09/20 23:00:42 UTC

[13/14] incubator-geode git commit: GEODE-37 changed package name in spark-connector
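
For context, this patch only moves package names from the io.pivotal prefix to org.apache; downstream applications migrate by updating their imports. A minimal, hypothetical Scala sketch of the before/after (the object name is illustrative only, not part of this commit):

    // Old imports (pre-GEODE-37):
    //   import io.pivotal.geode.spark.connector._
    //   import io.pivotal.geode.spark.connector.streaming._

    // New imports after this commit:
    import org.apache.geode.spark.connector._            // RDD/SparkContext/SQLContext implicits
    import org.apache.geode.spark.connector.streaming._  // DStream implicits

    object MigrationSketch {
      // Once the new packages are imported, the implicit-based API
      // (e.g. dstream.saveToGeode(...)) is available unchanged; only the
      // package prefix moved from io.pivotal to org.apache.
    }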

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRegionRDD.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRegionRDD.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRegionRDD.scala
index 6980c0f..a73be8f 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRegionRDD.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRegionRDD.scala
@@ -14,14 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
 import scala.collection.Seq
 import scala.reflect.ClassTag
 import org.apache.spark.rdd.RDD
 import org.apache.spark.{TaskContext, Partition, SparkContext}
-import io.pivotal.geode.spark.connector.{GeodeConnectionConf, PreferredPartitionerPropKey}
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
+import org.apache.geode.spark.connector.{GeodeConnectionConf, PreferredPartitionerPropKey}
+import org.apache.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
 
 /**
  * This class exposes Geode region as a RDD.

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/GeodeJavaRegionRDD.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/GeodeJavaRegionRDD.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/GeodeJavaRegionRDD.scala
index f859173..9c1f923 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/GeodeJavaRegionRDD.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/GeodeJavaRegionRDD.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi
+package org.apache.geode.spark.connector.javaapi
 
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRegionRDD
+import org.apache.geode.spark.connector.internal.rdd.GeodeRegionRDD
 import org.apache.spark.api.java.JavaPairRDD
 
 class GeodeJavaRegionRDD[K, V](rdd: GeodeRegionRDD[K, V]) extends JavaPairRDD[K, V](rdd)(rdd.kClassTag, rdd.vClassTag) {

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/JavaAPIHelper.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/JavaAPIHelper.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/JavaAPIHelper.scala
index ffa6195..423b8bb 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/JavaAPIHelper.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/javaapi/JavaAPIHelper.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi
+package org.apache.geode.spark.connector.javaapi
 
 import org.apache.spark.api.java.{JavaPairRDD, JavaRDD}
 import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStream}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/package.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/package.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/package.scala
index 6f9a780..296695f 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/package.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/package.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark
+package org.apache.geode.spark
 
-import io.pivotal.geode.spark.connector.internal.rdd.{ServerSplitsPartitioner, OnePartitionPartitioner}
+import org.apache.geode.spark.connector.internal.rdd.{ServerSplitsPartitioner, OnePartitionPartitioner}
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.SQLContext

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/GeodeDStreamFunctions.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/GeodeDStreamFunctions.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/GeodeDStreamFunctions.scala
index 4d46429..7f06fba 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/GeodeDStreamFunctions.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/GeodeDStreamFunctions.scala
@@ -14,17 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.streaming
+package org.apache.geode.spark.connector.streaming
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodePairRDDWriter, GeodeRDDWriter}
+import org.apache.geode.spark.connector.GeodeConnectionConf
+import org.apache.geode.spark.connector.internal.rdd.{GeodePairRDDWriter, GeodeRDDWriter}
 import org.apache.spark.Logging
 import org.apache.spark.api.java.function.PairFunction
 import org.apache.spark.streaming.dstream.DStream
 
 /**
  * Extra geode functions on DStream of non-pair elements through an implicit conversion.
- * Import `io.pivotal.geode.spark.connector.streaming._` at the top of your program to
+ * Import `org.apache.geode.spark.connector.streaming._` at the top of your program to
  * use these functions.
  */
 class GeodeDStreamFunctions[T](val dstream: DStream[T]) extends Serializable with Logging {
@@ -63,7 +63,7 @@ class GeodeDStreamFunctions[T](val dstream: DStream[T]) extends Serializable wit
 
 /**
  * Extra geode functions on DStream of (key, value) pairs through an implicit conversion.
- * Import `io.pivotal.geode.spark.connector.streaming._` at the top of your program to
+ * Import `org.apache.geode.spark.connector.streaming._` at the top of your program to
  * use these functions.
  */
 class GeodePairDStreamFunctions[K, V](val dstream: DStream[(K,V)]) extends Serializable with Logging {
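
As the updated scaladoc above notes, the extra DStream functions arrive through an implicit conversion once the streaming package is imported. A minimal sketch under the new package name, mirroring the calls exercised in GeodeDStreamFunctionsTest below (the region path and the DStream/GeodeConnectionConf values are placeholders; how the connection conf is built is not shown in this patch):

    import org.apache.spark.streaming.dstream.DStream
    import org.apache.geode.spark.connector.GeodeConnectionConf
    import org.apache.geode.spark.connector.streaming._   // brings the saveToGeode implicits into scope

    def sketch(words: DStream[String], pairs: DStream[(String, Int)],
               connConf: GeodeConnectionConf): Unit = {
      // non-pair DStream: supply a function that maps each element to a (key, value) pair
      words.saveToGeode[String, Int]("str_int_region", (s: String) => (s, s.length), connConf)
      // pair DStream: elements are already (key, value) pairs
      pairs.saveToGeode("str_int_region", connConf)
    }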

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/package.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/package.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/package.scala
index 0d1f1eb..c4e0f27 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/package.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/streaming/package.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import org.apache.spark.streaming.dstream.DStream
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/java/org/apache/geode/spark/connector/JavaAPITest.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/java/org/apache/geode/spark/connector/JavaAPITest.java b/geode-spark-connector/geode-spark-connector/src/test/java/org/apache/geode/spark/connector/JavaAPITest.java
index 142907e..9af52da 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/java/org/apache/geode/spark/connector/JavaAPITest.java
+++ b/geode-spark-connector/geode-spark-connector/src/test/java/org/apache/geode/spark/connector/JavaAPITest.java
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector;
+package org.apache.geode.spark.connector;
 
-import io.pivotal.geode.spark.connector.javaapi.*;
+import org.apache.geode.spark.connector.javaapi.*;
 import org.apache.spark.SparkContext;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
@@ -38,7 +38,7 @@ import scala.collection.mutable.LinkedList;
 import scala.reflect.ClassTag;
 
 import static org.junit.Assert.*;
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.*;
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/GeodeFunctionDeployerTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/GeodeFunctionDeployerTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/GeodeFunctionDeployerTest.scala
index 4e45dc2..77a5668 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/GeodeFunctionDeployerTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/GeodeFunctionDeployerTest.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManagerTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManagerTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManagerTest.scala
index 798912c..4863a61 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManagerTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManagerTest.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal
+package org.apache.geode.spark.connector.internal
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf
+import org.apache.geode.spark.connector.GeodeConnectionConf
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{FunSuite, Matchers}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/gemfirefunctions/StructStreamingResultSenderAndCollectorTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/gemfirefunctions/StructStreamingResultSenderAndCollectorTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/gemfirefunctions/StructStreamingResultSenderAndCollectorTest.scala
index c95f1dc..ca77fb0 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/gemfirefunctions/StructStreamingResultSenderAndCollectorTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/gemfirefunctions/StructStreamingResultSenderAndCollectorTest.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions
+package org.apache.geode.spark.connector.internal.geodefunctions
 
 import org.apache.geode.DataSerializer
 import org.apache.geode.cache.execute.{ResultCollector, ResultSender}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/oql/QueryParserTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/oql/QueryParserTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/oql/QueryParserTest.scala
index 54394e8..f5c32dc 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/oql/QueryParserTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/org/apache/geode/spark/connector/internal/oql/QueryParserTest.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import org.scalatest.FunSuite
 
@@ -55,8 +55,8 @@ class QueryParserTest extends FunSuite {
     assert(r == "List(/r1/r2)")
   }
 
-  test("IMPORT io.pivotal.geode IMPORT com.mypackage SELECT key,value FROM /root/sub.entries WHERE status = 'active' ORDER BY id desc") {
-    val r = QueryParser.parseOQL("IMPORT io.pivotal.geode IMPORT com.mypackage SELECT key,value FROM /root/sub.entries WHERE status = 'active' ORDER BY id desc").get
+  test("IMPORT org.apache.geode IMPORT com.mypackage SELECT key,value FROM /root/sub.entries WHERE status = 'active' ORDER BY id desc") {
+    val r = QueryParser.parseOQL("IMPORT org.apache.geode IMPORT com.mypackage SELECT key,value FROM /root/sub.entries WHERE status = 'active' ORDER BY id desc").get
     assert(r == "List(/root/sub.entries)")
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/ConnectorImplicitsTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/ConnectorImplicitsTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/ConnectorImplicitsTest.scala
index b0464cc..7b247af 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/ConnectorImplicitsTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/ConnectorImplicitsTest.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector
+package unittest.org.apache.geode.spark.connector
 
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import org.apache.spark.SparkContext
 import org.apache.spark.sql.SQLContext
 import org.scalatest.FunSuite

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeConnectionConfTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeConnectionConfTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeConnectionConfTest.scala
index a3076f4..2fad7a6 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeConnectionConfTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeConnectionConfTest.scala
@@ -14,13 +14,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector
+package unittest.org.apache.geode.spark.connector
 
 import org.apache.spark.SparkConf
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
 import org.scalatest.{Matchers, FunSuite}
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 
 class GeodeConnectionConfTest extends FunSuite with Matchers with MockitoSugar {
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeDStreamFunctionsTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeDStreamFunctionsTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeDStreamFunctionsTest.scala
index d671722..19ec4f0 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeDStreamFunctionsTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeDStreamFunctionsTest.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector
+package unittest.org.apache.geode.spark.connector
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.{GeodeConnection, GeodeConnectionConf}
+import org.apache.geode.spark.connector.{GeodeConnection, GeodeConnectionConf}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.streaming.dstream.DStream
 import org.mockito.Mockito._
@@ -30,7 +30,7 @@ import scala.reflect.ClassTag
 class GeodeDStreamFunctionsTest extends FunSuite with Matchers with MockitoSugar {
 
   test("test GeodePairDStreamFunctions Implicit") {
-    import io.pivotal.geode.spark.connector.streaming._
+    import org.apache.geode.spark.connector.streaming._
     val mockDStream = mock[DStream[(Int, String)]]
     // the implicit make the following line valid
     val pairDStream: GeodePairDStreamFunctions[Int, String] = mockDStream
@@ -38,7 +38,7 @@ class GeodeDStreamFunctionsTest extends FunSuite with Matchers with MockitoSugar
   }
 
   test("test GeodeDStreamFunctions Implicit") {
-    import io.pivotal.geode.spark.connector.streaming._
+    import org.apache.geode.spark.connector.streaming._
     val mockDStream = mock[DStream[String]]
     // the implicit make the following line valid
     val dstream: GeodeDStreamFunctions[String] = mockDStream
@@ -57,7 +57,7 @@ class GeodeDStreamFunctionsTest extends FunSuite with Matchers with MockitoSugar
   }
 
   test("test GeodePairDStreamFunctions.saveToGeode()") {
-    import io.pivotal.geode.spark.connector.streaming._
+    import org.apache.geode.spark.connector.streaming._
     val (regionPath, mockConnConf, mockConnection, mockRegion) = createMocks[String, String]("test")
     val mockDStream = mock[DStream[(String, String)]]
     mockDStream.saveToGeode(regionPath, mockConnConf)
@@ -67,7 +67,7 @@ class GeodeDStreamFunctionsTest extends FunSuite with Matchers with MockitoSugar
   }
 
   test("test GeodeDStreamFunctions.saveToGeode()") {
-    import io.pivotal.geode.spark.connector.streaming._
+    import org.apache.geode.spark.connector.streaming._
     val (regionPath, mockConnConf, mockConnection, mockRegion) = createMocks[String, Int]("test")
     val mockDStream = mock[DStream[String]]
     mockDStream.saveToGeode[String, Int](regionPath,  (s: String) => (s, s.length), mockConnConf)

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeRDDFunctionsTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeRDDFunctionsTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeRDDFunctionsTest.scala
index 5259198..a6fc91d 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeRDDFunctionsTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/GeodeRDDFunctionsTest.scala
@@ -14,11 +14,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector
+package unittest.org.apache.geode.spark.connector
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector._
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodeRDDWriter, GeodePairRDDWriter}
+import org.apache.geode.spark.connector._
+import org.apache.geode.spark.connector.internal.rdd.{GeodeRDDWriter, GeodePairRDDWriter}
 import org.apache.spark.{TaskContext, SparkContext}
 import org.apache.spark.rdd.RDD
 import org.mockito.Mockito._
@@ -31,7 +31,7 @@ import org.mockito.Matchers.{eq => mockEq, any => mockAny}
 class GeodeRDDFunctionsTest extends FunSuite with Matchers with MockitoSugar {
 
   test("test PairRDDFunction Implicit") {
-    import io.pivotal.geode.spark.connector._
+    import org.apache.geode.spark.connector._
     val mockRDD = mock[RDD[(Int, String)]]
     // the implicit make the following line valid
     val pairRDD: GeodePairRDDFunctions[Int, String] = mockRDD
@@ -39,7 +39,7 @@ class GeodeRDDFunctionsTest extends FunSuite with Matchers with MockitoSugar {
   }
   
   test("test RDDFunction Implicit") {
-    import io.pivotal.geode.spark.connector._
+    import org.apache.geode.spark.connector._
     val mockRDD = mock[RDD[String]]
     // the implicit make the following line valid
     val nonPairRDD: GeodeRDDFunctions[String] = mockRDD
@@ -85,7 +85,7 @@ class GeodeRDDFunctionsTest extends FunSuite with Matchers with MockitoSugar {
   }
   
   def verifyPairRDDFunction(useOpConf: Boolean): Unit = {
-    import io.pivotal.geode.spark.connector._
+    import org.apache.geode.spark.connector._
     val (regionPath, mockConnConf, mockConnection, mockRegion) = createMocks[String, String]("test")
     val mockRDD = mock[RDD[(String, String)]]
     val mockSparkContext = mock[SparkContext]
@@ -115,7 +115,7 @@ class GeodeRDDFunctionsTest extends FunSuite with Matchers with MockitoSugar {
   }
   
   def verifyRDDFunction(useOpConf: Boolean): Unit = {
-    import io.pivotal.geode.spark.connector._
+    import org.apache.geode.spark.connector._
     val (regionPath, mockConnConf, mockConnection, mockRegion) = createMocks[Int, String]("test")
     val mockRDD = mock[RDD[(String)]]
     val mockSparkContext = mock[SparkContext]

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/LocatorHelperTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/LocatorHelperTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/LocatorHelperTest.scala
index c775784..5719996 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/LocatorHelperTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/LocatorHelperTest.scala
@@ -14,11 +14,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector
+package unittest.org.apache.geode.spark.connector
 
 import java.net.InetAddress
 
-import io.pivotal.geode.spark.connector.internal.LocatorHelper
+import org.apache.geode.spark.connector.internal.LocatorHelper
 import org.scalatest.FunSuite
 
 class LocatorHelperTest extends FunSuite {

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRDDPartitionerTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRDDPartitionerTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRDDPartitionerTest.scala
index 2f92c1d..9a42866 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRDDPartitionerTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRDDPartitionerTest.scala
@@ -14,13 +14,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector.rdd
+package unittest.org.apache.geode.spark.connector.rdd
 
 import org.apache.geode.distributed.internal.ServerLocation
-import io.pivotal.geode.spark.connector.internal.RegionMetadata
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
-import io.pivotal.geode.spark.connector.GeodeConnection
-import io.pivotal.geode.spark.connector.internal.rdd._
+import org.apache.geode.spark.connector.internal.RegionMetadata
+import org.apache.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
+import org.apache.geode.spark.connector.GeodeConnection
+import org.apache.geode.spark.connector.internal.rdd._
 import org.apache.spark.Partition
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
@@ -55,7 +55,7 @@ class GeodeRDDPartitionerTest extends FunSuite with Matchers with MockitoSugar {
   }
   
   test("GeodeRDDPartitioner.apply method") {
-    import io.pivotal.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
+    import org.apache.geode.spark.connector.internal.rdd.GeodeRDDPartitioner._
     for ((name, partitioner) <- partitioners) assert(GeodeRDDPartitioner(name) == partitioner)
     assert(GeodeRDDPartitioner("dummy") == GeodeRDDPartitioner.defaultPartitionedRegionPartitioner)
     assert(GeodeRDDPartitioner() == GeodeRDDPartitioner.defaultPartitionedRegionPartitioner)
@@ -158,7 +158,7 @@ class GeodeRDDPartitionerTest extends FunSuite with Matchers with MockitoSugar {
   }
 
   test("ServerSplitsPartitioner.partitions(): partitioned region w/ some data ") {
-    import io.pivotal.geode.spark.connector.NumberPartitionsPerServerPropKey
+    import org.apache.geode.spark.connector.NumberPartitionsPerServerPropKey
     val regionPath = "test"
     val mockConnection = mock[GeodeConnection]
     val map: Map[(String, Int), Set[Int]] = Map(

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRegionRDDTest.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRegionRDDTest.scala b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRegionRDDTest.scala
index 63eddf9..8a5c67e 100644
--- a/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRegionRDDTest.scala
+++ b/geode-spark-connector/geode-spark-connector/src/test/scala/unittest/org/apache/geode/spark/connector/rdd/GeodeRegionRDDTest.scala
@@ -14,12 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package unittest.io.pivotal.geode.spark.connector.rdd
+package unittest.org.apache.geode.spark.connector.rdd
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.internal.RegionMetadata
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodeRDDPartition, GeodeRegionRDD}
-import io.pivotal.geode.spark.connector.{GeodeConnectionConf, GeodeConnection}
+import org.apache.geode.spark.connector.internal.RegionMetadata
+import org.apache.geode.spark.connector.internal.rdd.{GeodeRDDPartition, GeodeRegionRDD}
+import org.apache.geode.spark.connector.{GeodeConnectionConf, GeodeConnection}
 import org.apache.spark.{TaskContext, Partition, SparkContext}
 import org.mockito.Mockito._
 import org.mockito.Matchers.{eq => mockEq, any => mockAny}
@@ -78,7 +78,7 @@ class GeodeRegionRDDTest extends FunSuite with Matchers with MockitoSugar {
 
   test("getPartitions with replicated region and preferred OnePartitionPartitioner") {
     // since it's replicated region, so OnePartitionPartitioner will be used, i.e., override preferred partitioner
-    import io.pivotal.geode.spark.connector.{PreferredPartitionerPropKey, OnePartitionPartitionerName}
+    import org.apache.geode.spark.connector.{PreferredPartitionerPropKey, OnePartitionPartitionerName}
     val (regionPath, mockRegion, mockConnConf, mockConnection) = createMocks[String, String]("test")
     when(mockConnection.getRegionMetadata[String, String](regionPath)).thenReturn(Some(new RegionMetadata(regionPath, false, 0, null)))
     implicit val mockConnConf2 = mockConnConf

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
index adcf072..9107796 100644
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
+++ b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/OQLJavaDemo.java
@@ -20,7 +20,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.DataFrame;
 import org.apache.spark.sql.SQLContext;
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
index 52d2a99..d1edd41 100644
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
+++ b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/PairRDDSaveJavaDemo.java
@@ -16,7 +16,7 @@
  */
 package demo;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
@@ -24,7 +24,7 @@ import org.apache.spark.api.java.JavaSparkContext;
 import scala.Tuple2;
 import java.util.*;
 
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 
 /**
  * This Spark application demonstrates how to save a RDD to Geode using Geode Spark

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
index 1125de5..22c01f4 100644
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
+++ b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RDDSaveJavaDemo.java
@@ -16,7 +16,7 @@
  */
 package demo;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -26,7 +26,7 @@ import scala.Tuple2;
 import java.util.ArrayList;
 import java.util.List;
 
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 
 /**
  * This Spark application demonstrates how to save a RDD to Geode using Geode Spark

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
index 1ce8ceb..41a07f5 100644
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
+++ b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/java/demo/RegionToRDDJavaDemo.java
@@ -19,7 +19,7 @@ package demo;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 
 /**
  * This Spark application demonstrates how to expose a region in Geode as a RDD using Geode

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
index 810b380..063ea69 100644
--- a/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
+++ b/geode-spark-connector/geode-spark-demos/basic-demos/src/main/scala/demo/NetworkWordCount.scala
@@ -18,8 +18,8 @@ package demo
 
 import org.apache.spark.SparkConf
 import org.apache.spark.streaming.{Seconds, StreamingContext}
-import io.pivotal.geode.spark.connector.GeodeLocatorPropKey
-import io.pivotal.geode.spark.connector.streaming._
+import org.apache.geode.spark.connector.GeodeLocatorPropKey
+import org.apache.geode.spark.connector.streaming._
 
 /**
  * Counts words in UTF8 encoded, '\n' delimited text received from the network every second.

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/project/Settings.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/project/Settings.scala b/geode-spark-connector/project/Settings.scala
index 9aefa9b..8cfe38f 100644
--- a/geode-spark-connector/project/Settings.scala
+++ b/geode-spark-connector/project/Settings.scala
@@ -24,7 +24,7 @@ object Settings extends Build {
     organization := "io.pivotal",
     version := "0.5.0",
     scalaVersion := "2.10.4",
-    organization := "io.pivotal.geode.spark",
+    organization := "org.apache.geode.spark",
     organizationHomepage := Some(url("http://www.pivotal.io/"))
   )