Posted to commits@bahir.apache.org by lr...@apache.org on 2019/06/12 20:32:05 UTC

[bahir] branch master updated (221c1f4 -> 971a88e)

This is an automated email from the ASF dual-hosted git repository.

lresende pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/bahir.git.


    from 221c1f4  Updated sql-cloudant dependencies (#90)
     new 843cd71  Fix warnings around deprecated usage of shouldRunTest
     new 971a88e  Fix warnings about undeclared variables on javadoc

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../org/apache/bahir/cloudant/ClientSparkFunSuite.scala |  2 +-
 .../apache/bahir/cloudant/CloudantAllDocsDFSuite.scala  | 15 ++++++++-------
 .../apache/bahir/cloudant/CloudantChangesDFSuite.scala  | 17 +++++++++--------
 .../org/apache/bahir/cloudant/CloudantOptionSuite.scala | 15 ++++++++-------
 .../apache/bahir/cloudant/CloudantSparkSQLSuite.scala   |  4 ++--
 .../spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala      |  6 +++---
 .../spark/streaming/pubnub/PubNubStreamSuite.scala      |  6 +++---
 .../spark/streaming/pubsub/PubsubStreamSuite.scala      |  2 +-
 .../spark/streaming/twitter/TwitterStreamSuite.scala    |  2 +-
 9 files changed, 36 insertions(+), 33 deletions(-)


[bahir] 02/02: Fix warnings about undeclared variables on javadoc

Posted by lr...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lresende pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bahir.git

commit 971a88e66ed43d1d4c0e9346cce95caf0e5019ea
Author: Luciano Resende <lr...@apache.org>
AuthorDate: Tue Jun 11 14:49:37 2019 +0200

    Fix warnings about undeclared variables on javadoc
---
 .../scala/org/apache/spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql-streaming-mqtt/src/main/scala/org/apache/spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala b/sql-streaming-mqtt/src/main/scala/org/apache/spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala
index fd39557..d6aab23 100644
--- a/sql-streaming-mqtt/src/main/scala/org/apache/spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala
+++ b/sql-streaming-mqtt/src/main/scala/org/apache/spark/sql/mqtt/HdfsBasedMQTTStreamSource.scala
@@ -321,9 +321,9 @@ class HdfsBasedMQTTStreamSource(
   }
 
   /**
-   * Returns the data that is between the offsets (`start`, `end`].
-   * The batch return the data in file ${checkpointPath}/receivedMessages/${end}.
-   * `Start` and `end` value have the relationship: `end value` = `start valud` + 1,
+   * Returns the data that is between the offsets (`start`, `end`).
+   * The batch return the data in file {checkpointPath}/receivedMessages/{end}.
+   * `Start` and `end` value have the relationship: `end value` = `start value` + 1,
    * if `start` is not None.
    */
   override def getBatch(start: Option[Offset], end: Offset): DataFrame = {

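The "undeclared variables" warning fixed above comes from Scaladoc's variable
syntax: `$name` in a doc comment refers to a variable declared with
`@define name ...`, so an undeclared reference such as `${checkpointPath}` is
reported as "Variable checkpointPath undefined" when the docs are generated.
Dropping the `$`, as the hunk does, leaves plain text that Scaladoc no longer
tries to resolve. A minimal sketch of both forms (a standalone, illustrative
file; running scaladoc over it reproduces the warning):

    object ScaladocVariableDemo {
      /** Reads the file ${checkpointPath}/receivedMessages/${end}. */
      // Scaladoc warns here ("Variable checkpointPath undefined", and likewise
      // for `end`) because no `@define checkpointPath ...` is in scope.
      def before(): Unit = ()

      /** Reads the file {checkpointPath}/receivedMessages/{end}. */
      // Plain braces with no `$` are ordinary comment text, so no warning.
      def after(): Unit = ()
    }

Declaring the variables with `@define` would also silence the warning, but for
a literal path template the plain-text form used in the commit is the simpler fix.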

[bahir] 01/02: Fix warnings around deprecated usage of shouldRunTest

Posted by lr...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lresende pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bahir.git

commit 843cd71a2c1b14e6babe9538cd0e9982f8bab836
Author: Luciano Resende <lr...@apache.org>
AuthorDate: Tue Jun 11 13:47:17 2019 +0200

    Fix warnings around deprecated usage of shouldRunTest
---
 .../org/apache/bahir/cloudant/ClientSparkFunSuite.scala |  2 +-
 .../apache/bahir/cloudant/CloudantAllDocsDFSuite.scala  | 15 ++++++++-------
 .../apache/bahir/cloudant/CloudantChangesDFSuite.scala  | 17 +++++++++--------
 .../org/apache/bahir/cloudant/CloudantOptionSuite.scala | 15 ++++++++-------
 .../apache/bahir/cloudant/CloudantSparkSQLSuite.scala   |  4 ++--
 .../spark/streaming/pubnub/PubNubStreamSuite.scala      |  6 +++---
 .../spark/streaming/pubsub/PubsubStreamSuite.scala      |  2 +-
 .../spark/streaming/twitter/TwitterStreamSuite.scala    |  2 +-
 8 files changed, 33 insertions(+), 30 deletions(-)

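For context on the hunks below: Scala 2.12 deprecated automatic eta-expansion
of zero-argument methods, so passing a bare `TestUtils.shouldRunTest` where a
`() => Boolean` parameter is expected triggers "Eta-expansion of zero-argument
methods is deprecated" under -deprecation. Every hunk in this commit replaces
the bare method reference with an explicit function literal. A minimal sketch
of the pattern, with assumed signatures for the test helpers (the real
`ConditionalSparkFunSuite` and `TestUtils` live in Bahir's test sources; the
bodies here are illustrative):

    import org.scalatest.FunSuite

    // Assumed shape of Bahir's conditional test helpers.
    trait ConditionalSparkFunSuite extends FunSuite {
      // Run the test only when `condition()` holds; otherwise mark it ignored.
      def testIf(name: String, condition: () => Boolean)(body: => Unit): Unit =
        if (condition()) test(name)(body) else ignore(name)(body)

      // Run setup code (e.g. beforeAll work) only when `condition()` holds.
      def runIf(condition: () => Boolean)(body: => Unit): Unit =
        if (condition()) body
    }

    object TestUtils {
      // Illustrative stand-in for the real environment check.
      def shouldRunTest(): Boolean = sys.env.contains("CLOUDANT_USER")
    }

    class ExampleSuite extends ConditionalSparkFunSuite {
      // Deprecated: testIf("load data", TestUtils.shouldRunTest)
      //   -> warning: Eta-expansion of zero-argument methods is deprecated
      // Fixed (the form every hunk below switches to):
      testIf("load data", () => TestUtils.shouldRunTest()) {
        assert(2 + 2 == 4)
      }
    }

Behavior is unchanged; the explicit `() => ...` simply spells out the
conversion the compiler previously inserted for free.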
diff --git a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/ClientSparkFunSuite.scala b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/ClientSparkFunSuite.scala
index ed14f17..6946803 100644
--- a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/ClientSparkFunSuite.scala
+++ b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/ClientSparkFunSuite.scala
@@ -40,7 +40,7 @@ class ClientSparkFunSuite extends ConditionalSparkFunSuite with BeforeAndAfter {
   var spark: SparkSession = _
 
   override def beforeAll() {
-    runIf(TestUtils.shouldRunTest) {
+    runIf(() => TestUtils.shouldRunTest()) {
       tempDir.mkdirs()
       tempDir.deleteOnExit()
       setupClient()
diff --git a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantAllDocsDFSuite.scala b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantAllDocsDFSuite.scala
index c0389d8..e012336 100644
--- a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantAllDocsDFSuite.scala
+++ b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantAllDocsDFSuite.scala
@@ -35,7 +35,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
       .getOrCreate()
   }
 
-  testIf("load and save data from Cloudant database", TestUtils.shouldRunTest) {
+  testIf("load and save data from Cloudant database", () => TestUtils.shouldRunTest()) {
     // Loading data from Cloudant db
     val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")
     // Caching df in memory to speed computations
@@ -46,7 +46,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
     assert(df.count() == 100)
   }
 
-  testIf("load and count data from Cloudant search index", TestUtils.shouldRunTest) {
+  testIf("load and count data from Cloudant search index", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("index", "_design/view/_search/n_flights").load("n_flight")
     val total = df.filter(df("flightSegmentId") >"AA14")
@@ -55,7 +55,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
     assert(total == 89)
   }
 
-  testIf("load data and count rows in filtered dataframe", TestUtils.shouldRunTest) {
+  testIf("load data and count rows in filtered dataframe", () => TestUtils.shouldRunTest()) {
     // Loading data from Cloudant db
     val df = spark.read.format("org.apache.bahir.cloudant")
       .load("n_airportcodemapping")
@@ -64,7 +64,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
   }
 
   // save data to Cloudant test
-  testIf("save filtered dataframe to database", TestUtils.shouldRunTest) {
+  testIf("save filtered dataframe to database", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")
 
     // Saving data frame with filter to Cloudant db
@@ -81,7 +81,8 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
   }
 
   // createDBOnSave option test
-  testIf("save dataframe to database using createDBOnSave=true option", TestUtils.shouldRunTest) {
+  testIf("save dataframe to database using createDBOnSave=true option",
+         () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .load("n_airportcodemapping")
 
@@ -106,13 +107,13 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
   }
 
   // view option tests
-  testIf("load and count data from view", TestUtils.shouldRunTest) {
+  testIf("load and count data from view", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("view", "_design/view/_view/AA0").load("n_flight")
     assert(df.count() == 1)
   }
 
-  testIf("load data from view with MapReduce function", TestUtils.shouldRunTest) {
+  testIf("load data from view with MapReduce function", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("view", "_design/view/_view/AAreduce?reduce=true")
       .load("n_flight")
diff --git a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantChangesDFSuite.scala b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantChangesDFSuite.scala
index dd5d508..fe92ef2 100644
--- a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantChangesDFSuite.scala
+++ b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantChangesDFSuite.scala
@@ -47,7 +47,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
     assert(df.count() == 100)
   }
 
-  testIf("load and count data from Cloudant search index", TestUtils.shouldRunTest) {
+  testIf("load and count data from Cloudant search index", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("index", "_design/view/_search/n_flights").load("n_flight")
     val total = df.filter(df("flightSegmentId") >"AA14")
@@ -56,7 +56,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
     assert(total == 89)
   }
 
-  testIf("load data and verify deleted doc is not in results", TestUtils.shouldRunTest) {
+  testIf("load data and verify deleted doc is not in results", () => TestUtils.shouldRunTest()) {
     val db = client.database("n_flight", false)
     // delete a saved doc to verify it's not included when loading data
     db.remove(deletedDoc.get("_id").getAsString, deletedDoc.get("_rev").getAsString)
@@ -68,7 +68,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
     assert(!df.columns.contains("_deleted"))
   }
 
-  testIf("load data and count rows in filtered dataframe", TestUtils.shouldRunTest) {
+  testIf("load data and count rows in filtered dataframe", () => TestUtils.shouldRunTest()) {
     // Loading data from Cloudant db
     val df = spark.read.format("org.apache.bahir.cloudant")
       .load("n_airportcodemapping")
@@ -77,7 +77,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
   }
 
   // save data to Cloudant test
-  testIf("save filtered dataframe to database", TestUtils.shouldRunTest) {
+  testIf("save filtered dataframe to database", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")
 
     // Saving data frame with filter to Cloudant db
@@ -94,7 +94,8 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
   }
 
   // createDBOnSave option test
-  testIf("save dataframe to database using createDBOnSave=true option", TestUtils.shouldRunTest) {
+  testIf("save dataframe to database using createDBOnSave=true option",
+         () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .load("n_airportcodemapping")
 
@@ -124,13 +125,13 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
   }
 
   // view option tests
-  testIf("load and count data from view", TestUtils.shouldRunTest) {
+  testIf("load and count data from view", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("view", "_design/view/_view/AA0").load("n_flight")
     assert(df.count() == 1)
   }
 
-  testIf("load data from view with MapReduce function", TestUtils.shouldRunTest) {
+  testIf("load data from view with MapReduce function", () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("view", "_design/view/_view/AAreduce?reduce=true")
       .load("n_flight")
@@ -138,7 +139,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
   }
 
   testIf("load data and verify total count of selector, filter, and view option",
-      TestUtils.shouldRunTest) {
+      () => TestUtils.shouldRunTest()) {
     val df = spark.read.format("org.apache.bahir.cloudant")
       .option("selector", "{\"flightSegmentId\": {\"$eq\": \"AA202\"}}")
       .load("n_flight")
diff --git a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantOptionSuite.scala b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantOptionSuite.scala
index f5df759..5fb5d53 100644
--- a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantOptionSuite.scala
+++ b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantOptionSuite.scala
@@ -29,7 +29,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
     spark.close()
   }
 
-  testIf("invalid api receiver option throws an error message", TestUtils.shouldRunTest) {
+  testIf("invalid api receiver option throws an error message", () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -45,7 +45,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
       s"is invalid. Please supply the valid option '_all_docs' or '_changes'.")
   }
 
-  testIf("empty username option throws an error message", TestUtils.shouldRunTest) {
+  testIf("empty username option throws an error message", () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -60,7 +60,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
       s"is empty. Please supply the required value.")
   }
 
-  testIf("empty password option throws an error message", TestUtils.shouldRunTest) {
+  testIf("empty password option throws an error message", () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -75,7 +75,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
       s"is empty. Please supply the required value.")
   }
 
-  testIf("empty databaseName throws an error message", TestUtils.shouldRunTest) {
+  testIf("empty databaseName throws an error message", () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -91,7 +91,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
   }
 
   testIf("incorrect password throws an error message for changes receiver",
-      TestUtils.shouldRunTest) {
+      () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -108,7 +108,8 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
       "\"reason\":\"Name or password is incorrect.\"}")
   }
 
-  testIf("string with valid value for cloudant.numberOfRetries option", TestUtils.shouldRunTest) {
+  testIf("string with valid value for cloudant.numberOfRetries option",
+         () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
@@ -122,7 +123,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
   }
 
   testIf("invalid value for cloudant.numberOfRetries option throws an error message",
-      TestUtils.shouldRunTest) {
+      () => TestUtils.shouldRunTest()) {
     spark = SparkSession.builder().config(conf)
       .config("cloudant.protocol", TestUtils.getProtocol)
       .config("cloudant.host", TestUtils.getHost)
diff --git a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantSparkSQLSuite.scala b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantSparkSQLSuite.scala
index 6736da2..9100130 100644
--- a/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantSparkSQLSuite.scala
+++ b/sql-cloudant/src/test/scala/org/apache/bahir/cloudant/CloudantSparkSQLSuite.scala
@@ -39,7 +39,7 @@ class CloudantSparkSQLSuite extends ClientSparkFunSuite {
   }
 
   testIf("verify results from temp view of database n_airportcodemapping",
-      TestUtils.shouldRunTest) {
+      () => TestUtils.shouldRunTest()) {
     // create a temp table from Cloudant db and query it using sql syntax
     val sparkSql = spark.sql(
       s"""
@@ -67,7 +67,7 @@ class CloudantSparkSQLSuite extends ClientSparkFunSuite {
     assert(df2count == airportData.count())
   }
 
-  testIf("verify results from temp view of index in n_flight", TestUtils.shouldRunTest) {
+  testIf("verify results from temp view of index in n_flight", () => TestUtils.shouldRunTest()) {
     // create a temp table from Cloudant index  and query it using sql syntax
     val sparkSql = spark.sql(
       s"""
diff --git a/streaming-pubnub/src/test/scala/org/apache/spark/streaming/pubnub/PubNubStreamSuite.scala b/streaming-pubnub/src/test/scala/org/apache/spark/streaming/pubnub/PubNubStreamSuite.scala
index 91011fb..4990b1f 100644
--- a/streaming-pubnub/src/test/scala/org/apache/spark/streaming/pubnub/PubNubStreamSuite.scala
+++ b/streaming-pubnub/src/test/scala/org/apache/spark/streaming/pubnub/PubNubStreamSuite.scala
@@ -65,7 +65,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
     }
   }
 
-  testIf("Stream receives messages", shouldRunTest) {
+  testIf("Stream receives messages", () => PubNubStreamSuite.this.shouldRunTest()) {
     val nbOfMsg = 5
     var publishedMessages: List[JsonObject] = List()
     @volatile var receivedMessages: Set[SparkPubNubMessage] = Set()
@@ -101,7 +101,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
     }
   }
 
-  testIf("Message filtering", shouldRunTest) {
+  testIf("Message filtering", () => PubNubStreamSuite.this.shouldRunTest()) {
     val config = new PNConfiguration()
     config.setSubscribeKey(subscribeKey)
     config.setPublishKey(publishKey)
@@ -134,7 +134,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
     }
   }
 
-  testIf("Test time token", shouldRunTest) {
+  testIf("Test time token", () => PubNubStreamSuite.this.shouldRunTest()) {
     val config = new PNConfiguration()
     config.setSubscribeKey(subscribeKey)
     config.setPublishKey(publishKey)
diff --git a/streaming-pubsub/src/test/scala/org/apache/spark/streaming/pubsub/PubsubStreamSuite.scala b/streaming-pubsub/src/test/scala/org/apache/spark/streaming/pubsub/PubsubStreamSuite.scala
index b02f21d..8a67038 100644
--- a/streaming-pubsub/src/test/scala/org/apache/spark/streaming/pubsub/PubsubStreamSuite.scala
+++ b/streaming-pubsub/src/test/scala/org/apache/spark/streaming/pubsub/PubsubStreamSuite.scala
@@ -52,7 +52,7 @@ class PubsubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
   private var subForCreateFullName: String = _
 
   override def beforeAll(): Unit = {
-    runIf(PubsubTestUtils.shouldRunTest) {
+    runIf(() => PubsubTestUtils.shouldRunTest()) {
       pubsubTestUtils = new PubsubTestUtils
       topicFullName = pubsubTestUtils.getFullTopicPath(topicName)
       subscriptionFullName = pubsubTestUtils.getFullSubscriptionPath(subscriptionName)
diff --git a/streaming-twitter/src/test/scala/org/apache/spark/streaming/twitter/TwitterStreamSuite.scala b/streaming-twitter/src/test/scala/org/apache/spark/streaming/twitter/TwitterStreamSuite.scala
index 7d7886d..77c6625 100644
--- a/streaming-twitter/src/test/scala/org/apache/spark/streaming/twitter/TwitterStreamSuite.scala
+++ b/streaming-twitter/src/test/scala/org/apache/spark/streaming/twitter/TwitterStreamSuite.scala
@@ -71,7 +71,7 @@ class TwitterStreamSuite extends ConditionalSparkFunSuite
       ssc, Some(authorization), Some(query), StorageLevel.MEMORY_AND_DISK_SER_2)
   }
 
-  testIf("messages received", shouldRunTest) {
+  testIf("messages received", () => TwitterStreamSuite.this.shouldRunTest()) {
     val userId = TwitterFactory.getSingleton.updateStatus(
       UUID.randomUUID().toString
     ).getUser.getId
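
The streaming suites (PubNub, Pub/Sub, Twitter) hit the same deprecation, but
there `shouldRunTest` is resolved from the suite's own scope rather than
through `TestUtils`, which is why the replacement lambdas spell out the
receiver, e.g. `TwitterStreamSuite.this.shouldRunTest()`. A rough sketch,
reusing the `ConditionalSparkFunSuite` sketch above and assuming
`shouldRunTest` is a method available on the suite (where it is actually
defined in Bahir is not shown in this diff):

    class StreamSuiteSketch extends ConditionalSparkFunSuite {
      // Illustrative stand-in for the suite-level credential check.
      def shouldRunTest(): Boolean = sys.env.contains("TWITTER_CONSUMER_KEY")

      // Inside the lambda, `StreamSuiteSketch.this` pins the receiver to the
      // enclosing suite instance, mirroring the form used in the hunks above.
      testIf("messages received", () => StreamSuiteSketch.this.shouldRunTest()) {
        assert(true)
      }
    }

A bare `() => shouldRunTest()` would compile as well; the explicit qualifier
just makes the receiver of the call unambiguous.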