You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/01/27 06:03:39 UTC

hbase git commit: HBase-17549 HBase-Spark Module: Corrected - Incorrect log at println and unwanted comment code

Repository: hbase
Updated Branches:
  refs/heads/master fb2c89b1b -> 92fc4c0cc


HBase-17549 HBase-Spark Module: Corrected - Incorrect log at println and unwanted comment code

Signed-off-by: Michael Stack <st...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/92fc4c0c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/92fc4c0c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/92fc4c0c

Branch: refs/heads/master
Commit: 92fc4c0cc8efddae74662ac26c9b821402dd6394
Parents: fb2c89b
Author: chetkhatri <ck...@gmail.com>
Authored: Fri Jan 27 11:13:15 2017 +0530
Committer: Michael Stack <st...@apache.org>
Committed: Thu Jan 26 21:58:25 2017 -0800

----------------------------------------------------------------------
 .../spark/example/hbasecontext/HBaseBulkDeleteExample.scala  | 4 ++--
 .../spark/example/hbasecontext/HBaseBulkGetExample.scala     | 4 ++--
 .../spark/example/hbasecontext/HBaseBulkPutExample.scala     | 4 ++--
 .../example/hbasecontext/HBaseBulkPutExampleFromFile.scala   | 2 +-
 .../example/hbasecontext/HBaseBulkPutTimestampExample.scala  | 2 +-
 .../example/hbasecontext/HBaseDistributedScanExample.scala   | 8 +++-----
 .../example/hbasecontext/HBaseStreamingBulkPutExample.scala  | 6 +++---
 .../hbase/spark/example/rdd/HBaseBulkDeleteExample.scala     | 4 ++--
 .../hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala | 4 ++--
 .../hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala | 4 ++--
 .../spark/example/rdd/HBaseForeachPartitionExample.scala     | 4 ++--
 .../hbase/spark/example/rdd/HBaseMapPartitionExample.scala   | 4 ++--
 12 files changed, 24 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala
index f77721f..46135a5 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkDeleteExample.scala
@@ -31,7 +31,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkDeleteExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkDeletesExample {tableName} ")
+      println("HBaseBulkDeleteExample {tableName} is missing an argument")
       return
     }
 
@@ -60,4 +60,4 @@ object HBaseBulkDeleteExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala
index 88f52fb..1bdc90d 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkGetExample.scala
@@ -26,13 +26,13 @@ import org.apache.hadoop.hbase.client.Result
 import org.apache.spark.SparkConf
 
 /**
- * This is a simple example of getting records in HBase
+ * This is a simple example of getting records from HBase
  * with the bulkGet function.
  */
 object HBaseBulkGetExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseBulkGetExample {tableName} is missing an argument")
       return
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala
index 735efed..063f2c2 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExample.scala
@@ -31,7 +31,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("HBaseBulkPutExample {tableName} {columnFamily}")
+      println("HBaseBulkPutExample {tableName} {columnFamily} are missing arguments")
       return
     }
 
@@ -72,4 +72,4 @@ object HBaseBulkPutExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala
index 3fd3006..37a0358 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutExampleFromFile.scala
@@ -35,7 +35,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutExampleFromFile {
   def main(args: Array[String]) {
     if (args.length < 3) {
-      println("HBaseBulkPutExampleFromFile {tableName} {columnFamily} {inputFile}")
+      println("HBaseBulkPutExampleFromFile {tableName} {columnFamily} {inputFile} are missing arguments")
       return
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala
index ae92f37..fa78216 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseBulkPutTimestampExample.scala
@@ -32,7 +32,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutTimestampExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      System.out.println("HBaseBulkPutTimestampExample {tableName} {columnFamily}")
+      System.out.println("HBaseBulkPutTimestampExample {tableName} {columnFamily} are missing arguments")
       return
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala
index 852b198..bb2e79d 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseDistributedScanExample.scala
@@ -24,12 +24,12 @@ import org.apache.hadoop.hbase.client.Scan
 import org.apache.spark.SparkConf
 /**
  * This is a simple example of scanning records from HBase
- * with the hbaseRDD function.
+ * with the hbaseRDD function in Distributed fashion.
  */
 object HBaseDistributedScanExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("GenerateGraphs {tableName}")
+      println("HBaseDistributedScanExample {tableName} is missing an argument")
       return
     }
 
@@ -51,11 +51,9 @@ object HBaseDistributedScanExample {
       getRdd.foreach(v => println(Bytes.toString(v._1.get())))
 
       println("Length: " + getRdd.map(r => r._1.copyBytes()).collect().length);
-
-        //.collect().foreach(v => println(Bytes.toString(v._1.get())))
     } finally {
       sc.stop()
     }
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala
index 29afa49..8ac93ef 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/hbasecontext/HBaseStreamingBulkPutExample.scala
@@ -33,7 +33,7 @@ object HBaseStreamingBulkPutExample {
   def main(args: Array[String]) {
     if (args.length < 4) {
       println("HBaseStreamingBulkPutExample " +
-        "{host} {port} {tableName} {columnFamily}")
+        "{host} {port} {tableName} {columnFamily} are missing arguments")
       return
     }
 
@@ -42,7 +42,7 @@ object HBaseStreamingBulkPutExample {
     val tableName = args(2)
     val columnFamily = args(3)
 
-    val sparkConf = new SparkConf().setAppName("HBaseBulkPutTimestampExample " +
+    val sparkConf = new SparkConf().setAppName("HBaseStreamingBulkPutExample " +
       tableName + " " + columnFamily)
     val sc = new SparkContext(sparkConf)
     try {
@@ -71,4 +71,4 @@ object HBaseStreamingBulkPutExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala
index b8f40a8..83d3f9e 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkDeleteExample.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseBulkDeleteExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkDeletesExample {tableName} ")
+      println("HBaseBulkDeleteExample {tableName} is missing an argument")
       return
     }
 
@@ -61,4 +61,4 @@ object HBaseBulkDeleteExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala
index 9d59e96..eedabc3 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkGetExample.scala
@@ -24,13 +24,13 @@ import org.apache.hadoop.hbase.spark.HBaseRDDFunctions._
 import org.apache.spark.{SparkContext, SparkConf}
 
 /**
- * This is a simple example of getting records in HBase
+ * This is a simple example of getting records from HBase
  * with the bulkGet function.
  */
 object HBaseBulkGetExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseBulkGetExample {tableName} is missing an argument")
       return
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala
index 2d07e89..28711b8 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseBulkPutExample.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{SparkConf, SparkContext}
 object HBaseBulkPutExample {
    def main(args: Array[String]) {
      if (args.length < 2) {
-       println("HBaseBulkPutExample {tableName} {columnFamily}")
+       println("HBaseBulkPutExample {tableName} {columnFamily} are missing arguments")
        return
      }
 
@@ -73,4 +73,4 @@ object HBaseBulkPutExample {
        sc.stop()
      }
    }
- }
\ No newline at end of file
+ }

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala
index e2ad224..8dfefc2 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseForeachPartitionExample.scala
@@ -31,14 +31,14 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseForeachPartitionExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("HBaseBulkPutExample {tableName} {columnFamily}")
+      println("HBaseForeachPartitionExample {tableName} {columnFamily} are missing arguments")
       return
     }
 
     val tableName = args(0)
     val columnFamily = args(1)
 
-    val sparkConf = new SparkConf().setAppName("HBaseBulkPutExample " +
+    val sparkConf = new SparkConf().setAppName("HBaseForeachPartitionExample " +
       tableName + " " + columnFamily)
     val sc = new SparkContext(sparkConf)
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/92fc4c0c/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala
index bc444be..0d0b314 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/rdd/HBaseMapPartitionExample.scala
@@ -31,13 +31,13 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseMapPartitionExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseMapPartitionExample {tableName} is missing an argument")
       return
     }
 
     val tableName = args(0)
 
-    val sparkConf = new SparkConf().setAppName("HBaseBulkGetExample " + tableName)
+    val sparkConf = new SparkConf().setAppName("HBaseMapPartitionExample " + tableName)
     val sc = new SparkContext(sparkConf)
 
     try {