Posted to commits@hbase.apache.org by te...@apache.org on 2017/01/26 19:58:13 UTC
hbase git commit: HBASE-17547 Bug fix - TableCatalog doesn't support multiple columns from a single column family
Repository: hbase
Updated Branches:
refs/heads/master 81d3e25a7 -> 59cd8e510
HBASE-17547 Bug fix - TableCatalog doesn't support multiple columns from a single column family
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59cd8e51
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59cd8e51
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59cd8e51
Branch: refs/heads/master
Commit: 59cd8e510c603908f85e300e9531323e528e31e7
Parents: 81d3e25
Author: dskskv <ck...@gmail.com>
Authored: Thu Jan 26 22:40:00 2017 +0530
Committer: tedyu <yu...@gmail.com>
Committed: Thu Jan 26 11:58:01 2017 -0800
----------------------------------------------------------------------
.../sql/datasources/hbase/HBaseTableCatalog.scala | 2 +-
.../hadoop/hbase/spark/DefaultSourceSuite.scala | 16 ++++++++--------
2 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/59cd8e51/hbase-spark/src/main/scala/org/apache/spark/sql/datasources/hbase/HBaseTableCatalog.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/spark/sql/datasources/hbase/HBaseTableCatalog.scala b/hbase-spark/src/main/scala/org/apache/spark/sql/datasources/hbase/HBaseTableCatalog.scala
index c2d611f..bb9a94b 100644
--- a/hbase-spark/src/main/scala/org/apache/spark/sql/datasources/hbase/HBaseTableCatalog.scala
+++ b/hbase-spark/src/main/scala/org/apache/spark/sql/datasources/hbase/HBaseTableCatalog.scala
@@ -150,7 +150,7 @@ case class HBaseTableCatalog(
def getRowKey: Seq[Field] = row.fields
def getPrimaryKey= row.keys(0)
def getColumnFamilies = {
- sMap.fields.map(_.cf).filter(_ != HBaseTableCatalog.rowKey)
+ sMap.fields.map(_.cf).filter(_ != HBaseTableCatalog.rowKey).toSeq.distinct
}
def get(key: String) = params.get(key)
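
The one-line change above is the heart of the fix: getColumnFamilies mapped every catalog column to its column family and only filtered out the row key, so a catalog that put several columns in one family returned that family once per column, and callers expecting distinct family names (for example, when creating the table from the catalog) could trip over the duplicates. Appending .toSeq.distinct makes the method return each family exactly once. A minimal, self-contained sketch of the behavior (hypothetical Field case class, not the connector's real one):

    case class Field(cf: String, col: String)

    val fields = Seq(
      Field("rowkey", "key"),
      Field("cf1", "col1"),
      Field("cf1", "col2"), // second column in the same family
      Field("cf3", "col3")
    )

    // Before the fix: one entry per column -> List(cf1, cf1, cf3)
    val withDuplicates = fields.map(_.cf).filter(_ != "rowkey")

    // After the fix: one entry per distinct family -> List(cf1, cf3)
    val families = fields.map(_.cf).filter(_ != "rowkey").distinct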
http://git-wip-us.apache.org/repos/asf/hbase/blob/59cd8e51/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DefaultSourceSuite.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DefaultSourceSuite.scala b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DefaultSourceSuite.scala
index 0f8baed..7b8b844 100644
--- a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DefaultSourceSuite.scala
+++ b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/DefaultSourceSuite.scala
@@ -812,9 +812,9 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
|"columns":{
|"col0":{"cf":"rowkey", "col":"key", "type":"string"},
|"col1":{"cf":"cf1", "col":"col1", "type":"boolean"},
- |"col2":{"cf":"cf2", "col":"col2", "type":"double"},
+ |"col2":{"cf":"cf1", "col":"col2", "type":"double"},
|"col3":{"cf":"cf3", "col":"col3", "type":"float"},
- |"col4":{"cf":"cf4", "col":"col4", "type":"int"},
+ |"col4":{"cf":"cf3", "col":"col4", "type":"int"},
|"col5":{"cf":"cf5", "col":"col5", "type":"bigint"},
|"col6":{"cf":"cf6", "col":"col6", "type":"smallint"},
|"col7":{"cf":"cf7", "col":"col7", "type":"string"},
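
The catalog tweak above makes the test exercise the fixed path: col1 and col2 now share family cf1, and col3 and col4 share cf3. For context, such a catalog is what the connector consumes when reading; a minimal sketch with a shortened, illustrative catalog string (the option key and format name are the ones this suite already uses):

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.datasources.hbase.HBaseTableCatalog

    // Illustrative catalog: col1 and col2 deliberately share family cf1,
    // the shape HBASE-17547 fixes.
    val catalog =
      """{
        |"table":{"namespace":"default", "name":"t1"},
        |"rowkey":"key",
        |"columns":{
        |"col0":{"cf":"rowkey", "col":"key", "type":"string"},
        |"col1":{"cf":"cf1", "col":"col1", "type":"boolean"},
        |"col2":{"cf":"cf1", "col":"col2", "type":"double"}
        |}
        |}""".stripMargin

    val spark = SparkSession.builder().getOrCreate()
    val df = spark.read
      .options(Map(HBaseTableCatalog.tableCatalog -> catalog))
      .format("org.apache.hadoop.hbase.spark")
      .load()
    df.show()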
@@ -851,7 +851,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
test("full query") {
val df = withCatalog(writeCatalog)
- df.show
+ df.show()
assert(df.count() == 256)
}
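
The show hunks here and below are a style cleanup that rides along with the fix: Dataset.show() is declared with empty parentheses, and Scala convention is to keep the parentheses at the call site for such side-effecting methods. A tiny sketch of the convention (hypothetical class, not Spark's):

    class Report {
      // Declared with (): signals a side effect, so call sites
      // should keep the parentheses too.
      def show(): Unit = println("printing the report")
    }

    new Report().show() // preferred: the call matches the declaration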
@@ -861,7 +861,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val df = withCatalog(writeCatalog)
val s = df.filter($"col0" <= "row005")
.select("col0", "col1")
- s.show
+ s.show()
assert(s.count() == 6)
}
@@ -999,7 +999,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
test("avro full query") {
val df = withAvroCatalog(avroCatalog)
- df.show
+ df.show()
df.printSchema()
assert(df.count() == 256)
}
@@ -1013,7 +1013,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
.format("org.apache.hadoop.hbase.spark")
.save()
val newDF = withAvroCatalog(avroCatalogInsert)
- newDF.show
+ newDF.show()
newDF.printSchema()
assert(newDF.count() == 256)
}
@@ -1024,7 +1024,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val df = withAvroCatalog(avroCatalog)
val r = df.filter($"col1.name" === "name005" || $"col1.name" <= "name005")
.select("col0", "col1.favorite_color", "col1.favorite_number")
- r.show
+ r.show()
assert(r.count() == 6)
}
@@ -1034,7 +1034,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
val df = withAvroCatalog(avroCatalog)
val s = df.filter($"col1.name" <= "name005" || $"col1.name".contains("name007"))
.select("col0", "col1.favorite_color", "col1.favorite_number")
- s.show
+ s.show()
assert(s.count() == 7)
}
}