You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by me...@apache.org on 2020/04/28 19:53:41 UTC
[hbase-connectors] branch master updated: HBASE-24276 support writing to table in arbitrary namespace (#67)
This is an automated email from the ASF dual-hosted git repository.
meszibalu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-connectors.git
The following commit(s) were added to refs/heads/master by this push:
new b41afac HBASE-24276 support writing to table in arbitrary namespace (#67)
b41afac is described below
commit b41afac03c8e8f0020ff865b2c27e3a22f146e3a
Author: Naitree Zhu <Na...@gmail.com>
AuthorDate: Wed Apr 29 03:53:21 2020 +0800
HBASE-24276 support writing to table in arbitrary namespace (#67)
Signed-off-by: Balazs Meszaros <me...@apache.org>
---
.../main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala b/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
index cede78a..84e9123 100644
--- a/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
+++ b/spark/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/DefaultSource.scala
@@ -103,7 +103,7 @@ case class HBaseRelation (
@transient val encoder = JavaBytesEncoder.create(encoderClsName)
val catalog = HBaseTableCatalog(parameters)
- def tableName = catalog.name
+ def tableName = s"${catalog.namespace}:${catalog.name}"
val configResources = parameters.get(HBaseSparkConf.HBASE_CONFIG_LOCATION)
val useHBaseContext = parameters.get(HBaseSparkConf.USE_HBASECONTEXT).map(_.toBoolean).getOrElse(HBaseSparkConf.DEFAULT_USE_HBASECONTEXT)
val usePushDownColumnFilter = parameters.get(HBaseSparkConf.PUSHDOWN_COLUMN_FILTER)
@@ -157,7 +157,7 @@ case class HBaseRelation (
parameters.get(HBaseTableCatalog.regionEnd)
.getOrElse(HBaseTableCatalog.defaultRegionEnd))
if (numReg > 3) {
- val tName = TableName.valueOf(catalog.name)
+ val tName = TableName.valueOf(tableName)
val cfs = catalog.getColumnFamilies
val connection = HBaseConnectionCache.getConnection(hbaseConf)
@@ -168,7 +168,7 @@ case class HBaseRelation (
val tableDesc = new HTableDescriptor(tName)
cfs.foreach { x =>
val cf = new HColumnDescriptor(x.getBytes())
- logDebug(s"add family $x to ${catalog.name}")
+ logDebug(s"add family $x to ${tableName}")
tableDesc.addFamily(cf)
}
val splitKeys = Bytes.split(startKey, endKey, numReg);
@@ -194,7 +194,7 @@ case class HBaseRelation (
override def insert(data: DataFrame, overwrite: Boolean): Unit = {
val jobConfig: JobConf = new JobConf(hbaseConf, this.getClass)
jobConfig.setOutputFormat(classOf[TableOutputFormat])
- jobConfig.set(TableOutputFormat.OUTPUT_TABLE, catalog.name)
+ jobConfig.set(TableOutputFormat.OUTPUT_TABLE, tableName)
var count = 0
val rkFields = catalog.getRowKey
val rkIdxedFields = rkFields.map{ case x =>