Posted to issues@spark.apache.org by "Lily (JIRA)" <ji...@apache.org> on 2014/12/23 03:07:13 UTC
[jira] [Created] (SPARK-4926) Spark manipulate Hbase
Lily created SPARK-4926:
---------------------------
Summary: Spark manipulate Hbase
Key: SPARK-4926
URL: https://issues.apache.org/jira/browse/SPARK-4926
Project: Spark
Issue Type: Question
Reporter: Lily
When I run the program below, I get the error "Job aborted due to stage failure: Task 0.0 in stage 2.0 (TID 14) had a not serializable result: org.apache.hadoop.hbase.io.ImmutableBytesWritable".
How can I work with the results?
How can I implement get, put, and scan against HBase from Scala?
There are no such examples in the source code files.
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.{ HBaseConfiguration, HTableDescriptor }
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark._

object HbaseTest extends Serializable {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("HBaseTest")
    val sc = new SparkContext(sparkConf)

    // HBase configuration: ZooKeeper quorum/port and the table to read.
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set("hbase.zookeeper.quorum", "192.168.179.146")
    conf.set(TableInputFormat.INPUT_TABLE, "sensteer_rawdata")

    // Create the table if it does not exist yet.
    val admin = new HBaseAdmin(conf)
    if (!admin.isTableAvailable("sensteer_rawdata")) {
      val tableDesc = new HTableDescriptor("sensteer_rawdata")
      admin.createTable(tableDesc)
    }

    // Read the table as an RDD of (row key, row) pairs.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    val count = hBaseRDD.count()
    println("--------------" + count + "--------------")

    // This is the line that fails: take() ships ImmutableBytesWritable
    // values to the driver, and that class is not serializable.
    val res = hBaseRDD.take(count.toInt)

    sc.stop()
  }
}
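The failure comes from take(): it ships the (ImmutableBytesWritable, Result) pairs to the driver, and ImmutableBytesWritable is a Hadoop Writable that does not implement java.io.Serializable. One way around it is to map each pair into plain Scala types on the executors before collecting anything. A minimal sketch, assuming a column family "cf" and qualifier "col" (both hypothetical, since the schema of sensteer_rawdata is not shown):

  import org.apache.hadoop.hbase.util.Bytes

  // Convert each pair to serializable Scala values before collecting.
  val rows = hBaseRDD.map { case (key, result) =>
    val rowKey = Bytes.toString(key.get())
    // "cf" and "col" are placeholders for a real family and qualifier.
    val value = Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col")))
    (rowKey, value)
  }
  val res = rows.take(count.toInt)

Another common workaround is enabling Kryo serialization (setting spark.serializer to org.apache.spark.serializer.KryoSerializer) and registering the HBase classes with it.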
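For plain get, put, and scan, Spark is not needed at all; the HBase client API of that era can be called directly from Scala. A minimal sketch reusing the conf above (the row key, family "cf", and qualifier "col" are again hypothetical):

  import org.apache.hadoop.hbase.client.{ Get, HTable, Put, Scan }
  import org.apache.hadoop.hbase.util.Bytes

  val table = new HTable(conf, "sensteer_rawdata")

  // put: write one cell.
  val p = new Put(Bytes.toBytes("row1"))
  p.add(Bytes.toBytes("cf"), Bytes.toBytes("col"), Bytes.toBytes("hello"))
  table.put(p)

  // get: read the row back.
  val r = table.get(new Get(Bytes.toBytes("row1")))
  println(Bytes.toString(r.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col"))))

  // scan: iterate over all rows, closing the scanner when done.
  val scanner = table.getScanner(new Scan())
  try {
    var row = scanner.next()
    while (row != null) {
      println(Bytes.toString(row.getRow))
      row = scanner.next()
    }
  } finally {
    scanner.close()
  }
  table.close()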
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)