HBase實操:HBase-Spark-Read-Demo 分享

post200發表於2021-09-09

**前言:**本文是一個關於Spark讀取HBase的一個小demo,簡單瞭解一下~

相關程式碼:

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase._
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.{Base64, Bytes}
import org.apache.spark.{SparkConf, SparkContext}


object SparkReadHBaseDemo {

  // Comma-separated ZooKeeper quorum hosts of the target HBase cluster.
  val HBASE_ZOOKEEPER_QUORUM = "xxx1.com.cn,xxx2.com.cn,xxx3.com.cn"

  /**
   * Entry point: scans the configured HBase table through the MapReduce
   * `TableInputFormat`, prints each row's key and `f:addr` value, and
   * triggers the job with a `count()` action.
   */
  def main(args: Array[String]): Unit = {

    // Spark entry point. Kryo serialization is required because HBase's
    // ImmutableBytesWritable is not Java-serializable.
    val conf = new SparkConf().setAppName("SparkReadHBaseDemo ")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .setMaster("local") // local master: debugging only
    val sc = new SparkContext(conf)
    try {
      // Build an RDD of (rowkey, Result) pairs over the HBase table.
      val hbaseRDD = sc.newAPIHadoopRDD(getHbaseConf(), classOf[TableInputFormat],
        classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
        classOf[org.apache.hadoop.hbase.client.Result])
      hbaseRDD.map(_._2).map(getRes).count()
    } finally {
      // Fix: the original never released the SparkContext.
      sc.stop()
    }
  }

  /**
   * Extracts the rowkey and the `f:addr` cell from a scan [[org.apache.hadoop.hbase.client.Result]].
   *
   * @param result one row returned by the table scan
   * @return the `f:addr` value, or `null` when the cell is absent
   *         (HBase's `Bytes.toString(null)` yields `null`)
   */
  def getRes(result: org.apache.hadoop.hbase.client.Result): String = {
    val rowkey = Bytes.toString(result.getRow())
    // Bytes.toBytes encodes as UTF-8, unlike String.getBytes which uses the
    // platform default charset (identical for ASCII, but safer).
    val addr = Bytes.toString(result.getValue(Bytes.toBytes("f"), Bytes.toBytes("addr")))
    println(rowkey + "---" + addr)
    addr
  }

  /**
   * Builds the HBase/Hadoop configuration for the scan: ZooKeeper connection
   * details, the table to read, and the serialized Scan object.
   */
  def getHbaseConf(): Configuration = {
    val conf: Configuration = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM)
    // Table to scan.
    conf.set(TableInputFormat.INPUT_TABLE, "test_shx")
    // TableInputFormat expects the Scan as a Base64-encoded protobuf string.
    conf.set(TableInputFormat.SCAN, getScanStr())
    conf
  }

  /**
   * Serializes a (currently unfiltered) Scan to the Base64 protobuf string
   * format required by `TableInputFormat.SCAN`.
   */
  def getScanStr(): String = {
    val scan = new Scan()
    // scan.set... — add filters / column restrictions here as needed
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray())
  }
}

專案用到的 pom.xml 檔案:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.zpb.test</groupId>
    <artifactId>spark-read-hbase-demo</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <name>spark-read-hbase-demo</name>
    <url/>

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>

    <properties>
        <cdh.hbase.version>1.2.0-cdh5.7.0</cdh.hbase.version>
        <cdh.spark.version>1.6.0-cdh5.7.0</cdh.spark.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>${cdh.spark.version}</version>
            <!--<scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>${cdh.hbase.version}</version>
        </dependency>
    </dependencies>
</project>

來自 “ ITPUB部落格 ” ,連結:http://blog.itpub.net/2249/viewspace-2825256/,如需轉載,請註明出處,否則將追究法律責任。

相關文章