Spark如何批量存取HBase

溫馨提示×

溫馨提示×

您好,登錄后才能下訂單哦!

密碼登錄×
登錄注冊×
其他方式登錄
點擊 登錄注冊 即表示同意《億速云用戶服務條款》

Spark如何批量存取HBase

發布時間:2021-12-08 13:56:55 來源:億速云 閱讀:286 作者:小新 欄目:云計算

這篇文章將為大家詳細講解有關Spark如何批量存取HBase ,小編覺得挺實用的,因此分享給大家做個參考,希望大家閱讀完這篇文章后可以有所收獲。

FileAna.scala

object FileAna {

  //  val conf: Configuration = HBaseConfiguration.create()

  // Base HDFS URI for the cluster. NOTE(review): the FileSystem handle is
  // created eagerly at object initialization and will throw there if HDFS
  // is unreachable — confirm this is intended.
  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  /** Reads hex-encoded MAVLink messages from an HDFS text file, converts
    * each line to an HBase `Put`, and bulk-writes the results to the
    * "MissionItem" table via `saveAsHadoopDataset`.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("FileAna").setMaster("spark://master:7077").
      set("spark.driver.host", "192.168.1.127").
      setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))
    val sc = new SparkContext(conf)
    // One hex-encoded MAVLink message per line.
    val rdd = sc.textFile("hdfs://master:9000/woozoom/msgfile.txt")
    val rdd2 = rdd.map(line => convertToHbase(anaMavlink(line)))

    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")

    // Old (mapred) API: saveAsHadoopDataset requires a JobConf with the
    // output format and target table set.
    val jobConf = new JobConf(hbaseConf, this.getClass)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, "MissionItem")

    rdd2.saveAsHadoopDataset(jobConf)

    sc.stop()
  }

  /** Serializes a `Scan` to the Base64 string form that `TableInputFormat`
    * expects in its `SCAN` configuration property.
    */
  def convertScanToString(scan: Scan) = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }

  /** Converts a MAVLink message into the (key, Put) pair expected by
    * TableOutputFormat. The row key is a random UUID. Only
    * `msg_mission_item` messages get the x/y/z data columns.
    *
    * NOTE(review): messages of any other type produce a Put with no
    * columns, which HBase rejects on write — confirm the input file
    * contains only mission items.
    */
  def convertToHbase(msg: MAVLinkMessage) = {
    val p = new Put(Bytes.toBytes(UUID.randomUUID().toString()))
    // Pattern match instead of isInstanceOf/asInstanceOf (idiomatic Scala).
    msg match {
      case missionItem: msg_mission_item =>
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("x"), Bytes.toBytes(missionItem.x))
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("y"), Bytes.toBytes(missionItem.y))
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("z"), Bytes.toBytes(missionItem.z))
      case _ => // non-mission-item messages carry no data columns
    }
    (new ImmutableBytesWritable, p)
  }

  // Decodes one hex string line into bytes and unpacks it as a MAVLink message.
  val anaMavlink = (str: String) => {
    val bytes = ByteAndHex.hexStringToBytes(str)
    QuickParser.parse(bytes).unpack()
  }
}

ReadHBase.scala

object ReadHBase {

  //  val conf: Configuration = HBaseConfiguration.create()

  // Base HDFS URI for the cluster. NOTE(review): the FileSystem handle is
  // created eagerly at object initialization and will throw there if HDFS
  // is unreachable — confirm this is intended.
  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  /** Scans the entire "MissionItem" HBase table through the new (mapreduce)
    * TableInputFormat API and prints the row count.
    */
  def main(args: Array[String]): Unit = {
    // Bug fix: appName was copy-pasted as "FileAna" from the writer job;
    // this job should identify itself as "ReadHBase" in the Spark UI.
    val conf = new SparkConf().setAppName("ReadHBase").setMaster("spark://master:7077").
      set("spark.driver.host", "192.168.1.127").
      setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))
    val sc = new SparkContext(conf)

    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")

    // TableInputFormat reads the table name and a Base64-encoded Scan
    // from the Hadoop configuration; a default Scan reads all rows.
    hbaseConf.set(TableInputFormat.INPUT_TABLE, "MissionItem")
    val scan = new Scan()
    hbaseConf.set(TableInputFormat.SCAN, convertScanToString(scan))
    val readRDD = sc.newAPIHadoopRDD(hbaseConf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    val count = readRDD.count()
    println("Mission Item Count:" + count)

    sc.stop()
  }

  /** Serializes a `Scan` to the Base64 string form that `TableInputFormat`
    * expects in its `SCAN` configuration property.
    */
  def convertScanToString(scan: Scan) = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }
}

關于“Spark如何批量存取HBase ”這篇文章就分享到這里了,希望以上內容可以對大家有一定的幫助,使各位可以學到更多知識,如果覺得文章不錯,請把它分享出去讓更多的人看到。

向AI問一下細節

免責聲明:本站發布的內容(圖片、視頻和文字)以原創、轉載和分享為主,文章觀點不代表本網站立場,如果涉及侵權請聯系站長郵箱:is@yisu.com進行舉報,并提供相關證據,一經查實,將立刻刪除涉嫌侵權內容。

AI

九江市| 贵德县| 昂仁县| 绥江县| 顺平县| 北京市| 麻阳| 麻江县| 公安县| 崇明县| 博野县| 洛阳市| 广西| 阜南县| 慈利县| 清涧县| 柯坪县| 安泽县| 塔河县| 嘉祥县| 乐山市| 奎屯市| 都兰县| 酒泉市| 岳池县| 资阳市| 内黄县| 东海县| 通辽市| 赞皇县| 博野县| 潼南县| 凌海市| 碌曲县| 惠州市| 巴楚县| 临澧县| 宝鸡市| 平度市| 曲松县| 彭水|