Spark Streaming + Kafka stream processing, with results stored to MongoDB, Solr, and Neo4j (personal notes)

Published by 右介 on 2018-03-01
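
The four Scala files below share one set of dependencies. A minimal build.sbt sketch of what they appear to need (library versions are assumptions; match them to your own Spark and Scala versions):

scalaVersion := "2.11.8"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core"                % "2.1.0" % "provided",
  "org.apache.spark" %% "spark-streaming"           % "2.1.0" % "provided",
  "org.apache.spark" %% "spark-streaming-kafka-0-8" % "2.1.0",
  "org.mongodb"      %% "casbah"                    % "3.1.1",
  "org.json4s"       %% "json4s-jackson"            % "3.2.11",
  "com.alibaba"      %  "fastjson"                  % "1.2.47",
  "org.neo4j.driver" %  "neo4j-java-driver"         % "1.4.5",
  "org.scalaj"       %% "scalaj-http"               % "2.3.0",
  "joda-time"        %  "joda-time"                 % "2.9.9"
)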

KafkaStreaming.scala file

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.{KafkaManagerAdd, KafkaUtils}
import org.json4s.DefaultFormats
import org.json4s.jackson.Json
import com.mongodb.casbah.{MongoClient, MongoClientURI, MongoCollection}

import scala.collection.mutable.ArrayBuffer
/**
  * Created by zty on 2017/12/20.
  */
object KafkaStreaming {
  var journalArticleClass = new JournalArticleDataManagerAdd(MongoClient(MongoClientURI("mongodb://IP:27017"))("databaseName")("collectionName"))

  def main(args: Array[String]): Unit = {
    run()
  }

  def run(): Unit = {
    // Kafka topic name
    val topicsJournalArticle = "JouArt"
    // Kafka brokers
    val brokers = "IP1:9092,IP2:9092,IP3:9092"
    // Spark configuration
    val sparkconf = new SparkConf().setAppName("kafkastreaming").set("spark.streaming.kafka.maxRatePerPartition", "5")
    val ssc = new StreamingContext(sparkconf, Seconds(30))

    val topicsJournalArticleSet = topicsJournalArticle.split(",").toSet
    val journalArticlekafkaParams = Map[String, String]("metadata.broker.list" -> brokers, "group.id" -> "journalArticledataManager",
      "fetch.message.max.bytes" -> "20971520", "auto.offset.reset" -> "smallest")
    val journalArticleManager = new KafkaManagerAdd(journalArticlekafkaParams)
    val jsonsjournalArticleLines = journalArticleManager.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, journalArticlekafkaParams, topicsJournalArticleSet)

    // Commit the consumed offsets back to ZooKeeper
    jsonsjournalArticleLines.foreachRDD(rdd => {
      journalArticleManager.updateZKOffsets(rdd)
    })

    val jsonsjournalArticle = jsonsjournalArticleLines.map(_._2)
    val arrayjournalArticle = ArrayBuffer[String]()
    jsonsjournalArticle.foreachRDD(rdd => {
      // Collect this batch to the driver and buffer it for the MongoDB/Solr/Neo4j writer
      rdd.collect().foreach(x => {
        arrayjournalArticle += x
      })
      kafkaProducerSendJournalArticle(arrayjournalArticle)
      arrayjournalArticle.clear()
    })

    ssc.start()
    ssc.awaitTermination()
  }

  def kafkaProducerSendJournalArticle(args: ArrayBuffer[String]): Unit = {
    if (args.nonEmpty) {
      args.foreach(line => {
        // Each Kafka message is expected to be a JSON array of article objects
        val json: Option[Any] = scala.util.parsing.json.JSON.parseFull(line.toString)
        val maps: List[Any] = json.get.asInstanceOf[List[Any]]
        maps.foreach(langMap => {
          val listJson: Map[String, Any] = langMap.asInstanceOf[Map[String, Any]]
          val jsonText = Json(DefaultFormats).write(listJson)
          if (jsonText != null && jsonText.nonEmpty) {
            journalArticleClass.MongoDBJournalArticleAdd(jsonText)
            journalArticleClass.Neo4jSolrJournalArticleAdd()
          }
        })
      })
    }
  }
}
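
kafkaProducerSendJournalArticle above parses each Kafka message as a JSON array of article objects. A hedged sketch of what one such message might look like, reconstructed from the field names read in JournalArticleDataManagerAdd.scala below (every value is made up, and the person/, organization/ and journal/ URI prefixes are only guesses inferred from the substring() offsets in that file):

// Hypothetical sample message (a JSON array with one article), for local testing only
val sampleMessage =
  """[{
    |  "_id": {"$oid": "5a1b2c3d4e5f6a7b8c9d0e1f"},
    |  "title": {"title": "An example article", "language": "eng"},
    |  "title_alternative": [{"title_alternative": "An alternate title"}],
    |  "first_contributer_id": "0001",
    |  "contributer_meta": [{
    |    "contributer_name": "Zhang San",
    |    "contributer_URI": [{"contributer_URI": "person/0001"}],
    |    "organization_list": [{"name": "Some University", "organization_URI": ["organization/0002"]}]
    |  }],
    |  "abstractvalue": {"abstractvalue": "A short abstract."},
    |  "abstract_alternative": [{"abstract_alternative": "An alternate abstract"}],
    |  "subject_meta": {"subject_list": [
    |    {"source": "CLC", "type": "Classification", "subject_title": "Computer science", "subject_code": "TP3"},
    |    {"source": "author", "type": "Keyword", "subject_title": "kafka", "subject_code": ""}
    |  ]},
    |  "funding_list": {"funding_meta": [{"title": "Some fund", "_id": "fund-0001"}]},
    |  "holding_meta": [{"holding_code": "EI"}],
    |  "journal_URI": "journal/0003",
    |  "journal_title": "Example Journal",
    |  "publication_year": "2018",
    |  "volume": "12",
    |  "issue": "3",
    |  "publication_date": {"$date": 1519862400000}
    |}]""".stripMargin

With a message of this shape, kafkaProducerSendJournalArticle(ArrayBuffer(sampleMessage)) would insert one document into MongoDB and then index it into Solr and Neo4j.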

JournalArticleDataManagerAdd.scala file

import java.text.SimpleDateFormat
import java.util.{Date, TimeZone}
import org.joda.time.DateTime
import com.mongodb.DBObject
import com.mongodb.casbah.{MongoClient, MongoClientURI, MongoCollection}
import com.mongodb.util.JSON
import org.neo4j.driver.v1.{AuthTokens, GraphDatabase, StatementResult}
/**
  * Created by zty on 2017/02/01.
  */
class JournalArticleDataManagerAdd (collectionString : MongoCollection)  {
  // Connect to MongoDB and return the target collection
  def createDatabase(url: String, dbName: String, coll: String): MongoCollection = {
    MongoClient(MongoClientURI(url))(dbName)(coll)
  }
  // Shared field: set by MongoDBJournalArticleAdd and also used by Neo4jSolrJournalArticleAdd
  var jsonString = ""

  def MongoDBJournalArticleAdd(JsonString: String): Unit = {
    jsonString = JsonString
    try{
      val bson: DBObject = JSON.parse(jsonString).asInstanceOf[DBObject]
      collectionString.insert(bson)
    }catch {
      case e: ArithmeticException => println(e)
      case ex: Throwable =>println(ex)
    }
  }

  // Add the record to Solr and Neo4j
  def Neo4jSolrJournalArticleAdd(): Unit = {
    val driver = GraphDatabase.driver("bolt://IP:7687", AuthTokens.basic("neo4j", "******"))
    val session = driver.session

    try {
//      Parse the JSON string into a fastjson object
      var json = com.alibaba.fastjson.JSON.parseObject(jsonString)
//      ID
      var GUID = json.getJSONObject("_id").get("$oid")
//      Title
      var titleObject = json.getJSONObject("title")
      var titlevalue = titleObject.get("title")
//      Document language
      var language = titleObject.get("language")
      var languagevalue=if(language!=null) language.toString else ""
//      Titles in other languages
      var title_alternative = json.getJSONArray("title_alternative")
      var title_alternativevalue=""
      for( a <-0 to title_alternative.toArray.length-1){
        var item=title_alternative.toArray.apply(a)
        title_alternativevalue += com.alibaba.fastjson.JSON.parseObject(item.toString).get("title_alternative") + ","
      }
      if (title_alternativevalue != "") title_alternativevalue = title_alternativevalue.dropRight(1)
//      First author ID
      var first_id = json.get("first_contributer_id")
      if(first_id==null) first_id="" else first_id=first_id.toString
//      Contributors
      var contributer_meta = json.getJSONArray("contributer_meta")
      var contributer_metavalue = ""
      var contributer_idvalue = ""
      var contributer_orgvalue = ""
      var contributer_org_idvalue = ""
      var conid = ""
      var conorgid = ""
      for (a <- 0 to contributer_meta.toArray.length - 1) {
        var item = contributer_meta.toArray.apply(a)
        var itemJson = com.alibaba.fastjson.JSON.parseObject(item.toString)
        contributer_metavalue += itemJson.get("contributer_name") + ","
        var contributer_id = itemJson.getJSONArray("contributer_URI").toArray
        if(contributer_id.length != 0){
          conid = com.alibaba.fastjson.JSON.parseObject(contributer_id.apply(0).toString).get("contributer_URI").toString
          if (conid.length!=0) contributer_idvalue += conid.substring(7,conid.length) + "','" else contributer_idvalue += "','"}
        var organization_list = itemJson.getJSONArray("organization_list")
        for (b <- 0 to organization_list.toArray.length - 1) {
          var list = organization_list.toArray.apply(b)
          contributer_orgvalue += com.alibaba.fastjson.JSON.parseObject(list.toString).get("name") + ","
          var contributer_org_id = com.alibaba.fastjson.JSON.parseObject(list.toString).getJSONArray("organization_URI").toArray
          if(contributer_org_id.length != 0){
            conorgid = contributer_org_id.apply(0).toString
            if (conorgid.length!=0) contributer_org_idvalue += conorgid.substring(13,conorgid.length) + "','" else contributer_org_idvalue += "','"}
        }
      }
      if (contributer_metavalue != "") contributer_metavalue = contributer_metavalue.dropRight(1)
      if (contributer_idvalue != "") contributer_idvalue = "['" + contributer_idvalue.substring(0, contributer_idvalue.length - 2) + "]" else contributer_idvalue = "[]"
      if (contributer_orgvalue != "") contributer_orgvalue = contributer_orgvalue.dropRight(1)
      if (contributer_org_idvalue != "") contributer_org_idvalue = "['" + contributer_org_idvalue.substring(0, contributer_org_idvalue.length - 2) + "]" else contributer_org_idvalue = "[]"
//      Abstract
      var abstractvalue = json.getJSONObject("abstractvalue").get("abstractvalue")
      var abstractvaluevalue=if(abstractvalue==null) "" else abstractvalue.toString
//      Abstracts in other languages
      var abstract_alternative = json.getJSONArray("abstract_alternative")
      var abstract_alternativevalue=""
      for( a <-0 to abstract_alternative.toArray.length-1){
        var item=abstract_alternative.toArray.apply(a)
        abstract_alternativevalue += com.alibaba.fastjson.JSON.parseObject(item.toString).get("abstract_alternative") + ","
      }
      if (abstract_alternativevalue != "") abstract_alternativevalue = abstract_alternativevalue.dropRight(1)
//      Subject container - subject terms
      val subject_list = json.getJSONObject("subject_meta").getJSONArray("subject_list")
      var CLCtitle=""
      var CLCcode=""
      var keywords=""
      for (a <- 0 to subject_list.toArray.length - 1) {
        var item = com.alibaba.fastjson.JSON.parseObject(subject_list.toArray.apply(a).toString)
        var source=item.get("source")
        var types=item.get("type")
        if(source=="CLC" || source=="clc"){
          CLCtitle += item.get("subject_title") + ","
          CLCcode += item.get("subject_code") + ","
        }
        if(types=="Keyword" || types=="keyword") keywords+= item.get("subject_title") + ","
      }
      if (CLCtitle != "") CLCtitle = CLCtitle.dropRight(1)
      if (CLCcode != "") CLCcode = CLCcode.dropRight(1)
      if (keywords != "") keywords = keywords.dropRight(1)
//      Funding projects
      var funding_list = json.getJSONObject("funding_list").getJSONArray("funding_meta")
      var funding_listvalue=""
      var funding_list_idvalue=""
      var funid = ""
      for( a <-0 to funding_list.toArray.length-1){
        var item=com.alibaba.fastjson.JSON.parseObject(funding_list.toArray.apply(a).toString)
        funding_listvalue += item.get("title") + ","
        funding_list_idvalue += item.get("_id") +"','"
      }
      if (funding_listvalue != "") funding_listvalue = funding_listvalue.dropRight(1)
      if (funding_list_idvalue != "") funding_list_idvalue = "['" + funding_list_idvalue.substring(0, funding_list_idvalue.length - 2) + "]" else funding_list_idvalue = "[]"
//      Indexing categories (holding codes)
      var holding_meta = json.getJSONArray("holding_meta").toArray
      var holding_metavalue=""
      for( a <-0 to holding_meta.length-1){
        var item=holding_meta.apply(a)
        holding_metavalue += com.alibaba.fastjson.JSON.parseObject(item.toString).get("holding_code") + ","
      }
      if (holding_metavalue != "") holding_metavalue = holding_metavalue.dropRight(1)
//      Journal ID
      var journal_id=json.get("journal_URI")
      if(journal_id==null) journal_id="[]" else journal_id="['"+journal_id.toString.substring(8,journal_id.toString.length)+"']"
//      Journal title
      var journal_title=json.get("journal_title")
      if(journal_title==null) journal_title="" else journal_title=journal_title.toString
//      Publication year
      var publication_year=json.get("publication_year")
      if(publication_year==null) publication_year="" else publication_year=publication_year.toString
//      volume
      var volume=json.get("volume")
      if(volume==null) volume="" else volume=volume.toString
//      issue
      var issue=json.get("issue")
      if(issue==null) issue="" else issue=issue.toString
//      Publication date
      var publication_datevalue=json.getJSONObject("publication_date")
      val dateTimep = new DateTime(publication_datevalue.get("$date").asInstanceOf[Number].longValue).toString("yyyy-MM-dd")
      var publication_date = "0001-01-01T00:00:00Z"
      if(dateTimep!=null) publication_date=dateTimep+ "T00:00:00Z"
//      Format the current time as a Solr Date (UTC, ISO 8601)
      val now: Date = new Date()
      val dateFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
      dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
      var createTime = dateFormat.format(now)
//      Build the document string and store it in Solr
      var text = ""
      var data = "{'id':'"+GUID.toString+
        "','text':'"+text+
        "','title':'"+titlevalue.toString+
        "','title_alternative':'"+title_alternativevalue+
        "','first_contributer_id':'"+first_id.toString+
        "','contributer_id':"+contributer_idvalue+
        ",'contributer_name':'"+contributer_metavalue+
        "','contributer_org_id':"+contributer_org_idvalue+
        ",'contributer_org':'"+contributer_orgvalue+
        "','abstractvalue':'"+abstractvaluevalue+
        "','abstract_alternative':'"+abstract_alternativevalue+
        "','funding_list_id':"+funding_list_idvalue+
        ",'funding_list':'"+funding_listvalue+
        "','holding_code':'"+holding_metavalue+
        "','journal_id':"+journal_id.toString+
        ",'journal_title':'"+journal_title.toString+
        "','volume':'"+volume+
        "','issue':'"+issue+
        "','CLCcode':'"+CLCcode+
        "','CLCtitle':'"+CLCtitle+
        "','keywords':'"+keywords+
        "','language':'"+languagevalue+
        "','publication_year':'"+publication_year+
        "','publication_date':'"+publication_date+
        "','createTime':'"+ createTime+
        "','updateTime':'"+createTime+"'}"
      val solrAdd = new SolrAdd()
      solrAdd.postToSolr("JournalArticle", data)
//      Store the record in Neo4j
      val script = s"CREATE (:journalArticle {guid:'" + GUID +
        "',title:'"+titlevalue.toString+
        "',title_alternative:'"+title_alternativevalue+
        "',contributer_name:'"+contributer_metavalue+
        "',contributer_org:'"+contributer_orgvalue+
        "',abstractvalue:'"+abstractvaluevalue+
        "',abstract_alternative:'"+abstract_alternativevalue+
        "',funding_list:'"+funding_listvalue+
        "',holding_code:'"+holding_metavalue+
        "',journal_title:'"+journal_title.toString+
        "',volume:'"+volume+
        "',issue:'"+issue+
        "',CLCcode:'"+CLCcode+
        "',CLCtitle:'"+CLCtitle+
        "',keywords:'"+keywords+
        "',language:'"+languagevalue+
        "',publication_year:'"+publication_year+
        "',publication_date:'"+publication_date+
        "'})"
      val result: StatementResult = session.run(script)
      result.consume().counters().nodesCreated()
    } catch {
      case ex: Throwable => println(ex)
    } finally {
      // Always release the Neo4j session and driver, even when parsing or the write fails
      session.close()
      driver.close()
    }
  }
}
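
The Cypher statement above is built by string concatenation, so a title or abstract that contains a single quote would break it. A hedged alternative sketch using the Neo4j driver's query parameters instead (only a few properties are shown, with hypothetical values; the remaining properties follow the same pattern):

import org.neo4j.driver.v1.{AuthTokens, GraphDatabase, Values}

val driver = GraphDatabase.driver("bolt://IP:7687", AuthTokens.basic("neo4j", "******"))
val session = driver.session()
try {
  // Values are passed as parameters, so quoting and escaping are handled by the driver
  session.run(
    "CREATE (:journalArticle {guid: {guid}, title: {title}, keywords: {keywords}})",
    Values.parameters(
      "guid", "5a1b2c3d4e5f6a7b8c9d0e1f",
      "title", "A title with 'quotes' in it",
      "keywords", "kafka,spark"))
} finally {
  session.close()
  driver.close()
}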

KafkaManagerAdd.scala file

// Placed in the org.apache.spark.streaming.kafka package (matching the import in
// KafkaStreaming.scala) so that KafkaCluster, KafkaUtils and HasOffsetRanges used
// below resolve without extra imports.
package org.apache.spark.streaming.kafka

import kafka.common.TopicAndPartition
import kafka.message.MessageAndMetadata
import kafka.serializer.Decoder
import org.apache.spark.SparkException
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka.KafkaCluster.LeaderOffset
import scala.reflect.ClassTag
/**
  * Created by zty.
  */
class KafkaManagerAdd(val kafkaParams: Map[String, String]) extends Serializable {

  private val kc = new KafkaCluster(kafkaParams)

  /**
    * Create a direct Kafka input stream
    * @param ssc
    * @param kafkaParams
    * @param topics
    * @tparam K
    * @tparam V
    * @tparam KD
    * @tparam VD
    * @return
    */
  def createDirectStream[K: ClassTag, V: ClassTag, KD <: Decoder[K]: ClassTag, VD <: Decoder[V]: ClassTag](
      ssc: StreamingContext,
      kafkaParams: Map[String, String],
      topics: Set[String]): InputDStream[(K, V)] = {
    val groupId = kafkaParams.get("group.id").get
    // Before reading offsets from ZooKeeper, bring them up to date with the actual consumption state
    setOrUpdateOffsets(topics, groupId)

    // Read the offsets from ZooKeeper and start consuming messages from there
    val messages = {
      val partitionsE = kc.getPartitions(topics)
      if (partitionsE.isLeft)
        throw new SparkException(s"get kafka partition failed: ${partitionsE.left.get}")
      val partitions = partitionsE.right.get
      val consumerOffsetsE = kc.getConsumerOffsets(groupId, partitions)
      if (consumerOffsetsE.isLeft)
        throw new SparkException(s"get kafka consumer offsets failed: ${consumerOffsetsE.left.get}")
      val consumerOffsets = consumerOffsetsE.right.get
      KafkaUtils.createDirectStream[K, V, KD, VD, (K, V)](
        ssc, kafkaParams, consumerOffsets, (mmd: MessageAndMetadata[K, V]) => (mmd.key, mmd.message))
    }
    messages
  }

  /**
    * Before creating the stream, update the consumer offsets according to the actual consumption state
    * @param topics
    * @param groupId
    */
  private def setOrUpdateOffsets(topics: Set[String], groupId: String): Unit = {
    topics.foreach(topic => {
      var hasConsumed = true
      val partitionsE = kc.getPartitions(Set(topic))
      if (partitionsE.isLeft)
        throw new SparkException(s"get kafka partition failed: ${partitionsE.left.get}")
      val partitions = partitionsE.right.get
      val consumerOffsetsE = kc.getConsumerOffsets(groupId, partitions)
      if (consumerOffsetsE.isLeft) hasConsumed = false
      if (hasConsumed) { // this consumer group has consumed the topic before
        /**
          * If the streaming job throws kafka.common.OffsetOutOfRangeException at runtime,
          * the offsets stored in ZooKeeper are stale: Kafka's retention policy has already
          * deleted the log segments that contained them. In that case compare the
          * consumerOffsets stored in ZooKeeper with earliestLeaderOffsets; if consumerOffsets
          * are smaller than earliestLeaderOffsets they are out of date, so reset
          * consumerOffsets to earliestLeaderOffsets.
          */
        val earliestLeaderOffsetsE = kc.getEarliestLeaderOffsets(partitions)
        if (earliestLeaderOffsetsE.isLeft)
          throw new SparkException(s"get earliest leader offsets failed: ${earliestLeaderOffsetsE.left.get}")
        val earliestLeaderOffsets = earliestLeaderOffsetsE.right.get
        val consumerOffsets = consumerOffsetsE.right.get

        // Only some partitions may have stale consumerOffsets, so update just those partitions to earliestLeaderOffsets
        var offsets: Map[TopicAndPartition, Long] = Map()
        consumerOffsets.foreach({ case(tp, n) =>
          val earliestLeaderOffset = earliestLeaderOffsets(tp).offset
          if (n < earliestLeaderOffset) {
            println("consumer group:" + groupId + ",topic:" + tp.topic + ",partition:" + tp.partition +
              " offsets已經過時,更新為" + earliestLeaderOffset)
            offsets += (tp -> earliestLeaderOffset)
          }
        })
        if (!offsets.isEmpty) {
          kc.setConsumerOffsets(groupId, offsets)
        }
      } else { // this consumer group has never consumed the topic
        val reset = kafkaParams.get("auto.offset.reset").map(_.toLowerCase)
        var leaderOffsets: Map[TopicAndPartition, LeaderOffset] = null
        if (reset == Some("smallest")) {
          val leaderOffsetsE = kc.getEarliestLeaderOffsets(partitions)
          if (leaderOffsetsE.isLeft)
            throw new SparkException(s"get earliest leader offsets failed: ${leaderOffsetsE.left.get}")
          leaderOffsets = leaderOffsetsE.right.get
        } else {
          val leaderOffsetsE = kc.getLatestLeaderOffsets(partitions)
          if (leaderOffsetsE.isLeft)
            throw new SparkException(s"get latest leader offsets failed: ${leaderOffsetsE.left.get}")
          leaderOffsets = leaderOffsetsE.right.get
        }
        val offsets = leaderOffsets.map {
          case (tp, offset) => (tp, offset.offset)
        }
        kc.setConsumerOffsets(groupId, offsets)
      }
    })
  }

  /**
    * Update the consumed offsets stored in ZooKeeper
    * @param rdd
    */
  def updateZKOffsets(rdd: RDD[(String, String)]) : Unit = {
    val groupId = kafkaParams.get("group.id").get
    val offsetsList = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

    for (offsets <- offsetsList) {
      val topicAndPartition = TopicAndPartition(offsets.topic, offsets.partition)
      val o = kc.setConsumerOffsets(groupId, Map((topicAndPartition, offsets.untilOffset)))
      if (o.isLeft) {
        println(s"Error updating the offset to Kafka cluster: ${o.left.get}")
      }
    }
  }
}
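
Usage note: in KafkaStreaming.scala above, offsets are committed in one foreachRDD and the records are written in a second one. A hedged sketch of doing both in a single foreachRDD, so that the ZooKeeper offsets only advance after the batch has been handled (a suggestion, not the original flow; it reuses the names defined in KafkaStreaming.run):

jsonsjournalArticleLines.foreachRDD(rdd => {
  if (!rdd.isEmpty()) {
    // Write the batch to MongoDB/Solr/Neo4j first ...
    val batch = ArrayBuffer(rdd.map(_._2).collect(): _*)
    kafkaProducerSendJournalArticle(batch)
    // ... then record the consumed offsets in ZooKeeper
    journalArticleManager.updateZKOffsets(rdd)
  }
})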

SolrAdd.scala file

import scalaj.http.Http
// Post data to Solr
//Author:zty
class SolrAdd () {
//  dataType receives the name of the Solr collection; jsonString receives the document as a JSON string
  def postToSolr (dataType:String, jsonString: String): Unit = {
    var data = "{'add':{ 'doc':" + jsonString + ",'boost':1.0,'overwrite':true,'commitWithin':1000}}"
    val result = Http("http://IP:8985/solr/"+dataType+"/update?wt=json")
          .postData(data)
          .header("Content-Type", "application/json").asString
    println(result)
  }
}
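
A minimal usage sketch, assuming a Solr collection named JournalArticle exists on the configured host (the document fields are made up; the commitWithin of 1000 ms in postToSolr means the document should become searchable within about a second):

val solr = new SolrAdd()
solr.postToSolr("JournalArticle", "{'id':'test-0001','title':'An example article'}")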

  
