Custom Flume ES Sink plugin: writing Avro-format data to ES
package com.vacp.collecor;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.vacp.common.AvroHelper;
import com.vacp.common.DateUtils;
import com.vacp.common.ObjectSerializeUtils;
import com.vacp.es.ESClientFactory;
import com.videtek.kafka.VehiclePassingInfo;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import static com.vacp.es.ESClientFactory.saveToEsBulk;
public class AvroVehicleCollector extends AbstractSink implements Configurable {

    private Logger logger = LoggerFactory.getLogger(AvroVehicleCollector.class);
    private String hostname;
    private String port;
    String dayTime;
    private int batchSize;
    public static boolean outlogger = false;
    private boolean bulktype = true; // default: update

    public AvroVehicleCollector() {
        logger.info("VehicleUpdateCollector start...");
    }

    @Override
    public void configure(Context context) {
        hostname = context.getString("hostname");
        Preconditions.checkNotNull(hostname, "es hostname must be set!!");
        dayTime = context.getString("daytime");
        Preconditions.checkNotNull(dayTime, "daytime must be set!! example: 6,18");
        batchSize = context.getInteger("batchSize", 100);
        outlogger = "true".equals(context.getString("outlogger"));
        bulktype = !"create".equals(context.getString("bulktype"));
        Preconditions.checkArgument(batchSize > 0, "batchSize must be a positive number!!");
        ESClientFactory.init(hostname);
    }
    @Override
    public void start() {
        super.start();
    }

    @Override
    public void stop() {
        super.stop();
    }
    @Override
    public Status process() throws EventDeliveryException {
        Status result = Status.READY;
        Channel channel = getChannel();
        Transaction transaction = null;
        try {
            transaction = channel.getTransaction();
            transaction.begin();
            Event event = null;
            String content = null;
            StringBuilder vehicleList = new StringBuilder();
            int maxsize = 0;
            try {
                for (int i = 0; i < batchSize; i++) {
                    maxsize++;
                    event = channel.take();
                    if (event != null) {
                        // process the event
                        AvroHelper helper = new AvroHelper();
                        VehiclePassingInfo vehicle = helper.deserialize(VehiclePassingInfo.class, event.getBody());
                        content = ObjectSerializeUtils.toJSON(vehicle);
                        JSONObject vehicleMap = JSON.parseObject(content);
                        String pass_time = String.valueOf(vehicleMap.get("pass_time"));
                        // format the timestamp as yyyy-MM-dd HH:mm:ss
                        pass_time = DateUtils.formatDatestamp(pass_time);
                        if (StringUtils.isBlank(pass_time)) {
                            continue;
                        }
                        // precomputed HHmmss field speeds up time-of-day queries;
                        // script-filtering "every day from X to Y o'clock over several months" is inefficient
                        int timeNum = getTimeNum(pass_time);
                        String indexName = DateUtils.getIndexName("vacp", pass_time);
                        String typeName = DateUtils.getTypeName("vehicle", pass_time);
                        // mark_time: the time the record is written (assumed; the declaration is missing from the original listing)
                        String mark_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
                        // daynight: 1 if pass_time falls inside the configured daytime hours, otherwise 0
                        int daynight = checkDayTime(pass_time);
                        vehicleMap.put("pass_time", pass_time);
                        vehicleMap.put("timenum", timeNum);
                        vehicleMap.put("mark_time", mark_time);
                        String vehicleId = vehicleMap.getString("vehicle_id");
                        String tollgateId = vehicleMap.getString("tollgate_id");
                        String plate_no = vehicleMap.getString("plate_no");
                        vehicleMap.put("daynight", daynight);
                        vehicleList.append("{ \"index\":{");
                        vehicleList.append("\"_id\":\"" + vehicleId + "\",\"_index\":\"" + indexName + "\",\"_type\":\"" + typeName + "\"}}");
                        vehicleList.append("\r\n");
                        vehicleList.append(JSON.toJSONString(vehicleMap));
                        vehicleList.append("\r\n");
                    } else {
                        result = Status.BACKOFF;
                        break;
                    }
                }
            } catch (Exception e) {
                logger.error("channel.take() failed after " + maxsize + " events", e);
                Throwables.propagate(e);
            }
            if (vehicleList.length() > 0) {
                // if the bulk submit itself fails, no rollback is performed
                if (outlogger) {
                    System.out.println("==========POST Vehicle JSON====================");
                    logger.info(vehicleList.toString());
                }
                saveToEsBulk(vehicleList.toString());
            }
            transaction.commit(); // the commit guarantees events are not lost
        } catch (Exception e) {
            if (transaction != null) {
                transaction.rollback();
            }
            logger.error("Failed to commit transaction. Transaction rolled back.", e);
            Throwables.propagate(e);
        } finally {
            if (transaction != null) {
                transaction.close();
                logger.debug("close Transaction");
            }
        }
        return result;
    }
    private int checkDayTime(String passTime) {
        Date dt = DateUtils.parseDate(passTime);
        Calendar cal = Calendar.getInstance();
        cal.setTime(dt);
        int hour = cal.get(Calendar.HOUR_OF_DAY);
        int start = Integer.parseInt(dayTime.split(",")[0]);
        int end = Integer.parseInt(dayTime.split(",")[1]);
        // 1 = hour falls inside the configured [start, end) daytime window, 0 = night
        if (hour >= start && hour < end) {
            return 1;
        } else {
            return 0;
        }
    }

    private int getTimeNum(String passTime) {
        // "yyyy-MM-dd HH:mm:ss" -> HHmmss00 as an integer, e.g. "... 08:15:30" -> 8153000
        return Integer.valueOf(passTime.substring(11).replace(":", "") + "00");
    }
}
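
ESClientFactory and its saveToEsBulk method are imported but never shown in the post. For orientation only, here is a minimal sketch of what such a helper might look like, assuming it simply POSTs the newline-delimited payload built in process() to Elasticsearch's _bulk endpoint; the class name, method signature, and error handling are illustrative assumptions, not the actual implementation:

// Hypothetical sketch of a bulk helper -- the real ESClientFactory is not part of this post.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class EsBulkSketch {

    // POST an NDJSON payload (pairs of action line + document line, each ending in a newline)
    // to the Elasticsearch _bulk endpoint and return the raw response body.
    public static String saveToEsBulk(String esHost, String bulkBody) throws Exception {
        URL url = new URL("http://" + esHost + "/_bulk");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-ndjson");
        try (OutputStream out = conn.getOutputStream()) {
            out.write(bulkBody.getBytes(StandardCharsets.UTF_8));
        }
        StringBuilder response = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                response.append(line);
            }
        } finally {
            conn.disconnect();
        }
        // The caller should inspect the "errors" flag in the response, since individual
        // index actions can fail even when the HTTP request itself succeeds.
        return response.toString();
    }
}

In the sink above, this role is played by ESClientFactory.init(hostname) together with the statically imported saveToEsBulk; each event contributes an action line ({ "index": {...} }) plus a document line to the payload.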
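To deploy the sink, the compiled class and its dependencies must be on the Flume agent's classpath (for example under the plugins.d directory), and the agent configuration points the sink type at the fully qualified class name. A minimal, illustrative configuration sketch follows; agent, sink, and channel names as well as the host address are placeholders, the property keys are the ones configure() reads, and whether hostname should include the port depends on ESClientFactory.init(), which is not shown:

# illustrative Flume agent snippet
agent.sinks = esSink
agent.sinks.esSink.type = com.vacp.collecor.AvroVehicleCollector
# Elasticsearch address handed to ESClientFactory.init()
agent.sinks.esSink.hostname = 192.168.1.10:9200
# daytime window as "start,end" hours, used for the daynight field
agent.sinks.esSink.daytime = 6,18
# number of events taken from the channel per transaction (default 100)
agent.sinks.esSink.batchSize = 100
# set to true to log every bulk payload
agent.sinks.esSink.outlogger = false
# any value other than "create" keeps the default update-style bulk
agent.sinks.esSink.bulktype = update
agent.sinks.esSink.channel = memoryChannel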