flume自定義 ES Sink外掛,AVRO格式資料寫入ES
package com.vacp.collecor;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.vacp.common.AvroHelper;
import com.vacp.common.DateUtils;
import com.vacp.common.ObjectSerializeUtils;
import com.vacp.es.ESClientFactory;
import com.videtek.kafka.VehiclePassingInfo;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
import static com.vacp.es.ESClientFactory.saveToEsBulk;
public class AvroVehicleCollectorextends AbstractSinkimplements Configurable{
private Loggerlogger = LoggerFactory.getLogger(AvroVehicleCollector.class);
private Stringhostname;
private Stringport;
StringdayTime ;
private int batchSize;
public static boolean outlogger =false;
private boolean bulktype=true;//預設 update
public AvroVehicleCollector() {
logger.info("VehicleUpdateCollector start...");
}
@Override
public void configure(Context context) {
hostname = context.getString("hostname");
Preconditions.checkNotNull(hostname, "es hostname must be set!!");
dayTime = context.getString("daytime");
Preconditions.checkNotNull(dayTime, "daytime must be set!!example:6,18");
batchSize = context.getInteger("batchSize", 100);
outlogger ="true".equals(context.getString("outlogger"));
bulktype ="create".equals(context.getString("bulktype"))?false:true;
Preconditions.checkNotNull(dayTime, "daytime must be set!!example:6,18");
Preconditions.checkNotNull(batchSize >0, "batchSize must be a positive number!!");
ESClientFactory.init(hostname);
}
@Override
public void start() {
super.start();
}
@Override
public void stop() {
super.stop();
}
@Override
public Statusprocess()throws EventDeliveryException {
Status result = Status.READY;
Channel channel = getChannel();
Transaction transaction =null;
try {
transaction = channel.getTransaction();
transaction.begin();
Event event =null;
String content =null;
StringBuilder vehicleList =new StringBuilder();
int maxsize=0;
try {
for (int i =0; i
maxsize++;
event = channel.take();
if (event !=null) {
//對事件進行處理
AvroHelper helper =new AvroHelper();
VehiclePassingInfo vehicle= helper.deserialize(VehiclePassingInfo.class, event.getBody());
content = ObjectSerializeUtils.toJSON(vehicle);
//System.out.println(content);
JSONObject vehicleMap = JSON.parseObject(content);
Stringpass_time=String.valueOf(vehicleMap.get("pass_time"));
//時間戳 格式化yyyy-MM-dd HH:mm:ss
pass_time=DateUtils.formatDatestamp(pass_time);
if(StringUtils.isBlank(pass_time)) {
continue;
}
//HHmmss 時段查詢處理優化查詢速度
//script過濾某幾個月內 每天幾點到幾點的資料效率不高
int timeNum = getTimeNum(pass_time);
String indexName = DateUtils.getIndexName("vacp", pass_time);
String typeName = DateUtils.getTypeName("vehicle", pass_time);
vehicleMap.put("pass_time", pass_time);
vehicleMap.put("timenum", timeNum);
vehicleMap.put("mark_time", mark_time);
String vehicleId = vehicleMap.getString("vehicle_id");
String tollgateId = vehicleMap.getString("tollgate_id");
Stringplate_no= vehicleMap.getString("plate_no");
vehicleMap.put("daynight", daynight);
vehicleList.append("{ \"index\":{");
vehicleList.append("\"_id\":\"" + vehicleId +"\",\"_index\":\"" + indexName +"\",\"_type\":\"" + typeName +"\"}}");
vehicleList.append("\r\n");
vehicleList.append(JSON.toJSONString(vehicleMap));
vehicleList.append("\r\n");
}else {
result = Status.BACKOFF;
break;
}
}
}catch (Exception e){
logger.error("channel.take();." +maxsize, e);
Throwables.propagate(e);
}
if (vehicleList.length() >0) {
//提交失敗不做回滾
if(outlogger) {
System.out.println("==========POST Vehicle JSON====================");
logger.info(vehicleList.toString());
}
saveToEsBulk(vehicleList.toString());
}
result = Status.READY ;
transaction.commit();//通過 commit 機制確保資料不丟失
}catch (Exception e) {
transaction.rollback();
e.printStackTrace();
logger.error("Failed to commit transaction." +
"Transaction rolled back.", e);
Throwables.propagate(e);
}finally {
if (transaction !=null) {
transaction.close();
logger.debug("close Transaction");
}
}
return result;
}
private int checkDayTime(String passTime){
Date dt = DateUtils.parseDate(passTime);
Calendar cal = Calendar.getInstance();
cal.setTime(dt);
int hour = cal.get(Calendar.HOUR_OF_DAY);
int start = Integer.parseInt(dayTime.split(",")[0]);
int end = Integer.parseInt(dayTime.split(",")[1]);
if(hour>=start&&hour>=end) {
return 1;
}else{
return 0;
}
}
private int getTimeNum(String passTime){
return Integer.valueOf(passTime.substring(11).replace(":","")+"00");
}
}
相關文章
- es 自定義分詞外掛分詞
- 用 ES6 寫全屏滾動外掛
- 使用 ES-Hadoop 將 Spark Streaming 流資料寫入 ESHadoopSpark
- Elasticsearch Lucene 資料寫入原理 | ES 核心篇Elasticsearch
- ES外掛常見問題
- 自定義log4j的appender寫es日誌APP
- Es叢集搭建,及常用外掛
- mybatis 自定義外掛MyBatis
- ES5 / ES6 自定義錯誤型別比較型別
- ES寫入效能優化優化
- flink 透過繼承RichSinkFunction實現自定義sink,將資料錄入資料庫繼承Function資料庫
- 基於hanlp的es分詞外掛HanLP分詞
- gradle自定義外掛Gradle
- 如何在MapReduce中使用Avro資料格式?VR
- 【複習資料】ES6/ES7/ES8/ES9資料整理(個人整理)
- vue自定義全域性元件(或自定義外掛)Vue元件
- 基於 HanLP 的 ES 中文分詞外掛HanLP中文分詞
- ES資料聚合
- WM自定義桌面"今日外掛"
- apisix~自定義外掛的部署API
- Vue2自定義外掛的寫法-Vue.use()Vue
- Flink 實踐教程 - 入門(4):讀取 MySQL 資料寫入到 ESMySql
- elasticsearch bulk資料--ES批量匯入json資料ElasticsearchJSON
- 自定義Gradle-Plugin 外掛GradlePlugin
- Kube-Scheduler外掛的自定義
- jQuery如何製作自定義外掛jQuery
- Cordova學習--iOS自定義外掛iOS
- Gradle自定義外掛詳解Gradle
- gulp進階-自定義gulp外掛
- 【django學習-24】自定義外掛Django
- Flume學習系列(六)---- Logger Sink原始碼解讀與自定原始碼
- FlinkSQL寫入Kafka/ES/MySQL示例-JAVAKafkaMySqlJava
- Flume-ng HDFS sink原理解析
- 02【線上日誌分析】之基於Flume-ng Exec Source開發自定義外掛AdvancedExecSource
- 編寫jquery常用外掛的基本格式jQuery
- gradle自定義任務和外掛Gradle
- APISIX Ingress 如何支援自定義外掛API
- 自定義jquery外掛簡單介紹jQuery