storm kafka外掛使用案例
一、pom引用
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jiankunking</groupId>
<artifactId>kafkastorm</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<url>http://blog.csdn.net/jiankunking</url>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<!-- test scope intentionally disabled for this demo -->
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId>
<version>1.1.0</version>
<!-- Keep "provided" commented out while debugging locally (the IDE needs
     storm-core on the classpath); re-enable it before packaging for
     deployment, since the Storm cluster supplies storm-core at runtime. -->
<!--<scope>provided</scope>-->
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.10.1.1</version>
<!-- Exclude zookeeper/log4j/slf4j-log4j12 to avoid classpath conflicts with
     the versions storm-core already brings in. -->
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-kafka</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.3</version>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Builds the fat "jar-with-dependencies" that gets submitted to Storm. -->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
二、自定義bolt
package com.jiankunking.stormkafka.bolts;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Tuple;
/**
* Created by jiankunking on 2017/4/29 11:15.
*/
public class CustomBolt extends BaseBasicBolt {
public void execute(Tuple input, BasicOutputCollector collector) {
String sentence = input.getString(0);
System.out.println(sentence);
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
System.out.println("declareOutputFields");
}
}
三、自定義Scheme
package com.jiankunking.stormkafka.schemes;
import org.apache.storm.spout.Scheme;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.util.List;
/**
* Created by jiankunking on 2017/4/22 10:52.
*/
public class MessageScheme implements Scheme {
private static final Logger LOGGER;
static {
LOGGER = LoggerFactory.getLogger(MessageScheme.class);
}
public List<Object> deserialize(ByteBuffer byteBuffer) {
String msg = this.getString(byteBuffer);
return new Values(msg);
}
public Fields getOutputFields() {
return new Fields("msg");
}
private String getString(ByteBuffer buffer) {
Charset charset = null;
CharsetDecoder decoder = null;
CharBuffer charBuffer = null;
try {
charset = Charset.forName("UTF-8");
decoder = charset.newDecoder();
//用這個的話,只能輸出來一次結果,第二次顯示為空
// charBuffer = decoder.decode(buffer);
charBuffer = decoder.decode(buffer.asReadOnlyBuffer());
return charBuffer.toString();
} catch (Exception ex) {
LOGGER.error("Cannot parse the provided message!" + ex.toString());
return "error";
}
}
}
四、自定義拓撲圖入口類
package com.jiankunking.stormkafka.topologies;
import com.jiankunking.stormkafka.bolts.CustomBolt;
import com.jiankunking.stormkafka.schemes.MessageScheme;
import com.jiankunking.stormkafka.util.PropertiesUtil;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.kafka.BrokerHosts;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;
import java.util.Arrays;
import java.util.Map;
/**
* Created by jiankunking on 2017/4/19 16:27.
*/
/**
 * Topology entry point: wires a KafkaSpout (reading the topic configured in
 * application.properties) to a CustomBolt that prints each message.
 * <p>
 * Created by jiankunking on 2017/4/19 16:27.
 */
public class CustomCounterTopology {

    /** Fallback ZooKeeper client port used when zk_port is missing or empty. */
    private static final int DEFAULT_ZK_PORT = 2181;

    /**
     * Builds and submits the topology.
     * <p>
     * With at least one command-line argument the topology is submitted to a
     * real cluster; with no arguments it runs inside an in-process
     * {@link LocalCluster} for debugging.
     *
     * @param args any argument switches from local mode to cluster submission
     * @throws AlreadyAliveException    declared for API compatibility; cluster
     *                                  submission failures are caught and logged
     * @throws InvalidTopologyException declared for API compatibility
     */
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
        PropertiesUtil propertiesUtil = new PropertiesUtil("/application.properties", false);
        // Wildcard type instead of a raw Map; only Object-returning get() is used.
        Map<?, ?> propsMap = propertiesUtil.getAllProperty();
        String zks = propsMap.get("zk_hosts").toString();
        String topic = propsMap.get("kafka.topic").toString();
        String zkRoot = propsMap.get("zk_root").toString();
        String zkPort = propsMap.get("zk_port").toString();
        String zkId = propsMap.get("zk_id").toString();

        // The spout stores consumed offsets in ZooKeeper under zkRoot/zkId.
        BrokerHosts brokerHosts = new ZkHosts(zks);
        SpoutConfig spoutConfig = new SpoutConfig(brokerHosts, topic, zkRoot, zkId);
        spoutConfig.zkServers = Arrays.asList(zks.split(","));
        if (zkPort != null && zkPort.length() > 0) {
            spoutConfig.zkPort = Integer.parseInt(zkPort);
        } else {
            spoutConfig.zkPort = DEFAULT_ZK_PORT;
        }
        spoutConfig.scheme = new SchemeAsMultiScheme(new MessageScheme());

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("kafkaSpout", new KafkaSpout(spoutConfig));
        builder.setBolt("customCounterBolt", new CustomBolt(), 1).shuffleGrouping("kafkaSpout");

        Config conf = new Config();
        conf.setDebug(false);
        if (args != null && args.length > 0) {
            // Cluster mode: submission failures are logged, not rethrown.
            try {
                StormSubmitter.submitTopologyWithProgressBar("customCounterTopology", conf, builder.createTopology());
            } catch (AlreadyAliveException e) {
                e.printStackTrace();
            } catch (InvalidTopologyException e) {
                e.printStackTrace();
            } catch (AuthorizationException e) {
                e.printStackTrace();
            }
        } else {
            conf.setMaxTaskParallelism(3);
            // Local mode: run the topology in-process for IDE debugging.
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("CustomCounterTopology", conf, builder.createTopology());
        }
    }
}
五、配置檔案application.properties
kafka.topic=test_one
# zookeeper
zk_hosts=10.10.10.10
zk_root=/kafka
zk_port=2181
# kafka消費組
zk_id=kafkaspout
demo下載地址:http://download.csdn.net/detail/xunzaosiyecao/9829058
https://github.com/JianKunKing/storm-kafka-plugin-demo
作者:jiankunking 出處:http://blog.csdn.net/jiankunking
相關文章
- storm-kafka-client使用ORMKafkaclient
- Eclipse安裝lombok外掛及外掛使用案例EclipseLombok
- Storm 系列(九)—— Storm 整合 KafkaORMKafka
- Storm系列(六)storm和kafka整合ORMKafka
- Storm與kafka整合ORMKafka
- kafka+storm+hbaseKafkaORM
- apisix~kafka-logger外掛APIKafka
- SpringBoot整合Kafka和StormSpring BootKafkaORM
- 安裝nginx-kafka外掛NginxKafka
- storm簡單案例ORM
- storm(1.1.3)與kafka(1.0.0)整合ORMKafka
- babel 外掛開發案例Babel
- PHP外掛系統的實現(七):外掛案例PHP
- 使用Storm、Kafka和ElasticSearch處理實時資料 -javacodegeeksORMKafkaElasticsearchJava
- Headshot外掛如何使用?Headshot外掛使用教程
- HtmlWebpackPlugin外掛和HtmlWebpackInlineSourcePlugin外掛的使用HTMLWebPlugininline
- fastadmin的【外掛管理】外掛使用教程AST
- videojs外掛使用IDEJS
- Head 外掛使用
- alpakka-kafka(6)-kafka應用案例,使用者介面Kafka
- 大資料6.1 - 實時分析(storm和kafka)大資料ORMKafka
- IDEA使用lombok外掛IdeaLombok
- 使用 VSCode Remote 外掛VSCodeREM
- burp外掛的使用
- vscode外掛使用包VSCode
- 工具篇---xcode外掛使用XCode
- Windows下使用ideagit外掛WindowsIdeaGit
- Androidstudio adb wifi外掛使用AndroidWiFi
- Vue-router外掛使用Vue
- 使用podspec建立iOS外掛iOS
- apisix~限流外掛的使用API
- flask 外掛使用方法Flask
- Chrome 外掛特性及實戰場景案例分析Chrome
- kafka入門案例Kafka
- IntelliJ IDEA 外掛Rest Client 外掛使用過程遇到的問題IntelliJIdeaRESTclient
- [需求建議]外掛外部url控制器直接使用外掛配置
- CocoaPods工程使用外掛App ExtensionAPP
- 我使用的 Sublime Text 外掛
- uniapp使用z-paging外掛APP