storm kafka外掛使用案例

衣舞晨風發表於2017-04-29

一、pom引用

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>jiankunking</groupId>
    <artifactId>kafkastorm</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <url>http://blog.csdn.net/jiankunking</url>
    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <!--<scope>test</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>1.1.0</version>
            <!-- For local debugging keep the scope commented out;
                 re-enable "provided" before packaging for cluster deployment,
                 because the Storm worker already supplies storm-core at runtime. -->
            <!--<scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.11</artifactId>
            <version>0.10.1.1</version>
            <!-- Exclude zookeeper/log4j/slf4j-log4j12 to avoid classpath
                 conflicts with the versions storm-core brings in. -->
            <exclusions>
                <exclusion>
                    <groupId>org.apache.zookeeper</groupId>
                    <artifactId>zookeeper</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-kafka</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.3.3</version>
        </dependency>

    </dependencies>

    <build>
        <plugins>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <!-- Storm 1.x and Kafka 0.10.x require at least Java 7;
                         compiling at 1.6 mixes older classfiles with
                         newer-classfile dependencies. -->
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

二、自定義bolt

package com.jiankunking.stormkafka.bolts;

import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Tuple;

/**
 * Created by jiankunking on 2017/4/29 11:15.
 */
public class CustomBolt extends BaseBasicBolt {

    public void execute(Tuple input, BasicOutputCollector collector) {
        String sentence = input.getString(0);
        System.out.println(sentence);
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        System.out.println("declareOutputFields");
    }
}

三、自定義Scheme

package com.jiankunking.stormkafka.schemes;


import org.apache.storm.spout.Scheme;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.util.List;

/**
 * Created by jiankunking on 2017/4/22 10:52.
 */
public class MessageScheme implements Scheme {

    private static final Logger LOGGER;

    static {
        LOGGER = LoggerFactory.getLogger(MessageScheme.class);
    }

    public List<Object> deserialize(ByteBuffer byteBuffer) {
        String msg = this.getString(byteBuffer);
        return new Values(msg);
    }

    public Fields getOutputFields() {
        return new Fields("msg");
    }

    private String getString(ByteBuffer buffer) {
        Charset charset = null;
        CharsetDecoder decoder = null;
        CharBuffer charBuffer = null;
        try {
            charset = Charset.forName("UTF-8");
            decoder = charset.newDecoder();
            //用這個的話,只能輸出來一次結果,第二次顯示為空
            // charBuffer = decoder.decode(buffer);
            charBuffer = decoder.decode(buffer.asReadOnlyBuffer());
            return charBuffer.toString();
        } catch (Exception ex) {
            LOGGER.error("Cannot parse the provided message!" + ex.toString());
            return "error";
        }
    }
}

四、自定義拓撲圖入口類

package com.jiankunking.stormkafka.topologies;


import com.jiankunking.stormkafka.bolts.CustomBolt;
import com.jiankunking.stormkafka.schemes.MessageScheme;
import com.jiankunking.stormkafka.util.PropertiesUtil;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.kafka.BrokerHosts;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;

import java.util.Arrays;
import java.util.Map;

/**
 * Created by jiankunking on 2017/4/19 16:27.
 */
/**
 * Topology entry point: wires a KafkaSpout (configured from
 * application.properties) to CustomBolt.
 */
public class CustomCounterTopology {

    /**
     * Submits the topology. With command-line arguments it submits to the
     * cluster; without arguments it runs in an in-process LocalCluster.
     *
     * @param args any argument switches to cluster submission
     * @throws AlreadyAliveException    if a topology with this name is already running
     * @throws InvalidTopologyException if the topology definition is invalid
     */
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
        PropertiesUtil propertiesUtil = new PropertiesUtil("/application.properties", false);
        Map propsMap = propertiesUtil.getAllProperty();
        String zks = propsMap.get("zk_hosts").toString();
        String topic = propsMap.get("kafka.topic").toString();
        String zkRoot = propsMap.get("zk_root").toString();
        String zkPort = propsMap.get("zk_port").toString();
        String zkId = propsMap.get("zk_id").toString();

        // ZkHosts tracks Kafka brokers via the brokers' ZooKeeper ensemble;
        // zkId doubles as the consumer-group node under zkRoot for offset storage.
        BrokerHosts brokerHosts = new ZkHosts(zks);
        SpoutConfig spoutConfig = new SpoutConfig(brokerHosts, topic, zkRoot, zkId);
        spoutConfig.zkServers = Arrays.asList(zks.split(","));
        if (zkPort != null && zkPort.length() > 0) {
            spoutConfig.zkPort = Integer.parseInt(zkPort);
        } else {
            // Fall back to ZooKeeper's default client port.
            spoutConfig.zkPort = 2181;
        }
        spoutConfig.scheme = new SchemeAsMultiScheme(new MessageScheme());

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("kafkaSpout", new KafkaSpout(spoutConfig));
        builder.setBolt("customCounterBolt", new CustomBolt(), 1).shuffleGrouping("kafkaSpout");

        Config conf = new Config();
        conf.setDebug(false);
        if (args != null && args.length > 0) {
            // Submit to the cluster.
            try {
                StormSubmitter.submitTopologyWithProgressBar("customCounterTopology", conf, builder.createTopology());
            } catch (AlreadyAliveException e) {
                e.printStackTrace();
            } catch (InvalidTopologyException e) {
                e.printStackTrace();
            } catch (AuthorizationException e) {
                e.printStackTrace();
            }
        } else {
            conf.setMaxTaskParallelism(3);
            // Run in local (in-process) mode for debugging.
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("CustomCounterTopology", conf, builder.createTopology());
        }
    }
}

五、配置檔案application.properties

kafka.topic=test_one
# zookeeper
zk_hosts=10.10.10.10
zk_root=/kafka
zk_port=2181
# kafka消費組
zk_id=kafkaspout

demo下載地址:http://download.csdn.net/detail/xunzaosiyecao/9829058

https://github.com/JianKunKing/storm-kafka-plugin-demo

作者:jiankunking 出處:http://blog.csdn.net/jiankunking

相關文章