storm 初識編碼
#storm.yaml 配置
#zookeeper
storm.zookeeper.servers:
    - "bigdata01"
    - "bigdata02"
    - "bigdata03"
#local state directory
storm.local.dir: "/apps/storm"
#nimbus master
nimbus.seeds: ["bigdata00"]
#worker ports (one slot per port)
supervisor.slots.ports:
    - 6700
    - 6701
    - 6702
    - 6703

啟動命令 (start commands)
nohup bin/storm nimbus &
nohup bin/storm supervisor &
nohup bin/storm ui &
--------------------------------------------------------------------------------------
package com.hgs.storm;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

/**
 * Word-count demo topology: a spout emits a fixed sentence, a split bolt
 * tokenizes it on spaces, and a count bolt keeps per-word running totals.
 *
 * <p>With a command-line argument the topology is submitted to a real
 * cluster under that name; without arguments it runs in a LocalCluster.
 */
public class StormWordCountTest {

    public static void main(String[] args)
            throws AlreadyAliveException, InvalidTopologyException,
                   AuthorizationException, InterruptedException {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("wordspout", new WordCountSpout(), 3);
        // BaseRichBolt already implements IRichBolt; no cast needed.
        builder.setBolt("splitword", new WordSpliteBolt(), 2).shuffleGrouping("wordspout");
        // "word" is the first field declared by WordSpliteBolt.declareOutputFields,
        // so identical words are routed to the same WordCountBolt task.
        builder.setBolt("wordcount", new WordCountBolt(), 2).fieldsGrouping("splitword", new Fields("word"));

        Config config = new Config();
        config.setNumWorkers(2);

        if (args != null && args.length > 0) {
            // Cluster mode: args[0] is the topology name.
            StormSubmitter.submitTopology(args[0], config, builder.createTopology());
        } else {
            // Local mode for development; runs until the JVM is killed.
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("words-count", config, builder.createTopology());
        }
    }
}

/**
 * Spout that repeatedly emits one fixed sentence under the field "message".
 * NOTE(review): nextTuple emits as fast as Storm calls it — fine for a demo,
 * but a real spout would read from an external source or throttle.
 */
class WordCountSpout extends BaseRichSpout {

    private static final long serialVersionUID = 1L;
    // Collector obtained in open(); used by nextTuple() to emit tuples.
    SpoutOutputCollector collector = null;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void nextTuple() {
        collector.emit(new Values(" this is my first storm program so i hope it will success"));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("message"));
    }
}

/**
 * Bolt that splits each incoming sentence on single spaces and emits one
 * ("word", 1) tuple per token.
 */
class WordSpliteBolt extends BaseRichBolt {

    private static final long serialVersionUID = 1L;
    OutputCollector collector = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String line = input.getString(0);
        String[] words = line.split(" ");
        for (String wd : words) {
            collector.emit(new Values(wd, 1));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "num"));
    }
}

/**
 * Terminal bolt that accumulates a running count per word and prints it.
 * Declares no output fields because nothing is emitted downstream.
 */
class WordCountBolt extends BaseRichBolt {

    private static final long serialVersionUID = 1L;
    // Shared across executor threads of this bolt instance, hence concurrent.
    ConcurrentHashMap<String, Integer> wordsMap = new ConcurrentHashMap<String, Integer>();
    OutputCollector collector = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getString(0);
        Integer num = input.getInteger(1);
        // merge() is a single atomic update; the original containsKey/get/put
        // sequence could lose increments under concurrent execution.
        Integer total = wordsMap.merge(word, num, Integer::sum);
        System.out.println(word + "----" + total);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Terminal bolt: intentionally declares no output fields.
    }
}
來自 “ ITPUB部落格 ” ,連結:http://blog.itpub.net/31506529/viewspace-2214253/,如需轉載,請註明出處,否則將追究法律責任。
相關文章
- 初識彙編
- Storm的wordcount程式碼編寫與分析
- Storm系列(三)java編寫第一個storm程式
- 初識shell指令碼
- 格式化輸出,while迴圈,運算子,初識編碼
- HikariPool原始碼(一)初識
- 初識彙編-第一篇
- ReactNative原始碼解析-初識
- iOS逆向學習筆記 - 彙編(一) - 初識彙編
- 彙編初識與計算機組成
- 運維之shell指令碼初識
- Netty原始碼分析(七):初識ChannelPipeline
- Storm 系列(九)—— Storm 整合 Kafka
- NNLM初認識以及相關程式碼
- SOFAJRaft原始碼閱讀(伍)-初識RheaKV
- Storm
- 通過TCP碼流識別編碼
- storm叢集啟動停止指令碼
- shell 指令碼如何編寫-致初學者
- Storm系列(六)storm和kafka整合
- Java初識
- springboot 初識
- 初識Java
- 初識html
- 初識Kubernetes
- Spring 初識
- 初識Web
- 初識WebAssembly
- 初識Fink
- AsterixDB初識
- 初識PostgreSql
- 初識DevOps
- 初識 Nodejs
- 初識Redis
- 初識 Shell
- 初識 redux
- 初識 SpringMVC
- rocketmq初識