Storm programming model
@@ -0,0 +1,4 @@
worker-id: 7b8e6dbf-1e3e-4368-8f0c-1a4936042ca7
logs.users: []
logs.groups: []
topology.submitter.user: ciic

@@ -0,0 +1,4 @@
worker-id: 931219fd-8b9a-4333-9fda-5d1df11a258c
logs.users: []
logs.groups: []
topology.submitter.user: ciic

@@ -0,0 +1,4 @@
worker-id: 9cdf2e0f-b135-41c6-b3fd-1502afddf212
logs.users: []
logs.groups: []
topology.submitter.user: ciic

@@ -0,0 +1,4 @@
worker-id: 9837751b-8320-4651-b325-3c64898b976d
logs.users: []
logs.groups: []
topology.submitter.user: ciic
code/Storm/storm-word-count/pom.xml (new file, 32 lines)
@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>storm-word-count</artifactId>
    <version>1.0-SNAPSHOT</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>1.2.2</version>
        </dependency>
    </dependencies>

</project>
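Note: with storm-core at the default compile scope, this build is suited to running the topology in local mode from the IDE. If the jar were later submitted to a real Storm cluster, the usual adjustment (an assumption, not part of this commit) is to mark storm-core as provided so it is not bundled into the topology jar:

<dependency>
    <groupId>org.apache.storm</groupId>
    <artifactId>storm-core</artifactId>
    <version>1.2.2</version>
    <!-- supplied by the Storm workers at runtime; keep it off the submitted jar's classpath -->
    <scope>provided</scope>
</dependency>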
WordCountApp.java (new file)

@@ -0,0 +1,26 @@
package com.heibaiying.wordcount;

import com.heibaiying.wordcount.component.CountBolt;
import com.heibaiying.wordcount.component.DataSourceSpout;
import com.heibaiying.wordcount.component.SplitBolt;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;

public class WordCountApp {

    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        // Route the output of DataSourceSpout to SplitBolt for processing
        builder.setBolt("SplitBolt", new SplitBolt()).shuffleGrouping("DataSourceSpout");
        // Route the output of SplitBolt to CountBolt for processing
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

        // Create a local cluster for testing. In this mode Storm does not need to be
        // installed on the machine; just run this main method directly.
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("LocalWordCountTopology",
                new Config(), builder.createTopology());
    }

}
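For comparison with the LocalCluster test mode above, the same topology can be submitted to a real Storm cluster with StormSubmitter. The following is a minimal sketch, not part of this commit: it assumes Storm 1.2.x, a jar built with storm-core scoped as provided, and the class name ClusterWordCountApp and worker count are illustrative.

package com.heibaiying.wordcount;

import com.heibaiying.wordcount.component.CountBolt;
import com.heibaiying.wordcount.component.DataSourceSpout;
import com.heibaiying.wordcount.component.SplitBolt;
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;

// Hypothetical cluster-mode variant of WordCountApp (not part of this commit).
public class ClusterWordCountApp {

    public static void main(String[] args) throws AlreadyAliveException,
            InvalidTopologyException, AuthorizationException {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        builder.setBolt("SplitBolt", new SplitBolt()).shuffleGrouping("DataSourceSpout");
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

        Config config = new Config();
        config.setNumWorkers(2); // assumption: two worker processes

        // Submitted from a machine with the storm client installed, e.g.:
        // storm jar storm-word-count-1.0-SNAPSHOT.jar com.heibaiying.wordcount.ClusterWordCountApp
        StormSubmitter.submitTopology("ClusterWordCountTopology", config, builder.createTopology());
    }
}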
CountBolt.java (new file)

@@ -0,0 +1,40 @@
package com.heibaiying.wordcount.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import java.util.HashMap;
import java.util.Map;

public class CountBolt extends BaseRichBolt {

    private Map<String, Integer> counts = new HashMap<>();

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {

    }

    @Override
    public void execute(Tuple input) {
        String word = input.getStringByField("word");
        Integer count = counts.get(word);
        if (count == null) {
            count = 0;
        }
        count++;
        counts.put(word, count);
        // Print the running totals
        System.out.print("Current real-time word counts: ");
        counts.forEach((key, value) -> System.out.print(key + ":" + value + "; "));
        System.out.println();
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {

    }
}
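CountBolt is the terminal bolt of this topology, so it only prints the running totals and declares no output fields. If the counts needed to flow on to a further bolt or an external sink, a hedged variant could emit each updated pair instead; the class name EmittingCountBolt is illustrative and the sketch assumes the same Storm 1.2.x API.

package com.heibaiying.wordcount.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.HashMap;
import java.util.Map;

// Hypothetical variant (not part of this commit): emits (word, count) downstream
// instead of printing to stdout.
public class EmittingCountBolt extends BaseRichBolt {

    private Map<String, Integer> counts = new HashMap<>();
    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getStringByField("word");
        int count = counts.merge(word, 1, Integer::sum); // update the running total
        collector.emit(new Values(word, count));         // send the updated pair downstream
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}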
DataSourceSpout.java (new file)

@@ -0,0 +1,49 @@
package com.heibaiying.wordcount.component;

import org.apache.storm.shade.org.apache.commons.lang.StringUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.util.*;

public class DataSourceSpout extends BaseRichSpout {

    private List<String> list = Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive");

    private SpoutOutputCollector spoutOutputCollector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.spoutOutputCollector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Simulate a stream of incoming data, one line per second
        String lineData = productData();
        spoutOutputCollector.emit(new Values(lineData));
        Utils.sleep(1000);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }


    /**
     * Mock data: shuffles the word list and joins a random prefix of it
     * (between 1 and list.size() words) with tab separators.
     */
    private String productData() {
        Collections.shuffle(list);
        Random random = new Random();
        int endIndex = random.nextInt(list.size()) % (list.size()) + 1;
        return StringUtils.join(list.toArray(), "\t", 0, endIndex);
    }

}
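To see the shape of the lines the spout emits, the generation logic can be exercised on its own. The following standalone sketch only mirrors productData rather than calling the spout, and the class name ProductDataDemo is illustrative:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

// Standalone illustration of the spout's data generation: shuffle the word list,
// then join a random prefix of 1..size words with tabs.
public class ProductDataDemo {
    public static void main(String[] args) {
        List<String> list = new ArrayList<>(Arrays.asList("Spark", "Hadoop", "HBase", "Storm", "Flink", "Hive"));
        Random random = new Random();
        for (int i = 0; i < 5; i++) {
            Collections.shuffle(list);
            int endIndex = random.nextInt(list.size()) + 1; // 1..6 words
            System.out.println(String.join("\t", list.subList(0, endIndex)));
        }
    }
}

Each generated line is a tab-separated run of framework names, which is exactly the separator SplitBolt uses below.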
SplitBolt.java (new file)

@@ -0,0 +1,35 @@
package com.heibaiying.wordcount.component;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.Map;

public class SplitBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // Split each incoming line on tabs and emit every word as its own tuple
        String line = input.getStringByField("line");
        String[] words = line.split("\t");
        for (String word : words) {
            collector.emit(new Values(word));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}
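As written, DataSourceSpout emits tuples without message IDs, so Storm does not track them and neither bolt needs to call ack. If at-least-once processing were wanted, a hedged sketch of the changes (Storm 1.2.x API; msgId is an illustrative name):

// In DataSourceSpout.nextTuple(): attach a message ID so Storm tracks the tuple tree.
String msgId = UUID.randomUUID().toString();       // requires java.util.UUID
spoutOutputCollector.emit(new Values(lineData), msgId);

// In SplitBolt.execute(): anchor emitted words to the input tuple, then ack it.
for (String word : line.split("\t")) {
    collector.emit(input, new Values(word));        // anchored emit
}
collector.ack(input);                               // acknowledge successful processing

// CountBolt.execute() would likewise need collector.ack(input) on every tuple,
// and the spout would need to override ack/fail to decide what to replay.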