Main method: build the topology and submit it to a local cluster
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.TopologyBuilder;
import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.ZkHosts;

public class CountToMain {
    public static void main(String[] args) {
        String topic = "dsj03";          // Kafka topic to read from
        String zkRoot = "/dsj";          // ZooKeeper root path where the spout stores its offsets
        String spoutId = "kafkaSpout";   // spout id; an arbitrary name, also used as the offset node in ZooKeeper
        BrokerHosts zkHosts = new ZkHosts("zjgm01:2181,zjgm02:2181,zjgm03:2181");

        TopologyBuilder builder = new TopologyBuilder();
        SpoutConfig conf = new SpoutConfig(zkHosts, topic, zkRoot, spoutId);
        conf.forceFromStart = true;                                 // read the topic from the beginning
        conf.scheme = new SchemeAsMultiScheme(new MessageScheme()); // how to deserialize the raw Kafka bytes

        builder.setSpout(spoutId, new KafkaSpout(conf));
        // readBolt subscribes to the spout's stream; shuffle grouping distributes tuples randomly
        builder.setBolt("readBolt", new ReadBolt()).shuffleGrouping(spoutId);
        builder.setBolt("writeCountBolt", new WriteCountBolt()).shuffleGrouping("readBolt");

        // Everything above builds the topology; run it on an in-process local cluster for testing
        LocalCluster cluster = new LocalCluster();
        Config conf1 = new Config();
        conf1.setNumWorkers(4);
        cluster.submitTopology("count", conf1, builder.createTopology());
    }
}
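The topology above runs in an in-process LocalCluster, which is convenient for testing. As a rough sketch only, submitting the same topology to a real Storm cluster swaps LocalCluster for StormSubmitter; the class name CountToMainCluster is made up here, and the spout/bolt wiring is elided because it is identical to CountToMain.

import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.topology.TopologyBuilder;

public class CountToMainCluster {
    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        // ... set the Kafka spout and the two bolts exactly as in CountToMain ...
        Config conf = new Config();
        conf.setNumWorkers(4); // spread the topology over 4 worker processes
        // Submit to the cluster the storm client is configured for
        StormSubmitter.submitTopology("count", conf, builder.createTopology());
    }
}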
Reading the data (ReadBolt)
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class ReadBolt extends BaseBasicBolt {
    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        String s = tuple.getString(0);          // the spout emits one field per message, so read field 0
        int index = s.indexOf("user Phone :");  // returns -1 if the marker is not present
        if (index != -1) {
            // take everything after the 12-character marker "user Phone :";
            // with no end index, substring reads to the end of the line
            String s1 = s.substring(index + 12);
            basicOutputCollector.emit(new Values(s1));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("s"));
    }
}
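To make the marker arithmetic concrete, here is a small standalone sketch of the same extraction; the log line and phone number are invented for illustration.

public class ReadBoltParseDemo {
    public static void main(String[] args) {
        // Hypothetical log line in the format ReadBolt expects
        String line = "2019-01-01 10:00:00 login ok, user Phone :13800000000";
        String marker = "user Phone :";     // 12 characters
        int index = line.indexOf(marker);   // -1 if the marker is absent
        if (index != -1) {
            String phone = line.substring(index + marker.length());
            System.out.println(phone);      // prints 13800000000
        }
    }
}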
Writing the data (WriteCountBolt)
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

public class WriteCountBolt extends BaseBasicBolt {
    FileWriter fileWriter = null;
    Map<String, Integer> map = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        // prepare runs once when the bolt instance is initialized,
        // so the file writer and the counter map are created only once
        try {
            fileWriter = new FileWriter("d:\\storm\\yiju" + UUID.randomUUID().toString());
            map = new HashMap<>();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        String s = tuple.getString(0);
        Integer count = map.get(s);
        if (count == null) {
            map.put(s, 1);
        } else {
            map.put(s, count + 1);
        }
        try {
            fileWriter.write(s + " logged in " + map.get(s) + " times");
            fileWriter.write("\n");
            fileWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // terminal bolt: nothing to declare
    }
}
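To exercise the topology you need messages in the dsj03 topic. Below is a minimal sketch using the standard Kafka Java producer (org.apache.kafka.clients.producer); the broker address zjgm01:9092 and the sample messages are assumptions, since the original only lists the ZooKeeper hosts.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class PhoneLogProducerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Assumed broker address; only the ZooKeeper hosts appear in the original code
        props.put("bootstrap.servers", "zjgm01:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Invented sample lines in the format ReadBolt looks for
            producer.send(new ProducerRecord<>("dsj03", "login ok, user Phone :13800000000"));
            producer.send(new ProducerRecord<>("dsj03", "login ok, user Phone :13800000000"));
            producer.send(new ProducerRecord<>("dsj03", "login ok, user Phone :13900000001"));
        }
    }
}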
Declare how messages coming from Kafka are deserialized; without a scheme the spout's raw bytes cannot be read as text
import backtype.storm.spout.Scheme;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

import java.nio.charset.StandardCharsets;
import java.util.List;

public class MessageScheme implements Scheme {
    @Override
    public List<Object> deserialize(byte[] bytes) {
        // decode the raw Kafka payload into a UTF-8 string
        String msg = new String(bytes, StandardCharsets.UTF_8);
        return new Values(msg);
    }

    @Override
    public Fields getOutputFields() {
        return new Fields("msg");
    }
}
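This is the pre-1.0 Scheme interface (backtype.storm.spout.Scheme, which hands the payload over as byte[]). If you build against Storm 1.x instead, the interface lives in org.apache.storm.spout.Scheme and deserialize receives a ByteBuffer; an equivalent sketch, assuming that version, looks like this.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.storm.spout.Scheme;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

public class MessageScheme implements Scheme {
    @Override
    public List<Object> deserialize(ByteBuffer bytes) {
        // decode the raw Kafka payload as UTF-8 text
        String msg = StandardCharsets.UTF_8.decode(bytes).toString();
        return new Values(msg);
    }

    @Override
    public Fields getOutputFields() {
        return new Fields("msg");
    }
}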