package com.loong.storm.students;

import com.loong.commen.util.LogUtil;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.shade.org.apache.commons.io.FileUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Storm word-count topology: a spout reads text files line by line, a bolt
 * splits each line into words, and a second bolt maintains running counts.
 * Created by 梁浩峰 (Liang Haofeng) on 2016/8/13 12:19.
 */
public class WordCountStormTopology implements Serializable{
    public static class DataSourceSpout extends BaseRichSpout implements Serializable{
        private Map map;
        private TopologyContext topologyContext;
        private SpoutOutputCollector spoutOutputCollector;

        /**
         * Called once when this spout instance starts; caches the runtime handles.
         * @param map topology configuration
         * @param topologyContext runtime context of this task
         * @param spoutOutputCollector collector used to emit tuples to the next component
         */
        public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.spoutOutputCollector=spoutOutputCollector;
        }

        /**
         * Invoked by Storm in a tight loop. Reads every *.txt file under d:/test,
         * emits one tuple per line, then renames each processed file so it is not
         * picked up again on the next invocation.
         */
        public void nextTuple() {
            try {
                // List all files under the directory: path, accepted suffixes, recurse.
                Collection<File> files = FileUtils.listFiles(new File("d:/test"), new String[]{"txt"}, true);
                for (File file : files) {
                    // Read with an explicit charset rather than the platform default.
                    List<String> lines = FileUtils.readLines(file, "UTF-8");
                    // Emit each line of the file downstream.
                    for (String line : lines) {
                        this.spoutOutputCollector.emit(new Values(line));
                    }
                    // BUG FIX: previously the same file was re-read and re-emitted on
                    // every nextTuple() call, inflating the word counts without bound.
                    // Renaming it marks it as consumed.
                    FileUtils.moveFile(file, new File(file.getAbsolutePath() + ".done"));
                }
                // Avoid a busy spin when there is nothing new to read.
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so Storm can shut the task down cleanly.
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        /**
         * Declares the single output field "line" consumed by SplitBolt.
         * @param outputFieldsDeclarer field declarer for this spout's output stream
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("line"));
        }
    }


    public static class SplitBolt extends BaseRichBolt implements Serializable{
        private Map map;
        private TopologyContext topologyContext;
        private OutputCollector outputCollector;

        /**
         * Called once when this bolt instance starts; caches the runtime handles.
         * @param map topology configuration
         * @param topologyContext runtime context of this task
         * @param outputCollector collector used to emit and ack tuples
         */
        public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.outputCollector=outputCollector;
        }

        /**
         * Invoked once per incoming tuple. Splits the "line" field on spaces and
         * emits one tuple per word.
         * @param tuple tuple carrying the "line" field emitted by the spout
         */
        public void execute(Tuple tuple) {
            String line = tuple.getStringByField("line");
            if (line != null) {
                for (String word : line.split(" ")) {
                    LogUtil.log(word);
                    this.outputCollector.emit(new Values(word));
                }
            }
            // BUG FIX: BaseRichBolt does not auto-ack; without this the tuple would
            // eventually time out and be replayed by the spout.
            this.outputCollector.ack(tuple);
        }

        /**
         * Declares the single output field "word" consumed by CountBolt.
         * @param outputFieldsDeclarer field declarer for this bolt's output stream
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("word"));
        }
    }

    public static class CountBolt extends BaseRichBolt implements Serializable {
        private Map map;
        private TopologyContext topologyContext;
        private OutputCollector outputCollector;

        // Running word -> occurrence count. Safe as plain HashMap because Storm
        // calls execute() single-threaded per executor.
        private final Map<String,Long> hashMap = new HashMap<String,Long>();

        /**
         * Called once when this bolt instance starts; caches the runtime handles.
         * @param map topology configuration
         * @param topologyContext runtime context of this task
         * @param outputCollector collector used to ack tuples
         */
        public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.outputCollector=outputCollector;
        }

        /**
         * Invoked once per incoming tuple. Increments the running count for the
         * received word and logs the full tally.
         * @param tuple tuple carrying the "word" field emitted by SplitBolt
         */
        public void execute(Tuple tuple) {
            String word = tuple.getStringByField("word");
            // Increment the running total for this word (starts at 1 when unseen).
            Long value = hashMap.get(word);
            value = (value == null) ? 1L : value + 1L;   // 1L, not the easily-misread 0l
            hashMap.put(word, value);
            // Dump the current tally after every update.
            LogUtil.log("==========================================");
            for (Map.Entry<String,Long> en : hashMap.entrySet()) {
                LogUtil.log(en);
            }
            // BUG FIX: ack so the tuple tree completes instead of timing out.
            this.outputCollector.ack(tuple);
        }

        /**
         * Terminal bolt: emits nothing downstream, so no fields are declared.
         * @param outputFieldsDeclarer field declarer (unused)
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        }
    }

    /**
     * Entry point: wires spout -> split bolt -> count bolt and submits the
     * topology to an in-process LocalCluster.
     * BUG FIX: was declared as an instance method ({@code public void main}),
     * which the JVM cannot use as a program entry point; it must be static.
     * @param args unused command-line arguments
     */
    public static void main(String[] args){
        TopologyBuilder topologyBuilder = new TopologyBuilder();
        topologyBuilder.setSpout("spout_id",new DataSourceSpout());
        topologyBuilder.setBolt("split_bold_id",new SplitBolt()).shuffleGrouping("spout_id");
        // NOTE(review): with more than one CountBolt task this would need
        // fieldsGrouping("split_bold_id", new Fields("word")) so each word always
        // reaches the same counter; with the default single task it is correct.
        topologyBuilder.setBolt("count_bold_id",new CountBolt()).shuffleGrouping("split_bold_id");

        LocalCluster localCluster = new LocalCluster();
        localCluster.submitTopology("topology",new Config(),topologyBuilder.createTopology());
    }

}
