package com.loong.storm.trident;

import com.loong.commen.util.LogUtil;
import com.loong.storm.students.WordCountStormTopology;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.shade.org.apache.commons.io.FileUtils;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.IMetricsContext;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichSpout;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.trident.Stream;
import org.apache.storm.trident.TridentState;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.trident.operation.builtin.Count;
import org.apache.storm.trident.state.State;
import org.apache.storm.trident.state.StateFactory;
import org.apache.storm.trident.testing.Split;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.io.File;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by 梁浩峰 on 2016/8/13 22:55.
 */
public class TridentTopologyLocalWordCount {
    public static class DataSourceSpout extends BaseRichSpout {
        private Map map;
        private TopologyContext topologyContext;
        private SpoutOutputCollector spoutOutputCollector;
        /**
         * 本实例运行时执行一次，初始化信息
         * @param map 配置参数
         * @param topologyContext
         * @param spoutOutputCollector 发射器，往下一步发射数据
         */
        public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.spoutOutputCollector=spoutOutputCollector;
        }

        /**
         * 死循环调用
         */

        public void nextTuple() {
            try {
                //读取指定目录下所有文件
                Collection<File> files = FileUtils.listFiles(new File("d:/test"), new String[]{"txt"}, true);//路径、后缀、递归读取
                for (File file:files
                        ) {
                    //获取每年每个文件数据
                    List<String> lines = FileUtils.readLines(file);
                    //把每一行数据发射出去
                    for (String line:lines
                            ) {
                        this.spoutOutputCollector.emit(new Values(line));
                    }

                }

            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        /**
         * 声明输出内容
         * @param outputFieldsDeclarer
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("line"));
        }
    }


    public static class SplitBolt extends BaseRichBolt {
        private Map map;
        private TopologyContext topologyContext;
        private OutputCollector outputCollector;

        public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.outputCollector=outputCollector;
        }



        /**
         * 死循环读取数据
         * @param tuple
         */
        public void execute(Tuple tuple) {
            //获取每一行数据
            String line = tuple.getStringByField("line");
            String[] words=null;

            //对数据切割
            if(line!=null) {
                words = line.split(" ");
            }


            //发射每个单词
            if (words!=null&&words.length!=0)for (String word:words
                    ) {
                LogUtil.log(word);
                this.outputCollector.emit(new Values(word)) ;
            }
        }

        /**
         * 声明字段
         * @param outputFieldsDeclarer
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("word"));

        }
    }

    public static class CountBolt extends BaseRichBolt{
        private Map map;
        private TopologyContext topologyContext;
        private OutputCollector outputCollector;

        public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
            this.map=map;
            this.topologyContext=topologyContext;
            this.outputCollector=outputCollector;
        }


        Map<String,Long> hashMap=new HashMap<String,Long>();

        /**
         * 死循环读取数据
         * @param tuple
         */
        public void execute(Tuple tuple) {
            //获取每一个单词
            String word = tuple.getStringByField("word");
            //对所有单词汇总
            Long value = hashMap.get(word);
            if (value==null){
                value=0l;
            }
            value++;
            hashMap.put(word,value);
            //打印结果
            LogUtil.log("==========================================");
            for (Map.Entry<String,Long> en:hashMap.entrySet()
                    ) {
                LogUtil.log(en);
            }
        }

        /**
         * 声明字段
         * @param outputFieldsDeclarer
         */
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        }
    }

    /**
     * Local-mode entry point.
     *
     * Builds a Trident word-count pipeline for illustration (it is never
     * submitted), then runs the classic spout/bolt topology from
     * {@link WordCountStormTopology} on an in-process {@link LocalCluster}.
     *
     * Fixes vs. the original: {@code main} lacked the {@code static} modifier,
     * so the JVM could never use it as an entry point; the Trident stream was
     * built from a {@code null} spout with empty field names, which would fail
     * at runtime — it now uses {@link DataSourceSpout} and the field names the
     * spout and bolts actually declare ("line" / "word" / "count").
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // --- Trident variant (constructed only for demonstration) ---
        TridentTopology topology = new TridentTopology();
        IRichSpout spout = new DataSourceSpout(); // emits "line" tuples
        TridentState wordCounts = topology.newStream("spout_id", spout)
                .each(new Fields("line"), new Split(), new Fields("word"))
                .groupBy(new Fields("word"))
                .persistentAggregate(new StateFactory() {
                    // Placeholder factory — a real topology would return a
                    // concrete State (e.g. MemoryMapState) instead of null.
                    public State makeState(Map map, IMetricsContext iMetricsContext, int i, int i1) {
                        return null;
                    }
                }, new Count(), new Fields("count"))
                .parallelismHint(6);

        // --- Classic spout/bolt variant that is actually submitted below ---
        TopologyBuilder topologyBuilder = new TopologyBuilder();
        topologyBuilder.setSpout("spout_id", new WordCountStormTopology.DataSourceSpout());
        topologyBuilder.setBolt("split_bold_id", new WordCountStormTopology.SplitBolt()).shuffleGrouping("spout_id");
        topologyBuilder.setBolt("count_bold_id", new WordCountStormTopology.CountBolt()).shuffleGrouping("split_bold_id");

        // Run the topology inside this JVM — no real cluster required.
        LocalCluster localCluster = new LocalCluster();
        localCluster.submitTopology("topology", new Config(), topologyBuilder.createTopology());
    }
}
