package com.tycmc.storm.bolt;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.tycmc.bean.InputObject;
import com.tycmc.common.AbstractCalculation;
import com.tycmc.common.log.AppendLog;
import com.tycmc.common.util.ListUtil;
import com.tycmc.dao.VehicleDao;
import com.tycmc.kafka.CreateKafkaProducer;
import com.tycmc.common.util.PrintException;
import com.tycmc.common.util.Property;
import com.tycmc.common.dynamicloader.DynamicClassLoadUtil;
import com.tycmc.system.AppContext;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import ty.pub.TransPacket;

import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Storm bolt that, for each incoming {@code TransPacket} tuple, loads the configured SQL
 * result sets, hands them together with a shared cache map to a dynamically loaded
 * {@code AbstractCalculation} implementation, and emits the resulting JSON string on the
 * {@code bolt_resultOutput} stream. Failures are reported to a Kafka daily-log topic.
 */
public class ExecuteBolt extends BaseRichBolt {
    private OutputCollector collector;
    /**
     * Kafka producer used to publish error/diagnostic log messages
     * (created per worker in {@link #prepare}, closed in {@link #cleanup}).
     */
    private KafkaProducer<String, String> logProducer;
    private static AppendLog appendLog = new AppendLog();
    // Thread-safe cache map shared across executor threads in this worker;
    // seeded once per JVM from the database (last message time per vehicle —
    // presumably keyed by vehicle id; verify against VehicleDao.getLastMsgTime).
    private static Map<String, Object> mapCash = new ConcurrentHashMap<String, Object>();

    static {
        // Ensure the Spring context is bootstrapped before any DAO access.
        AppContext.getSpringContext();
        // Seed the cache from the database once per worker JVM.
        mapCash = new VehicleDao().getLastMsgTime();
    }

    @Override
    public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
        // Non-serializable resources must be created here, not in the constructor,
        // because Storm serializes the bolt instance when deploying the topology.
        collector = outputCollector;
        logProducer = new CreateKafkaProducer().initDaily();
    }

    @Override
    public void execute(Tuple tuple) {
        try {
            System.out.println("ExecuteBolt拓扑:"+tuple);
            TransPacket transPacket = (TransPacket) tuple.getValueByField("transPacket");
            InputObject inputObject = new InputObject();
            inputObject.setTransPacket(transPacket);

            // Build the SQL result map described by the "sqlConfig" property: each entry
            // runs a query and groups its rows by the configured column.
            Map<String, Object> sqlResultMap = new HashMap<>();
            String sqlJson = Property.getProperty("sqlConfig");
            JSONArray jsonArray = JSON.parseArray(sqlJson);
            VehicleDao vehicleDao = new VehicleDao();
            for (int i = 0; i < jsonArray.size(); i++) {
                JSONObject jsonObject = jsonArray.getJSONObject(i);
                String sql = jsonObject.getString("sqlstr");
                List<Map<String, Object>> list = vehicleDao.vehicleList(sql);
                Map<String, List<Map<String, Object>>> map = ListUtil.groupBy(jsonObject.getString("groupColumn"), list);
                sqlResultMap.put(jsonObject.getString("mapkey"), map);
            }
            inputObject.setSqlResultMap(sqlResultMap);
            inputObject.setCacheMap(mapCash);

            // Load the user calculation class from the configured jar and run it.
            AbstractCalculation calculation = (AbstractCalculation) DynamicClassLoadUtil.getLoader().loadClass(Property.getProperty("jarName"), "com.main.UserFunction");
            String resultJson = (String) calculation.run(inputObject);

            // Anchor the emitted tuple to the input so Storm's reliability tracking
            // covers downstream processing, then ack the input tuple.
            collector.emit("bolt_resultOutput", tuple, new Values(resultJson));
            collector.ack(tuple);
        } catch (Exception e) {
            // Report the full stack trace to the Kafka daily-log topic, then fail the
            // tuple so Storm can replay it (previously the tuple was never acked/failed
            // and would silently time out).
            String stackTrace = PrintException.getStackTrace(e);
            logProducer.send(new ProducerRecord<>(Property.getProperty("producerDaily.topic"), appendLog.appendDaily("0", System.currentTimeMillis(), "ExecuteBolt.execute:" + stackTrace)));
            collector.fail(tuple);
        }
    }

    @Override
    public void cleanup() {
        // Release the Kafka producer when the worker shuts down; previously it leaked.
        if (logProducer != null) {
            logProducer.close();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declareStream("bolt_resultOutput", new Fields("resultJson"));
    }
}
