package com.wis.bolt.to_kafka;

import com.wis.TopologyDrive;
import com.wis.pool.kafka.KafkaPool;
import com.wis.pool.kafka.KafkaProducerApp;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.util.Map;
import java.util.Properties;

/**
 * @Description: traffic_fan    json指标数据写入kafka中
 * @author: fan
 * @Date: Created in 2018/10/18 15:25
 * @Modified By:
 */
/**
 * Storm bolt that publishes weather-metric tuples ("bean" data) to a Kafka topic.
 *
 * <p>Producers are borrowed from a {@link KafkaPool} per tuple and returned in a
 * {@code finally} block; the pool is created in {@link #prepare} and closed in
 * {@link #cleanup}. Tuples are always acked (best-effort delivery).
 */
public class ToBeanKafkaBolt extends BaseRichBolt {
    private OutputCollector collector;
    // Pool of Kafka producers; created in prepare(), closed in cleanup().
    private KafkaPool<String, String> pool = null;
    // Target Kafka topic, read from the topology configuration in prepare().
    private String topic = null;

    @Override
    public void prepare(Map map, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
        // Kafka broker list for the producer connections.
        String kafkaProducerServer = map.get(TopologyDrive.KAFKA_PRODUCER_SERVER).toString();
        // Initialize the producer connection pool.
        initPool(kafkaProducerServer);
        // Topic this bolt publishes bean records to.
        topic = map.get(TopologyDrive.KAFKA_PRODUCER_BEAN_TOPIC).toString();
    }

    @Override
    public void execute(Tuple tuple) {
        KafkaProducerApp<String, String> producer = null;
        try {
            // Tuple fields: "key", "id", "nengjiandu" (visibility), "fengsu" (wind speed),
            // "jiangshuiliang" (precipitation), "jiangshuixiangtai" (precipitation state),
            // "qiwen" (temperature), "daolujiebing" (road icing).
            String keyLine = tuple.getStringByField("key");
            String njd = tuple.getStringByField("nengjiandu");
            producer = pool.borrowProducer();
            if (njd.contains("jies")) {
                // End-of-batch marker: key and value are both "<timestamp>-over".
                String overMarker = keyLine.substring(6, 18) + "-" + "over";
                producer.send(topic, overMarker, overMarker);
            } else if (StringUtils.isNotBlank(njd) && !njd.contains("over")) {
                // CSV value: visibility, wind speed, precipitation, precipitation state,
                // temperature, road icing.
                String line = njd + "," + tuple.getStringByField("fengsu") + "," + tuple.getStringByField("jiangshuiliang") + "," + tuple.getStringByField("jiangshuixiangtai") + "," + tuple.getStringByField("qiwen") + "," + tuple.getStringByField("daolujiebing");
                producer.send(topic, keyLine.substring(6, 18) + "-" + tuple.getStringByField("id"), line);
            }
        } catch (Exception e) {
            // Surface the failure to Storm instead of silently printing the stack trace;
            // the tuple is still acked below, so delivery remains best-effort.
            collector.reportError(e);
        } finally {
            // Only return a producer that was actually borrowed; producer stays null when
            // borrowProducer() or an earlier field lookup threw.
            if (producer != null) {
                pool.returnProducer(producer);
            }
            this.collector.ack(tuple);
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Terminal bolt: emits nothing downstream.
    }

    @Override
    public void cleanup() {
        // Release all pooled Kafka producers on topology shutdown.
        if (pool != null) {
            pool.close();
        }
        super.cleanup();
    }

    /**
     * Builds the Kafka producer pool for the given broker list; a no-op when the
     * broker string is blank.
     *
     * @param server comma-separated Kafka bootstrap servers
     */
    private void initPool(String server) {
        if (StringUtils.isNotBlank(server)) {
            Properties props = new Properties();
            props.put("bootstrap.servers", server);
            props.put("acks", "all");
            props.put("retries", 0);
            props.put("batch.size", 16384);
            props.put("linger.ms", 1);
            props.put("buffer.memory", 33554432);
            props.put("key.serializer",
                    "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer",
                    "org.apache.kafka.common.serialization.StringSerializer");

            pool = new KafkaPool<String, String>(props);
        }
    }
}
