package cn.tianyigps.m7.netty;


import cn.tianyigps.m7.beans.Equipment;
import cn.tianyigps.m7.template.KafkaTemplate;
import cn.tianyigps.m7.template.MongoTemplate;
import cn.tianyigps.m7.utils.MessageCoding;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.client.MongoCollection;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.ReferenceCountUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bson.Document;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.YearMonth;
import java.util.Date;
import java.util.List;
import java.util.Optional;



/**
 * Created with IDEA
 *
 * @description: 设备处理类
 * @author: cui.guibin
 * @create: 2019/3/7 10:20
 **/
public class EquipmentHandler extends ChannelInboundHandlerAdapter {

    private static final Logger log = LogManager.getLogger(EquipmentHandler.class);

    /**
     * Shared, pre-configured JSON mapper. ObjectMapper is thread-safe once
     * configured, and constructing one per message is expensive, so a single
     * cached instance is used for all channels.
     */
    private static final ObjectMapper OBJECT_MAPPER =
            new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);

    /**
     * Decodes an inbound device frame, publishes the parsed documents as JSON
     * to the {@code m7_inbound} Kafka topic, and persists them into a
     * month-sharded MongoDB collection ({@code m7StandardModel-yyyy-MM}).
     *
     * <p>The inbound {@link ByteBuf} is always released in the {@code finally}
     * block, whether processing succeeds or fails.
     *
     * @param ctx the channel context; its pipeline is expected to be a
     *            {@link TYChannelPipeline} carrying the bound {@link Equipment}
     * @param msg the raw inbound message, expected to be a {@link ByteBuf}
     * @throws Exception propagated from parsing/persistence; handled by
     *                   {@link #exceptionCaught(ChannelHandlerContext, Throwable)}
     */
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        try {
            TYChannelPipeline pipeline = (TYChannelPipeline) ctx.pipeline();
            Equipment equipment = pipeline.getEquipment();
            byte[] bytes = MessageCoding.getBytes((ByteBuf) msg);
            // Hex-encode the frame; parsers expect upper-case hex.
            String strData = MessageCoding.bytesToHexString(bytes).toUpperCase();
            List<Document> documents = equipment.analysisPackageType(strData);
            // Nothing parseable in this frame — drop it silently.
            if (documents == null || documents.isEmpty()) {
                return;
            }

            String json = OBJECT_MAPPER.writeValueAsString(documents);

            // Collections are sharded by month, e.g. "m7StandardModel-2019-03".
            // YearMonth.toString() yields exactly the "yyyy-MM" form.
            String month = YearMonth.now().toString();
            MongoCollection<Document> collection =
                    MongoTemplate.getCollection("m7StandardModel-" + month);

            // NOTE(review): obtaining and closing a producer per message is
            // costly if KafkaTemplate.producer() builds a new instance each
            // call — consider a shared long-lived producer. close() also
            // flushes the pending send before returning.
            KafkaProducer<String, String> producer = KafkaTemplate.producer();
            try {
                producer.send(new ProducerRecord<>("m7_inbound", json));
            } finally {
                producer.close();
            }

            collection.insertMany(documents);
            log.info(json);
        } catch (JsonProcessingException e) {
            // Serialization failure: log with the full stack trace instead of
            // printStackTrace(), which bypasses the logging configuration.
            log.error("Failed to serialize parsed documents to JSON", e);
        } finally {
            // Always release the inbound buffer to avoid a reference-count leak.
            ReferenceCountUtil.release(msg);
        }
    }

    /**
     * Logs which device/type raised the error before delegating to the
     * default exception handling.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        TYChannelPipeline pipeline = (TYChannelPipeline) ctx.pipeline();
        Equipment equipment = pipeline.getEquipment();
        if (equipment != null) {
            log.error("M7设备: {}   type： {} 报错", equipment.getEquipmentId(), equipment.getType());
        }
        super.exceptionCaught(ctx, cause);
    }

}
