package com.tico.editor.editors.service.impl;

import com.alibaba.druid.support.json.JSONUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringSerializer;
import com.tico.common.service.impl.AbstractService;
import com.tico.common.utils.BaseQuery;
import com.tico.common.utils.JsonUtils;
import com.tico.common.utils.QueryResult;
import com.tico.editor.editors.domain.HttpEditor;
import com.tico.editor.editors.domain.PubilcData;
import com.tico.editor.editors.mapper.HttpEditorMapper;
import com.tico.editor.editors.mapper.PubilcDataMapper;
import com.tico.editor.editors.service.HttpEditorService;
import com.tico.editor.editors.service.PubilcDataService;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.nutz.lang.segment.CharSegment;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.io.Serializable;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Service implementation for {@link PubilcData} records: CRUD delegation to
 * {@link PubilcDataMapper}, paged queries, and public-key → HTTP-editor JSON
 * placeholder substitution ({@link #translate}).
 */
@Service
public class PubilcDataServiceImpl extends AbstractService<PubilcData> implements PubilcDataService {


   @Autowired
   PubilcDataMapper pubilcDataMapper;
   @Autowired
   HttpEditorMapper httpEditorMapper;
   @Autowired
   private HttpEditorService httpEditorService;

   @Override
   public PubilcData findById(Serializable id) {
     return pubilcDataMapper.findById(id);
   }

    @Override
    public int insertBatch(List<PubilcData> pubilcDatas) {
      return pubilcDataMapper.insertBatch(pubilcDatas);
    }

    @Override
    public int insert(PubilcData pubilcData) {
      return pubilcDataMapper.insert(pubilcData);
    }
    @Override
    public List<PubilcData > findByNamedParamList(Map<String,Object> param){
        return pubilcDataMapper.findByNamedParamList(param);
    }

    @Override
    public String modelHttp(String httpId, String key) {
        HttpEditor httpEditor = httpEditorMapper.findById(httpId);
        JSONObject postman = httpEditorService.postman(httpEditor);
        return JsonUtils.keyValue(key, JSON.toJSONString(postman));
    }
    @Override
    public String translate(String str) {
        CharSegment url =  new CharSegment(str);
        List<PubilcData> pubilcData = pubilcDataMapper.findByNamedParamList(null);
        for(PubilcData p:pubilcData){
            String s = modelHttp(p.getHttpId(), p.getPublicKey());
            url.set(p.getPublicKey(),s);
        }
        return url.toString();
    }
    @Override
    public int delete(Serializable id) {
      return pubilcDataMapper.delete(id);
    }

    @Override
    public int remove(Serializable id) {
      return pubilcDataMapper.remove(id);
    }

    @Override
    public QueryResult<PubilcData> findPageInfo(BaseQuery baseQuery) {
        QueryResult<PubilcData> result = new QueryResult<PubilcData>();
        result.setQuery(baseQuery);
        Map
        <String, Object> params = result.getQuery().build();
        Integer amount = this.pubilcDataMapper.countPageInfo(params);
        result.setTotalRecord(amount);
        if (amount == 0) {
        return result;
        }
        List<PubilcData> list = pubilcDataMapper.findPageInfo(params);
        if (!CollectionUtils.isEmpty(list)) {
        result.setResultList(list);

        }
        return result;
    }
    public static String topic = "duanjt_test";//定义主题

//    public static void main(String[] args) throws InterruptedException {
//        Properties p = new Properties();
//        p.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.16.9:9092");//kafka地址，多个地址用逗号分割
//        p.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
//        p.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
//        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(p);
//
//        try {
//            while (true) {
//                String msg = "Hello," + new Random().nextInt(100);
//                ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, msg);
//                kafkaProducer.send(record);
//                System.out.println("消息发送成功:" + msg);
//                Thread.sleep(500);
//            }
//        } finally {
//            kafkaProducer.close();
//        }
//    }
public static void main(String[] args) {
    final ConcurrentLinkedQueue<String> subscribedTopics = new ConcurrentLinkedQueue<>();

    // 创建另一个测试线程，启动后首先暂停10秒然后变更topic订阅
    Runnable runnable = new Runnable() {
        @Override
        public void run() {
            try {
                Thread.sleep(20000);
            } catch (InterruptedException e) {
                // swallow it.
            }
            // 变更为订阅topic： btopic， ctopic
            subscribedTopics.addAll(Arrays.asList("stopic","atopic", "dtopic"));
        }
    };
    new Thread(runnable).start();

    Properties props = new Properties();
    props.put("bootstrap.servers", "172.16.16.9:9092");
    props.put("group.id", "my-group1");
    props.put("auto.offset.reset", "earliest");
    props.put("enable.auto.commit", "true");
    props.put("auto.commit.interval.ms", "1000");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

    // 最开始的订阅列表：atopic、btopic
    consumer.subscribe(Arrays.asList("atopic", "btopic","ctopic"));
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(10000); //表示每2秒consumer就有机会去轮询一下订阅状态是否需要变更
        // 本例不关注消息消费，因此每次只是打印订阅结果！
        System.out.println(consumer.subscription());
        if (!subscribedTopics.isEmpty()) {
            Iterator<String> iter = subscribedTopics.iterator();
            List<String> topics = new ArrayList<>();
            while (iter.hasNext()) {
                topics.add(iter.next());
            }
            subscribedTopics.clear();
            consumer.subscribe(topics); // 重新订阅topic
        }
            /**
             * 消费者必须持续对Kafka进行轮询，否则会被认为已经死亡，他的分区会被移交给群组里的其他消费者。
             * poll返回一个记录列表，每个记录包含了记录所属主题的信息，
             * 记录所在分区的信息，记录在分区里的偏移量，以及键值对。
             * poll需要一个指定的超时参数，指定了方法在多久后可以返回。
             * 发送心跳的频率，告诉群组协调器自己还活着。
             */
            for (ConsumerRecord<String, String> record : records) {
                //Thread.sleep(1000);
                System.out.println("########################################################################################################");
                System.out.printf("offset = %d, value = %s", record.offset(), record.value(),record.topic());
                System.out.println("########################################################################################################");
            }
    }
    // 本例只是测试之用，使用了while(true)，所以这里没有显式关闭consumer
//        consumer.close();
}
}
