/**********************************************************************
 *@Project： cloudDemo
 *@File: DemoProducer
 *@Date: 2017/4/13
 *@Copyright (C) 2013-2017 http://www.posun.cn. All Rights Reserved.
 ***********************************************************************
 *注意： 本内容仅限于深圳市普盛实业有限公司内部使用，禁止外泄
 **********************************************************************
 */
package com.posun.edu.system.mq;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Sets;
import com.posun.cloud.tracing.TraceUtil;
import com.posun.cloud.tracing.enums.LogType;
import com.posun.edu.system.entity.Dict;
import com.posun.edu.system.search.DictRepository;
import com.posun.edu.system.service.IDictService;
import com.posun.framework.dto.DataSourceResult;
import com.posun.framework.http.AresHttpClient;
import com.posun.framework.log.AresLog;
import com.posun.framework.log.LogBuilder;
import com.posun.framework.orm.domain.Searchable;
import com.posun.framework.orm.enums.Operator;
import com.posun.framework.orm.toolkit.IdWorker;
import com.posun.framework.util.DateUtil;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.core.JmsMessagingTemplate;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.scheduling.annotation.Scheduled;

import javax.jms.Queue;
import java.time.LocalDateTime;
import java.util.List;
import java.util.concurrent.Future;

/**
 * <p>
 * Demo producer exercising JMS queue declaration, Kafka send/receive,
 * dict-service CRUD, Elasticsearch queries and async tasks.<br>
 *
 * @author YL
 * @version 1.0
 * @createTime 2017/4/13 11:46
 * @ChangeLog
 */
@Configuration
@EnableKafka
public class DemoProducer {

    // Main demo topic; also consumed by processMessage below.
    private static final String TOPIC_NAME = "demo-topic";

    // Secondary test topic; also consumed by processMessage below.
    private static final String TEST_TOPIC_NAME = "demo-test-topic";

    @Autowired
    private JmsMessagingTemplate jmsMessagingTemplate;

    @Autowired
    private IDictService dictService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private DictRepository dictRepository;

    /**
     * Declares the ActiveMQ demo queue. As a side effect it fires a best-effort
     * HTTP smoke test against the upgrade endpoint and prints the response.
     *
     * @return the ActiveMQ queue named {@code posun.cloud.demo}
     */
    @Bean
    public Queue demoQueue() {
        try {
            String result = AresHttpClient.get().
                    url("https://test.oksales.net/eidpws/core/common/upgrade/posun/oksales.android").
                    build().
                    execute().asString();
            System.out.println(result);
        } catch (Exception e) {
            // Smoke test only — queue creation must not fail because of it.
            // TODO(review): route through AresLog instead of printStackTrace.
            e.printStackTrace();
        }

        return new ActiveMQQueue("posun.cloud.demo");
    }

    /**
     * Scheduled end-to-end demo: dict search, CRUD round trip, async task,
     * Kafka sends and Elasticsearch queries.
     *
     * @throws Exception propagated from the dict service calls
     */
    @Scheduled(cron = "0 0/1 * * * ? ")  // fires once per minute (original comment claimed every 3s — wrong)
    public void send() throws Exception {
        TraceUtil.putAttachment(TraceUtil.TENANT_KEY, "test");
        // Typed construction — original used the raw type Searchable.
        Searchable<Dict> searchable = new Searchable<>(new Dict());
        searchable.setExcludeColumns(Sets.newHashSet("sysDict"));
        searchable.addSearch("id", Operator.PRE_LIKE, "85");
        searchable.setPage(1, 20);
        searchable.addAggregate("dictName", "count");
        List<Dict> dicts = dictService.selectList(searchable);
        System.out.println(JSON.toJSONString(dicts));
        DataSourceResult<Dict> dictList = dictService.selectResult(searchable);
        System.out.println(JSON.toJSONString(dictList));
        asyncSendMsg();

        // insert
        Dict dict = new Dict();
        String id = String.valueOf(IdWorker.getId());
        dict.setId(id);
        dict.setDictName("test:" + DateUtil.getCurrentDateTime());
        dict.setVersion(0);
        dict.setServerFlag(0);

        boolean insertSuccess = dictService.insert(dict);
        System.out.println("insert successed:" + insertSuccess);

        // update — guard against the hard-coded id no longer existing (was an NPE risk)
        Dict updateDict = dictService.selectById("853158441487749120");
        if (updateDict != null) {
            updateDict.setDeleteTime(LocalDateTime.now());
            boolean updateSuccess = dictService.update(updateDict);
            System.out.println("update successed:" + updateSuccess);
        }

        // delete, then verify the row is gone
        dictService.delete(id);

        Dict selectDict = dictService.selectById(id);
        if (selectDict != null) {
            // The serialized value was previously discarded — print it so the check is observable.
            System.out.println(JSON.toJSONString(selectDict));
        }

        sendKafka();

        // Elasticsearch demo
        dictRepository.save(dicts);

        System.out.println(dictRepository.findByDictName("MYTEST"));

        QueryBuilder queryBuilder = QueryBuilders.queryStringQuery("豹子");
        Iterable<Dict> iterable = this.dictRepository.search(queryBuilder);
        iterable.forEach(d -> {
            System.out.println(JSON.toJSONString(d));
        });

        //jmsMessagingTemplate.convertAndSend(demoQueue(), "hi,i'm demo msg!" + IdWorker.getId());
    }

    /**
     * Sends demo messages to both Kafka topics and demonstrates the template API.
     * The producer listener is registered BEFORE sending so its callbacks apply
     * to these very sends (it was originally registered afterwards — too late).
     */
    public void sendKafka() {
        // Send-callback listener; register first so it observes the sends below.
        kafkaTemplate.setProducerListener(new ProducerListener<String, String>() {
            @Override
            public void onSuccess(String topic, Integer partition, String key, String value,
                                  RecordMetadata recordMetadata) {
                System.out.println("successed! topic:" + topic + ",partition:" + partition + ",key:" + key + ",value:" + value);
            }

            @Override
            public void onError(String topic, Integer partition, String key, String value, Exception exception) {
                System.out.println("error!topic:" + topic + ",partition:" + partition + ",key:" + key + ",value:" + value);
            }

            @Override
            public boolean isInterestedInSuccess() {
                // Was false, which suppressed the onSuccess callback implemented above.
                return true;
            }
        });

        kafkaTemplate.send(TOPIC_NAME, "hello,ares!");
        kafkaTemplate.send(TEST_TOPIC_NAME, "hello,ares developer!");

        kafkaTemplate.metrics();

        kafkaTemplate.execute(producer -> {
            // Native Kafka producer API calls can be issued here.
            return null;
        });
    }

    /**
     * Consumes messages from both demo topics and echoes them to stdout.
     *
     * @param content the received message payload
     */
    @KafkaListener(topics = {TOPIC_NAME, TEST_TOPIC_NAME})
    public void processMessage(String content) {
        System.out.println(content);
    }

    /**
     * Asynchronously marks the dict with the given id as deleted.
     *
     * @param id dict primary key; assumed to reference an existing row — TODO confirm callers guarantee this
     * @return a completed future once the update has been issued
     * @throws InterruptedException declared for interface compatibility; the sleep interrupt is handled internally
     */
    @Async("updateDict")
    public Future<String> updateDict(String id) throws InterruptedException {
        Dict updateDict = dictService.selectById(id);
        updateDict.setDeleteTime(LocalDateTime.now());
        try {
            Thread.sleep(500); // simulate slow work
        } catch (InterruptedException e) {
            // Was an empty catch — restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        }
        boolean updateSuccess = dictService.update(updateDict);
        System.out.println("update successed:" + updateSuccess);
        return new AsyncResult<>("Task2 accomplished!");
    }

    /**
     * Fires an async no-op task and logs its elapsed time via AresLog.
     *
     * @return a completed future with a status message
     * @throws InterruptedException if the sleep is interrupted
     */
    @Async("asyncTask")
    public Future<String> asyncSendMsg() throws InterruptedException {
        long start = System.currentTimeMillis();
        Thread.sleep(0); // original was the confusing octal literal 000 — still a 0 ms sleep
        long end = System.currentTimeMillis();
        AresLog.info(new LogBuilder(DemoProducer.class, LogType.OTHER, "Task1 finished, time elapsed: " + (end - start) + " ms."));
        return new AsyncResult<>("Task1 accomplished!");
    }

}
