package com.xiguaji.tiktok.task.hashtag.service;

import cn.hutool.core.date.DateUtil;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.gugee.tiktok.common.model.dal.blogger.BloggerLogRepository;
import com.gugee.tiktok.common.model.dal.blogger.BloggerTypeRepository;
import com.gugee.tiktok.common.model.dal.hashtag.HashtagRepository;
import com.gugee.tiktok.common.model.dto.Aweme4HashtagDto;
import com.gugee.tiktok.common.model.dto.DistributionRecord;
import com.gugee.tiktok.common.model.mysql.BloggerLog;
import com.gugee.tiktok.common.model.mysql.BloggerType;
import com.gugee.tiktok.common.model.mysql.Hashtag;
import com.gugee.tiktok.data.common.msg.HashtagMessage;
import com.gugee.tiktok.data.common.msg.MusicMessage;
import com.xiguaji.boot.toolkit.JsonUtil;
import com.xiguaji.tiktok.task.common.config.AppConfig;
import com.xiguaji.tiktok.task.common.job.TaskParam;
import com.xiguaji.tiktok.task.common.producer.AbstractProducerHandler;
import com.xiguaji.tiktok.task.music.service.MusicService;
import com.xxl.job.core.log.XxlJobLogger;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Produces hashtag (challenge) update messages to Kafka for the top-scored awemes,
 * and caches per-hashtag region/industry distributions from Elasticsearch into Redis.
 *
 * User: luolifeng
 * Date: 2020-10-15 15:16
 */
@Service
@Slf4j
public class HashtagService extends AbstractProducerHandler<HashtagMessage> {

    /** ES index holding hashtag (challenge) documents. */
    public static final String CHALLENGE_INDEX = "challenge_index";

    /** Prefix of the monthly aweme indices; a {@code yyyyMM} suffix is appended. */
    public static final String AWEME_INDEX = "aweme_index_";

    // Jackson mappers are thread-safe after configuration; share one instance
    // instead of allocating a new mapper for every deserialized row.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Autowired
    @Qualifier("kafkaTemplateHashtag")
    private KafkaTemplate<Long, HashtagMessage> kafkaTemplateHashtag;

    @Autowired
    RestHighLevelClient client;

    @Autowired
    BloggerLogRepository bloggerLogRepository;

    @Autowired
    HashtagRepository hashtagRepository;

    @Autowired
    RedisTemplate<String, String> redisTemplate;

    @Autowired
    BloggerTypeRepository bloggerTypeRepository;

    /** Cached displayable blogger types, headed by a synthetic "ALL" entry (id 0). */
    List<BloggerType> bloggerTypes = new ArrayList<>();

    @Autowired
    AppConfig appConfig;

    /**
     * Populates {@link #bloggerTypes} once at startup: a synthetic "ALL" type
     * (id 0) followed by every blogger type flagged as displayable.
     */
    @PostConstruct
    public void initConstruct() {
        BloggerType allType = new BloggerType();
        allType.setId(0);
        allType.setName("ALL");
        this.bloggerTypes.add(allType);
        this.bloggerTypes.addAll(bloggerTypeRepository.getBloggerTypeByIsDisplay(true));
    }

    /**
     * Collects the distinct hashtags referenced by one page of top-scored awemes
     * and wraps each in a {@link HashtagMessage}.
     *
     * TODO: depending on resource load and how often a hashtag repeats across
     * videos, decide whether already-updated hashtag messages should be filtered out.
     *
     * @param bloggerLog paging cursor; {@code currentBloggerId} selects the page (1-based)
     * @param taskParam  task settings; {@code records} is the page size
     * @return one message per distinct hashtag found on the page
     */
    @Override
    public List<HashtagMessage> findMessageObject(BloggerLog bloggerLog, TaskParam taskParam) {
        List<Aweme4HashtagDto> aweme4HashtagDtos = hashtagRepository.getHashtagsTopOrderByScore(
                (bloggerLog.getCurrentBloggerId() - 1) * taskParam.getRecords(), taskParam.getRecords());
        Set<Hashtag> rst = new HashSet<>(aweme4HashtagDtos.size());

        aweme4HashtagDtos.forEach(textExtras -> {
            try {
                // BUGFIX: the original tested the DTO itself with StringUtils.isEmpty
                // (only true for null), so a null oTextExtra string slipped through
                // and blew up inside readValue. Guard the string we actually parse.
                if (textExtras != null && !StringUtils.isEmpty(textExtras.getOTextExtra())) {
                    List<Hashtag> hashtagList = MAPPER.readValue(
                            textExtras.getOTextExtra(), new TypeReference<List<Hashtag>>() {});
                    rst.addAll(hashtagList);
                }
            } catch (Exception ex) {
                // A single malformed row must not abort the whole page; keep the
                // stack trace instead of printing only the message to stdout.
                log.warn("failed to parse oTextExtra as hashtag list", ex);
            }
        });

        List<HashtagMessage> hashtagMessageList = new ArrayList<>(rst.size());
        rst.forEach(hashtag -> {
            HashtagMessage hashtagMessage = new HashtagMessage();
            hashtagMessage.setHid(hashtag.getHid());
            hashtagMessageList.add(hashtagMessage);
        });
        return hashtagMessageList;
    }

    /**
     * Publishes the messages to Kafka, stamping each with the configured retry budget.
     *
     * @param messages messages to deliver
     * @param topic    destination Kafka topic
     */
    @Override
    public void sendMsg(List<HashtagMessage> messages, String topic) {
        messages.forEach(msg -> {
            msg.setRetryTimes(appConfig.getRetryHashtagTimes());
            msg.setRetryLimitTimes(appConfig.getRetryHashtagTimes());
            kafkaTemplateHashtag.send(topic, msg);
        });
        XxlJobLogger.log("投递主题={},投递消息数={}", topic, messages.size());
        log.info("投递主题={},投递消息数={}", topic, messages.size());
    }

    /**
     * Builds the comma-separated aweme index list for the current and the two
     * preceding months, e.g. {@code aweme_index_202010,aweme_index_202009,aweme_index_202008}.
     */
    private String getAwemeEsIndex() {
        LocalDateTime now = LocalDateTime.now();
        return String.join(",",
                AWEME_INDEX + DateUtil.format(now, "yyyyMM"),
                AWEME_INDEX + DateUtil.format(now.minusMonths(1), "yyyyMM"),
                AWEME_INDEX + DateUtil.format(now.minusMonths(2), "yyyyMM"));
    }

    /**
     * Refreshes the region (type 1) and industry (type 2) distributions in Redis
     * for the top hashtags across all categories.
     */
    public void setDistribution() {
        Set<Long> cidList = getHashTag(0L);

        cidList.parallelStream().forEach(cid -> {
            setDistributionToRedis(cid, 1);
            setDistributionToRedis(cid, 2);
        });
    }

    /**
     * Returns the cids of the top 100 hashtags sorted by {@code userGrowth},
     * optionally restricted to one category.
     *
     * @param categoryId category filter; {@code null} or non-positive means all categories
     * @return matching cids; empty when the search fails
     */
    private Set<Long> getHashTag(Long categoryId) {
        Set<Long> cidList = new HashSet<>();
        SearchRequest searchRequest = new SearchRequest(CHALLENGE_INDEX);
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        if (null != categoryId && categoryId > 0L) {
            searchSourceBuilder.query(QueryBuilders.nestedQuery("categories",
                    QueryBuilders.matchQuery("categories.id", categoryId), ScoreMode.Total));
        }
        searchSourceBuilder.size(100);
        searchSourceBuilder.sort("userGrowth", SortOrder.DESC);
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse;
        try {
            searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            // BUGFIX: the original printed the stack trace and then dereferenced
            // the still-null response (guaranteed NPE). Degrade to an empty result.
            log.error("challenge search failed, categoryId={}", categoryId, e);
            return cidList;
        }

        for (SearchHit hit : searchResponse.getHits().getHits()) {
            Map result = JsonUtil.jsonParse(Map.class, hit.getSourceAsString());
            cidList.add(Long.parseLong(result.get("cid").toString()));
        }
        return cidList;
    }

    /**
     * Aggregates the awemes of one hashtag by region (type 1) or by aweme type
     * (any other value) and caches the distribution in Redis for 7 days.
     *
     * @param cid  hashtag id
     * @param type 1 = region distribution, otherwise industry distribution
     */
    private void setDistributionToRedis(long cid, int type) {
        SearchRequest searchRequest = new SearchRequest(getAwemeEsIndex());
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(QueryBuilders.nestedQuery("challenges",
                QueryBuilders.matchQuery("challenges.hid", cid), ScoreMode.Total));

        String termsName = "regions";
        String redisName = "dis_challenge_region_";
        TermsAggregationBuilder builder;
        if (type == 1) {
            builder = AggregationBuilders.terms(termsName).field("region").size(1000).executionHint("map");
        } else {
            termsName = "awemeTypes";
            redisName = "dis_challenge_industry_";
            builder = AggregationBuilders.terms(termsName).field("awemeType").size(1000).executionHint("map");
        }

        searchSourceBuilder.aggregation(builder);
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse;
        try {
            searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            // BUGFIX: skip this hashtag instead of dereferencing a null response.
            log.error("aweme aggregation failed, cid={}, type={}", cid, type, e);
            return;
        }

        if (null != searchResponse.getAggregations()) {
            List<Map<String, Object>> mapList = new ArrayList<>();
            val terms = (Terms) searchResponse.getAggregations().get(termsName);
            terms.getBuckets().forEach(bucket -> {
                Map<String, Object> map = new HashMap<>();
                map.put("key", bucket.getKeyAsString());
                map.put("value", bucket.getDocCount());
                mapList.add(map);
            });
            List<DistributionRecord> distributionRecords = getDistributionRecords(mapList);
            distributionRecords.removeIf(x -> StringUtils.isEmpty(x.getDescription()));
            redisTemplate.opsForValue().set(redisName + cid,
                    JsonUtil.parseJson(distributionRecords), 7, TimeUnit.DAYS);
        }
    }

    /**
     * Converts raw bucket counts into percentage records: the first 9 buckets are
     * reported individually (buckets rounding down to 0% are dropped), everything
     * else is folded into a single "Others" entry, and the result is sorted by
     * percentage descending.
     *
     * @param mapList buckets as {@code {"key": label, "value": doc count (Long)}}
     * @return percentage distribution; empty when there are no documents
     */
    private List<DistributionRecord> getDistributionRecords(List<Map<String, Object>> mapList) {
        List<DistributionRecord> distributionRecords = new ArrayList<>();
        long sum = mapList.stream().mapToLong(m -> (long) m.get("value")).sum();
        if (sum <= 0) {
            // BUGFIX: the original divided by sum unconditionally, throwing
            // ArithmeticException for an empty aggregation.
            return distributionRecords;
        }

        long otherCount = 0; // long, not int: doc counts are longs and may overflow an int
        for (int i = 0; i < mapList.size(); i++) {
            long count = (long) mapList.get(i).get("value");
            if (i < 9) {
                double percent = toPercent(count, sum);
                if (percent <= 0) {
                    continue;
                }
                DistributionRecord distributionRecord = new DistributionRecord();
                Object key = mapList.get(i).get("key");
                distributionRecord.setDescription(key == null ? "" : key.toString());
                distributionRecord.setPercent(percent);
                distributionRecords.add(distributionRecord);
            } else {
                otherCount += count;
            }
        }

        if (otherCount > 0) {
            DistributionRecord distributionRecord = new DistributionRecord();
            distributionRecord.setDescription("Others");
            distributionRecord.setPercent(toPercent(otherCount, sum));
            distributionRecords.add(distributionRecord);
        }

        distributionRecords.sort((o1, o2) -> Double.compare(o2.getPercent(), o1.getPercent()));
        return distributionRecords;
    }

    /** Computes {@code count / sum} as a percentage with two decimals (floor rounding, as before). */
    private static double toPercent(long count, long sum) {
        return new BigDecimal(count)
                .divide(new BigDecimal(sum), 3, RoundingMode.FLOOR)
                .multiply(new BigDecimal(100))
                .setScale(2)
                .doubleValue();
    }
}
