package com.zxcl.service;//package com.zxcl.service;
//

import com.alibaba.fastjson2.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.zxcl.config.EsImportConfig;
import com.zxcl.constant.EsConstant;
import com.zxcl.domain.Prod;
import com.zxcl.domain.ProdTagReference;
import com.zxcl.mapper.ProdCommMapper;
import com.zxcl.mapper.ProdMapper;
import com.zxcl.mapper.ProdTagReferenceMapper;
import com.zxcl.model.CommStatistics;
import com.zxcl.model.ProdEs;
import com.zxcl.pool.EsImportThreadPool;
import com.zxcl.service.impl.ImportService;
import lombok.extern.log4j.Log4j2;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;


@Service
@Log4j2
public class EsImportServiceImpl implements ImportService, CommandLineRunner {

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    @Autowired
    private ProdMapper prodMapper;

    @Autowired
    private ProdTagReferenceMapper prodTagReferenceMapper;

    @Autowired
    private ProdCommMapper prodCommMapper;

    @Autowired
    private EsImportConfig esImportConfig;

    @Autowired
    private StringRedisTemplate stringRedisTemplate;

    private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");


    /**
     * 每页的条数 定义好 一般不建议超过2w
     */
    int size = 20;

    /**
     * 全量导入[项目启动时加载]
     * 全量（分页+多线程）
     * * 1.创建索引
     * * 2.查总条数
     * * 3.计算总页数 （totalPage = 总条数 % size == 0 ? 总条数 / size: ((总条数 / size)+1)）
     * * 4.for(int i = 1;i <= totalPage;i++){
     * * new Thread(()->{
     * * prod: select(1,size);
     * * prodEs{prod/tag/comm}
     * * bulkRequest(prodEsList)
     * * }).start();
     * * }
     * * --------------- 任务 ------------
     * * case   when
     * * 深分页如何优化？
     */
    @Override
//    @PostConstruct
    public void importAll() {
        if (!esImportConfig.getFlag()) {
            log.info("已经导入过了");
            return;
        }
        //创建索引
        createProdEsIndex();
        //查询数据总条数
        Long totalCount = this.getTotalCount(null);
        if (totalCount <= 0) {
            log.info("没有商品需要导入");
            return;
        }
        //计算总页数  10
        long totalPage = totalCount % this.size == 0 ? totalCount / this.size : ((totalCount / this.size) + 1);
        CountDownLatch countDownLatch = new CountDownLatch((int) totalPage);
        for (int i = 1; i <= totalPage; i++) {
            // 异步
            int current = i;
            EsImportThreadPool.esPool.execute(() -> {
                fetchProdToProdEs(current, size, null);
                //?代表当前线程的fetchProdToProdEs方法执行完了
                countDownLatch.countDown();
            });
        }
        try {
            //线程阻塞，只有countDownLatche的值减到0时才会放行
            countDownLatch.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        System.out.println("ok");
        // 手动刷新一次缓冲区数据
        RefreshRequest refreshRequest = new RefreshRequest(EsConstant.PROD_ES_INDEX);
        RefreshResponse refreshResponse = null;
        try {
            refreshResponse = restHighLevelClient.indices().refresh(refreshRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
        log.info("手动刷新结果为:{}", refreshResponse.getStatus());
        // 更新索引的设定
        UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(EsConstant.PROD_ES_INDEX);
        updateSettingsRequest.settings(Settings.builder()
                .put("number_of_replicas", 2)
                .put("refresh_interval", "1s")
        );
        AcknowledgedResponse acknowledgedResponse = null;
        try {
            acknowledgedResponse = restHighLevelClient.indices().putSettings(updateSettingsRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
        log.info("更新索引结果为:{}", acknowledgedResponse.isAcknowledged());
        Date t1 = new Date();
        stringRedisTemplate.opsForValue().set(EsConstant.UPDATE_IMPORT_TIME_KEY, sdf.format(t1));
    }

    /**
     * * 转换商品信息
     * * 导入es的方法
     * * -----------------
     * * 1.分页查询商品表
     * * 2.拿到当前页商品对应的 标签数据
     * * 3.拿到当前页商品对应的 评论数据
     * * 4.组装prodEs
     * * 5.导入es
     *
     * @param current
     * @param size
     * @param t       导入时间
     */
    private void fetchProdToProdEs(int current, int size, Date t) {
        //分页查询商品数据(注意处理深分页的问题)
        List<Prod> prodList = prodMapper.selectMyPage((current - 1) * size, size, t);
        //取出所有商品ID
        List<Long> prodIds = prodList.stream().map(Prod::getProdId).collect(Collectors.toList());
        //根据商品的ID，把和商品ID相关的所有活动对象全查询出来
        List<ProdTagReference> prodTagReferences = prodTagReferenceMapper.selectList(new LambdaQueryWrapper<ProdTagReference>()
                .in(ProdTagReference::getProdId, prodIds)
        );
        // prodId  List<Long>  根据商品ID对活动关系对象进行分组 80--List<ProdTagReference>
        Map<Long, List<ProdTagReference>> tagMap = prodTagReferences.stream()
                .collect(Collectors.groupingBy(ProdTagReference::getProdId));
        //拿评论数据   1 根据商品IDS拿到所有的评论数据，再回到内存里面去统计 好评率 好评数  【不合适】
        //            2 直接在数据库通过SQL只查总评价数和好评数  count sum  【】
        List<CommStatistics> commStatistics = prodCommMapper.selectCommStatistics(prodIds);
        //把commStatistics放成Map<ProdId,List<CommStatistics>>
        Map<Long, CommStatistics> mapStat = commStatistics.stream().collect(Collectors.toMap(CommStatistics::getProdId, p -> p));
        // 创建一个批处理请求
        BulkRequest bulkRequest = new BulkRequest(EsConstant.PROD_ES_INDEX);
        //  组合数据
        prodList.forEach(prod -> {
            ProdEs prodEs = new ProdEs();
            BeanUtils.copyProperties(prod, prodEs);
            //根据商品ID从tagMap里面取出商品活动关系信息
            List<ProdTagReference> tagReferences = tagMap.get(prod.getProdId());
            //如果商品参加了活动，就把活动ID取出来 放到prodEs-->tagList
            if (!CollectionUtils.isEmpty(tagReferences)) {
                List<Long> tagList = tagReferences.stream()
                        .map(ProdTagReference::getTagId)
                        .collect(Collectors.toList());
                prodEs.setTagList(tagList);
            }
            //根据商品ID拿到评论统计数据
            CommStatistics statistics = mapStat.get(prod.getProdId());
            if (!ObjectUtils.isEmpty(statistics)) {
                //取出总评数
                Long allCount = statistics.getAllCount();
                //取出好评论数
                Long goodCount = statistics.getGoodCount();
                if (!goodCount.equals(0L)) {
                    // 计算
                    BigDecimal goodLV = new BigDecimal(goodCount.toString())
                            .divide(new BigDecimal(allCount.toString()), 2, BigDecimal.ROUND_HALF_UP)//除以
                            .multiply(new BigDecimal("100"));//乘以
                    prodEs.setPraiseNumber(goodCount);
                    prodEs.setPositiveRating(goodLV);
                }
            }
            // prodEs 导入了 es的bulk
            IndexRequest indexRequest = new IndexRequest(EsConstant.PROD_ES_INDEX);
            indexRequest.id(prodEs.getProdId().toString());
            indexRequest.source(JSON.toJSONString(prodEs), XContentType.JSON);
            bulkRequest.add(indexRequest);//添加到ES的批处理容器里面
        });

        //指交批处理，把数据导到ES
        BulkResponse bulkResponse = null;
        try {
            bulkResponse = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
        log.info("=====第{}页导入完成，结果为:{}", current, !bulkResponse.hasFailures());
    }


    /**
     * 创建索引
     */
    private void createProdEsIndex() {
        CreateIndexRequest createIndexRequest = new CreateIndexRequest(EsConstant.PROD_ES_INDEX);
        createIndexRequest.mapping("{\n" +
                "    \"properties\": {\n" +
                "        \"positiveRating\": {\n" +
                "            \"type\": \"double\"\n" +
                "        },\n" +
                "        \"tagList\": {\n" +
                "            \"type\": \"long\"\n" +
                "        },\n" +
                "        \"soldNum\": {\n" +
                "            \"type\": \"integer\"\n" +
                "        },\n" +
                "        \"price\": {\n" +
                "            \"type\": \"double\"\n" +
                "        },\n" +
                "        \"prodName\": {\n" +
                "            \"analyzer\": \"ik_max_word\",\n" +
                "            \"type\": \"text\"\n" +
                "        },\n" +
                "        \"praiseNumber\": {\n" +
                "            \"type\": \"long\"\n" +
                "        },\n" +
                "        \"_class\": {\n" +
                "            \"index\": false,\n" +
                "            \"type\": \"keyword\",\n" +
                "            \"doc_values\": false\n" +
                "        },\n" +
                "        \"pic\": {\n" +
                "            \"type\": \"keyword\"\n" +
                "        },\n" +
                "        \"prodId\": {\n" +
                "            \"type\": \"keyword\"\n" +
                "        },\n" +
                "        \"categoryId\": {\n" +
                "            \"type\": \"long\"\n" +
                "        },\n" +
                "        \"shopId\": {\n" +
                "            \"type\": \"long\"\n" +
                "        }\n" +
                "    }\n" +
                "}", XContentType.JSON);
        createIndexRequest.settings(Settings.builder()
                .put("number_of_shards", 3) // 根据数据量来决定的
                .put("number_of_replicas", 0) // 因为你导入es的时候 关闭副本的功能
                .put("refresh_interval", "-1") // 关闭定时刷新索引的操作
        );
        CreateIndexResponse indexResponse = null;
        try {
            indexResponse = restHighLevelClient.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
        log.info("创建商品索引:{}", indexResponse.isAcknowledged());
    }

    /**
     * 增量导入 执行n次（多线程+分页）
     * 并不是每次有写商品的操作 就执行导入
     * 搞一个定时任务 间隔5min执行一次
     * 如何确定哪些数据是增量数据？
     * updateTime
     * -----------
     * fixedDelay 它的间隔时间是根据上次的任务结束的时候开始计时的。比如一个方法上设置了fixedDelay=5*1000，那么当该方法某一次执行结束后，开始计算时间，当时间达到5秒，就开始再次执行该方法。
     * fixedRate  它的间隔时间是根据上次任务开始的时候计时的。比如当方法上设置了fiexdRate=5*1000，该执行该方法所花的时间是2秒，那么3秒后就会再次执行该方法。
     */
    @Override
//    @Scheduled(initialDelay = 20 * 1000, fixedRate = 20 * 1000)
    public void importUpdate() {
        // 进入就要给时间点
        Date t2 = new Date();
        log.info("增量开始");
        //从redis里面取出为上次同步的时间
        String t1Str = stringRedisTemplate.opsForValue().get(EsConstant.UPDATE_IMPORT_TIME_KEY);
        Date t1 = null;
        try {
            t1 = sdf.parse(t1Str);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        //根据redis里面取出来时间去查询在这个时间窗口里面有变化的数据的条数
        Long totalCount = getTotalCount(t1);
        if (totalCount <= 0L) {
            stringRedisTemplate.opsForValue().set(EsConstant.UPDATE_IMPORT_TIME_KEY, sdf.format(t2));
            log.info("没有商品需要导入");
            return;
        }

        //计算总页数
        long totalPage = totalCount % this.size == 0 ? totalCount / this.size : ((totalCount / this.size) + 1);
        CountDownLatch countDownLatch = new CountDownLatch((int) totalPage);
        for (int i = 1; i <= totalPage; i++) {
            // 异步
            int current = i;
            Date t = t1;
            EsImportThreadPool.esPool.execute(() -> {
                fetchProdToProdEs(current, this.size, t);
                countDownLatch.countDown();
            });
        }
        try {
            countDownLatch.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        //更新redis里面的最后一次导入时间
        stringRedisTemplate.opsForValue().set(EsConstant.UPDATE_IMPORT_TIME_KEY, sdf.format(t2));
    }

    /**
     * 查询总条数
     *
     * @param t
     * @return
     */
    private Long getTotalCount(Date t) {
        return prodMapper.selectCount(new LambdaQueryWrapper<Prod>()
                .eq(Prod::getStatus, 1)
                .ge(t != null, Prod::getUpdateTime, t)
        );
    }

    /**
     * 实现了CommandLineRunner  在项目启动时会调用这个run方法
     *
     * @param args
     * @throws Exception
     */
    @Override
    public void run(String... args) throws Exception {
        this.importAll();
    }
}
