package com.zh.system.service.impl;

import cn.hutool.core.util.StrUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.zh.common.exception.BusinessException;
import com.zh.common.properties.JwtProperties;
import com.zh.system.Thread.AsyncManager;
import com.zh.system.crawler.BiliArticleCrawler;
import com.zh.system.domain.dto.AskAiDto;
import com.zh.system.domain.dto.CrawlDto;
import com.zh.system.manager.AiManager;
import com.zh.system.mapper.SysDicTypeMapper;
import com.zh.system.mapper.SysDicValueMapper;
import com.zh.system.properties.SystemProperties;
import com.zh.system.service.SystemService;
import com.zh.web.base.ApiResult;
import com.zh.web.openApi.AuthClient;
import com.zh.web.openApi.UserClient;
import com.zhipu.oapi.service.v4.model.ModelData;
import io.reactivex.Flowable;
import io.reactivex.schedulers.Schedulers;
import io.seata.spring.annotation.GlobalTransactional;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.aop.framework.AopContext;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;


@Slf4j
@Service
@RequiredArgsConstructor
public class SystemServiceImpl implements SystemService {

    private final SysDicTypeMapper sysDicTypeMapper;
    private final SysDicValueMapper sysDicValueMapper;
    private final UserClient userClient;
    private final BiliArticleCrawler biliArticleCrawler;
    private final SystemProperties systemProperties;
    private final AuthClient authClient;
    private final JwtProperties jwtProperties;
    private final AiManager aiManager;
    /**
     * Pending SSE ask-AI requests, keyed by the asking user's id.
     * Entries are written by {@link #receiveKeywordBeforeAskAi} and consumed
     * (removed) by {@link #askAi(Long)} so the map does not grow without bound.
     */
    private final ConcurrentHashMap<Long, AskAiDto> sseRequestMap = new ConcurrentHashMap<>();

    /**
     * Purges rows that have been soft-deleted (deleted=1) for over 7 days
     * from every database, running the per-database cleanups concurrently.
     *
     * @throws InterruptedException if the current thread is interrupted while
     *                              waiting for the cleanup tasks to finish
     */
    @Override
    @Transactional
    public void deleteAllExpireData() throws InterruptedException {
        // Go through the AOP proxy so the @DS / @GlobalTransactional annotations
        // on the callees are honored. Cast to the INTERFACE: with JDK dynamic
        // proxies (proxyTargetClass=false) the proxy implements SystemService but
        // is not a SystemServiceImpl, so a concrete-class cast would throw
        // ClassCastException at runtime.
        SystemService proxy = (SystemService) AopContext.currentProxy();
        // NOTE(review): @Transactional here only spans task submission, not the
        // async work itself — the real transaction boundaries live on the callees.
        AsyncManager asyncManager = AsyncManager.getInstance();
        asyncManager.submitAsyncTask(() -> {
            try {
                proxy.deleteSystemData();
            } catch (Exception e) {
                log.error("=========================清理ls_system库数据失败======================", e);
            }
        });
        asyncManager.submitAsyncTask(() -> {
            try {
                proxy.deleteUserData();
            } catch (Exception e) {
                log.error("=========================清理ls_user库数据失败======================", e);
            }
        });
        // Wait for all submitted tasks to complete, capped at one minute.
        asyncManager.awaitAll(1L, TimeUnit.MINUTES);
    }

    /**
     * Permanently deletes expired soft-deleted rows from the ls_system database.
     * Routed to the "ls_system" datasource via @DS (requires the AOP proxy).
     */
    @Override
    @DS("ls_system")
    public void deleteSystemData() {
        sysDicTypeMapper.permanentlyDelete();
        sysDicValueMapper.permanentlyDelete();
        log.info("=======================清理ls_system库数据成功==========================");
    }

    /**
     * Cleans up expired data in the ls_user database by delegating to the user
     * service over OpenFeign, inside a Seata global transaction.
     */
    @Override
    @GlobalTransactional
    public void deleteUserData() {
        // OpenFeign call into the user service.
        ApiResult<String> apiResult = userClient.clearAllExpiredData();
        if (apiResult.getCode() == 200) {
            log.info("=======================清理ls_user库数据成功==========================");
        } else {
            // Surface non-success results instead of silently ignoring them.
            log.warn("清理ls_user库数据失败, code={}", apiResult.getCode());
        }
    }

    /**
     * Dispatches a crawl request to the crawler matching its source.
     *
     * @param crawlDto crawl parameters (source, type, keyword, ordering, on/off)
     * @return a human-readable status message from the selected crawler
     * @throws BusinessException if no keyword was supplied
     */
    @Override
    public String switchCrawl(CrawlDto crawlDto) {
        // Pick the crawler implementation based on source/type/state parameters.
        if (StrUtil.isEmpty(crawlDto.getKeyword())) {
            throw new BusinessException("请输入关键词");
        }
        switch (crawlDto.getCrawlSource()) {
            case BILIBILI:
                return this.biliCrawl(crawlDto);
            default:
                return "暂不支持该类型爬虫";
        }

    }

    /**
     * Registers an ask-AI request for the authenticated user and returns the
     * SSE endpoint URL the client should connect to.
     *
     * @param askAiDto the question payload; askUser/askTime are filled in here
     * @param request  used to read the auth token header
     * @return the SSE endpoint URL (configured, or a localhost fallback)
     */
    @Override
    public String receiveKeywordBeforeAskAi(AskAiDto askAiDto, HttpServletRequest request) {
        // Resolve the caller's identity from the session bound to the token.
        String token = request.getHeader(jwtProperties.getTokenName());
        Map<String, Object> authMap = authClient.getSessionByToken(token).getData();
        Long userId = Long.parseLong(authMap.get("id").toString());
        askAiDto.setAskUSer(userId);
        askAiDto.setAskTime(LocalDateTime.now());
        // Stash the request; it is consumed later by askAi(userId).
        sseRequestMap.put(askAiDto.getAskUSer(), askAiDto);
        String aiUrl = systemProperties.getUrl();
        if (StrUtil.isEmpty(aiUrl)) {
            aiUrl = "http://localhost:8080/system/ai";
        }
        return aiUrl;
    }

    /**
     * Opens an SSE stream and pipes the AI model's streamed answer to it for a
     * previously registered request.
     *
     * @param id the asking user's id (key under which the request was stored)
     * @return the SSE emitter the controller hands back to the client
     * @throws BusinessException if no pending request exists or it has no message
     */
    @Override
    public SseEmitter askAi(Long id) {
        // Atomically consume the pending request so stale entries cannot
        // accumulate in the map (previously only get() was called, leaking
        // one entry per user forever).
        AskAiDto askAiDto = sseRequestMap.remove(id);
        if (askAiDto == null) {
            throw new BusinessException("请求超时");
        }
        if (StrUtil.isEmpty(askAiDto.getMessage())) {
            throw new BusinessException("请输入问题");
        }
        // Open the SSE connection; 0L disables the emitter's timeout.
        SseEmitter sseEmitter = new SseEmitter(0L);
        StringBuilder output = new StringBuilder();
        // Kick off the streaming AI request.
        Flowable<ModelData> modelDataFlowable = aiManager.doStableStreamRequest(systemProperties.getSystemPrompt(), askAiDto.getMessage());
        // Subscribe to the token stream and forward it to the client.
        modelDataFlowable.subscribeOn(Schedulers.io())
                .map(modelData -> modelData.getChoices().get(0).getDelta().getContent())
                .doOnNext(message -> {
                    output.append(message);
                    // NOTE(review): this re-sends the entire accumulated text on
                    // every token, not just the delta — confirm the client expects
                    // cumulative frames before changing it.
                    sseEmitter.send(output.toString());
                    log.info("ai输出：{}", message);
                })
                .doOnComplete(() -> {
                    sseEmitter.complete();
                    log.info("ai输出完成");
                }).doOnError(e -> {
                    sseEmitter.completeWithError(e);
                    log.error("ai输出异常", e);
                }).subscribe();
        return sseEmitter;
    }


    /**
     * Starts or stops the Bilibili crawler matching the request's crawl type.
     *
     * @param crawlDto crawl parameters (type, keyword, ordering, enabled flag)
     * @return a human-readable status message from the crawler
     */
    private String biliCrawl(CrawlDto crawlDto) {
        switch (crawlDto.getCrawlType()) {
            case ARTICLE:
                if (crawlDto.getEnabled()) {
                    // Reset state from any previous run before starting a new crawl.
                    biliArticleCrawler.reset();
                    biliArticleCrawler.setStartUrl(biliStartUrl("https://search.bilibili.com/article", crawlDto.getKeyword(), crawlDto.getOrderBy()));
                    return biliArticleCrawler.start();
                } else {
                    return biliArticleCrawler.stop();
                }
            default:
                return "暂不支持该类型爬虫";
        }
    }

    /**
     * Builds a Bilibili search URL, appending keyword and order query
     * parameters when present.
     *
     * @param url     the base search URL (no query string)
     * @param keyword search keyword; URL-encoded as UTF-8 if non-empty
     * @param orderBy sort order; appended verbatim if non-empty
     * @return the assembled URL
     */
    private String biliStartUrl(String url, String keyword, String orderBy) {
        String originalUrl = url;
        if (StrUtil.isNotEmpty(keyword)) {
            url += "?keyword=" + URLEncoder.encode(keyword, StandardCharsets.UTF_8);
        }
        if (StrUtil.isNotEmpty(orderBy)) {
            // If keyword was absent, "order" is the first query parameter.
            // NOTE(review): orderBy is not URL-encoded — assumed to be a fixed
            // token like "pubdate"; verify against its callers.
            if (StrUtil.equals(originalUrl, url)) {
                url += "?order=" + orderBy;
            } else {
                url += "&order=" + orderBy;
            }
        }
        return url;
    }

}
