package com.shyroke.daydayzhuansvc.service.impl;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.http.HttpRequest;
import com.querydsl.core.QueryResults;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import com.shyroke.daydayzhuanapi.constant.SiteDto;
import com.shyroke.daydayzhuanapi.dto.SpiderInfoDto;
import com.shyroke.daydayzhuanapi.dto.SpiderProjectDto;
import com.shyroke.daydayzhuanapi.entity.*;
import com.shyroke.daydayzhuanapi.mapper.SpiderInfoMapper;
import com.shyroke.daydayzhuanapi.mapper.SpiderProjectMapper;
import com.shyroke.daydayzhuandao.respository.NewsRepository;
import com.shyroke.daydayzhuandao.respository.SpiderImageRepository;
import com.shyroke.daydayzhuandao.respository.SpiderInfoRepository;
import com.shyroke.daydayzhuandao.respository.SpiderProjectRepository;
import com.shyroke.daydayzhuansvc.service.ISpiderService;
import com.shyroke.daydayzhuansvc.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;

import java.net.URL;
import java.util.*;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Class    : SpiderService
 * Usage    : Crawls configured sites for articles and images, persists the
 *            results, and manages spider project/site records.
 * Author   : shyroke
 * Created  : 2019/2/20 13:56
 */
@Service
@Service
public class SpiderService implements ISpiderService {
    @Autowired
    private SpiderProjectRepository spiderProjectRepository;
    @Autowired
    private SpiderInfoRepository spiderInfoRepository;
    /** Active Spring profile, forwarded to each {@code SpiderRunnable} task. */
    @Value("${spring.profiles.active}")
    private String active;
    @Autowired
    private SpiderImageService spiderImageService;
    @Autowired
    private OSSService ossService;
    @Autowired
    private NewsRepository newsRepository;
    @Autowired
    private SpiderProjectMapper spiderProjectMapper;
    @Autowired
    private JPAQueryFactory factory;
    @Autowired
    private SpiderImageRepository spiderImageRepository;
    @Autowired
    private SpiderInfoMapper spiderInfoMapper;

    private static final Logger logger = LoggerFactory.getLogger(SpiderService.class);

    /**
     * Maps the given project DTOs to entities and persists them in one batch.
     * No-op when the input maps to an empty list.
     *
     * @param projectList crawled projects to store
     */
    @Override
    public void batchSave(List<SpiderProjectDto> projectList) {
        List<SpiderProjectEntity> list = projectList.stream()
                .map(spiderProjectMapper::dtoToEntity)
                .collect(Collectors.toList());
        if (CollectionUtil.isNotEmpty(list)) {
            spiderProjectRepository.saveAll(list);
        }
    }

    /**
     * Pages spider projects, newest first, optionally filtered by title (fuzzy),
     * source (exact), onlyWrite and isSave flags.
     *
     * @param page    1-based page request (page number + limit)
     * @param project filter holder; blank/null fields are ignored
     * @return one page of matching projects plus the total match count
     */
    @Override
    public TabelData<com.shyroke.daydayzhuanapi.dto.SpiderProjectDto> getTableData(Page page, SpiderProjectDto project) {
        QSpiderProjectEntity qProject = QSpiderProjectEntity.spiderProjectEntity;
        JPAQuery<SpiderProjectEntity> query = factory.selectFrom(qProject);
        if (StrUtil.isNotBlank(project.getTitle())) {
            query = query.where(qProject.title.like("%" + project.getTitle() + "%"));
        }
        if (StrUtil.isNotBlank(project.getSource())) {
            query = query.where(qProject.source.eq(project.getSource()));
        }
        if (project.getOnlyWrite() != null) {
            query = query.where(qProject.onlyWrite.eq(project.getOnlyWrite()));
        }
        if (project.getIsSave() != null) {
            query = query.where(qProject.isSave.eq(project.getIsSave()));
        }

        QueryResults<SpiderProjectEntity> queryResults = query
                .offset(PageRequest.of(page.getPage() - 1, page.getLimit()).getOffset())
                .limit(page.getLimit())
                .orderBy(qProject.id.desc())
                .fetchResults();

        List<com.shyroke.daydayzhuanapi.dto.SpiderProjectDto> list = queryResults.getResults().stream()
                .map(spiderProjectMapper::entityToDto)
                .collect(Collectors.toList());
        // FIX: use the generic constructor (the original used a raw TabelData),
        // consistent with getSiteTableData below.
        return new TabelData<>(list, (int) queryResults.getTotal());
    }

    /** @return the distinct project sources known to the repository */
    @Override
    public List<String> getSource() {
        return spiderProjectRepository.getSource();
    }

    /**
     * Loads a spider project by primary key.
     *
     * @return the entity, or {@code null} when the id is unknown
     */
    @Override
    public SpiderProjectEntity getById(Integer id) {
        return spiderProjectRepository.findById(id).orElse(null);
    }

    /**
     * Runs the crawl: clears today's news records, then submits one
     * {@code SpiderRunnable} per spider-info entry to a dedicated thread pool
     * and collects each task's {@link R} result.
     *
     * @param spiderInfo a single site to crawl, or {@code null} to crawl every
     *                   configured site
     * @return {@code R.ok} carrying the per-site results under key "result"
     */
    @Override
    public R spider(SpiderInfoEntity spiderInfo) {
        List<Future> futureList = new ArrayList<>();
        List<R> resultList = new ArrayList<>();
        try {
            // Clear today's news records before a fresh crawl run.
            this.deleteNews(new Date());

            List<SpiderInfoEntity> spiderInfoList = new ArrayList<>();
            if (spiderInfo == null) {
                spiderInfoList = this.getSpiderInfoList();
            } else {
                spiderInfoList.add(spiderInfo);
            }

            // FIX: the original sized the pool and queue with spiderInfoList.size()
            // unconditionally; a size of 0 makes both ThreadPoolExecutor and
            // ArrayBlockingQueue throw IllegalArgumentException.
            if (!spiderInfoList.isEmpty()) {
                int poolSize = spiderInfoList.size();
                ThreadPoolExecutor executor = new ThreadPoolExecutor(poolSize, poolSize,
                        200, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<Runnable>(poolSize));
                for (SpiderInfoEntity info : spiderInfoList) {
                    futureList.add(executor.submit(new SpiderRunnable(info, active)));
                }
                // Already-submitted tasks still run to completion after shutdown().
                executor.shutdown();
            }
        } catch (Exception e) {
            // FIX: log at error level with the stack trace (was info + message only).
            logger.error(ExceptionUtil.getMessage(e), e);
        }

        try {
            for (Future future : futureList) {
                // Future.get() blocks until the task finishes.
                resultList.add((R) future.get());
            }
        } catch (Exception e) {
            logger.error(ExceptionUtil.getMessage(e), e);
        }

        return R.ok("result", resultList);
    }

    /**
     * Crawls one listing page and extracts every article whose date text
     * contains {@code date} (or all articles when {@code dateIgnore} is set).
     * Each article's detail page is fetched for its content.
     *
     * @param siteName            display name of the site (used in logs/errors)
     * @param siteURL             listing page URL
     * @param articleEleCssQuery  selector for the per-article container elements
     * @param date                date string the article date must contain
     * @param dateCssQuery        selector for the article's date element
     * @param titleCssQuery       selector for the article title
     * @param urlCssQuery         selector for the element carrying the detail href
     * @param descCssQuery        selector for the content on the detail page
     * @param descExcludeCssQuery selectors to strip from the content, may be null
     * @param descURLPre          prefix for relative detail hrefs, or null
     * @param dateIgnore          when true, the date filter is skipped
     * @return the extracted projects, never empty
     * @throws Exception when the page cannot be fetched, no articles match the
     *                   container selector, a detail fetch fails, or nothing was
     *                   published for the requested date
     */
    @Override
    public List<SpiderProjectDto> spiderSite(String siteName, String siteURL, String articleEleCssQuery,
                                             String date, String dateCssQuery, String titleCssQuery,
                                             String urlCssQuery, String descCssQuery,
                                             List<String> descExcludeCssQuery, String descURLPre, boolean dateIgnore) throws Exception {
        List<SpiderProjectDto> spiderProjects = new ArrayList<>();
        logger.info("=============开始爬取" + siteName + "数据=============");

        Document document = fetchWithRetry(siteURL);
        if (document == null) {
            logger.error("请求siteUrl为空：{}", siteURL);
            throw new RuntimeException("请求siteUrl为空:" + siteURL);
        }

        // All article container elements on the listing page.
        Elements contents = document.select(articleEleCssQuery);
        if (contents == null || contents.size() == 0) {
            throw new RuntimeException("文章集合为空，该网站为" + siteName + ",网址：" + siteURL);
        }

        for (Element element : contents) {
            Element articleEle = element.selectFirst(dateCssQuery);
            String articleDate = articleEle == null ? "" : articleEle.text();

            // Keep the article when its date matches, or when dates are ignored.
            if (articleDate.contains(date) || dateIgnore) {
                String title = element.select(titleCssQuery).text();
                String href = element.select(urlCssQuery).attr("href");
                String url = (descURLPre != null) ? descURLPre + href : href;
                if (StringUtils.isBlank(url)) {
                    continue;
                }
                String content;
                try {
                    content = this.getSiteDesc(url, descCssQuery, descExcludeCssQuery);
                    // yyok.cc wraps outbound links in a redirect page; unwrap them.
                    if (siteURL.contains("yyok.cc")) {
                        content = this.handleContent(content);
                    }
                } catch (Exception e) {
                    throw new RuntimeException("获取" + siteName + "的文章详情失败，链接为=" + url + ",失败原因=" + ExceptionUtil.getMessage(e));
                }
                spiderProjects.add(this.setProjectInfo(new SpiderProjectDto(), title, content, url, siteName, siteURL, new Date()));
            }
        }

        if (spiderProjects.size() == 0) {
            throw new RuntimeException("今天没更新,该网站为" + siteName + ",网址：" + siteURL);
        }

        logger.info("=============爬取" + siteName + "数据结束=============");

        return spiderProjects;
    }

    /**
     * Post-processes yyok.cc article HTML: marks every link rel="nofollow" and
     * resolves "yyok.cc/go" redirect links to the real target URL found in the
     * redirect page's safety-url element.
     *
     * @param content article HTML, may be blank
     * @return rewritten HTML, or {@code null} when {@code content} is blank
     * @throws Exception when fetching a redirect page fails
     */
    private String handleContent(String content) throws Exception {
        if (StrUtil.isBlank(content)) {
            return null;
        }
        Document document = Jsoup.parse(content);
        for (Element aElement : document.select("a")) {
            aElement.attr("rel", "nofollow");
            String href = aElement.attr("href");
            if (StrUtil.isNotBlank(href) && href.contains("yyok.cc/go")) {
                Document descDocument = Jsoup.connect(href).userAgent(SiteDto.CHORME).timeout(50000).get();
                Element urlEle = descDocument.selectFirst("div[class='safety-url']");
                // FIX: guard against a missing safety-url element (the original NPE'd).
                if (urlEle != null) {
                    aElement.attr("href", urlEle.text());
                }
            }
        }
        return document.html();
    }

    /** @return all configured spider-info entries as a concrete list */
    @Override
    public List<SpiderInfoEntity> getSpiderInfoList() {
        List<SpiderInfoEntity> res = new ArrayList<>();
        spiderInfoRepository.findAll().forEach(res::add);
        return res;
    }

    /**
     * Intended to remove the news records created on the given day.
     *
     * NOTE(review): this only QUERIES the day's records and discards the result —
     * nothing is actually deleted. The call likely needs to become a delete
     * operation (e.g. deleteByDateBetween); confirm against NewsRepository.
     *
     * @param date the day whose news records should be cleared
     */
    @Override
    public void deleteNews(Date date) {
        String dateStr = DateUtil.format(date, "yyyy-MM-dd");
        String begin = dateStr + " 00:00:00";
        String end = dateStr + " 23:59:59";
        newsRepository.findByDateBetween(DateUtil.parse(begin, "yyyy-MM-dd HH:mm:ss"),
                DateUtil.parse(end, "yyyy-MM-dd HH:mm:ss"));
    }

    /**
     * Pages spider-site configs, newest first, optionally filtered by a fuzzy
     * site-name match.
     *
     * @param page       1-based page request (page number + limit)
     * @param spiderInfo filter holder; a blank site name is ignored
     * @return one page of matching site configs plus the total match count
     */
    @Override
    public TabelData<SpiderInfoDto> getSiteTableData(Page page, SpiderInfoDto spiderInfo) {
        QSpiderInfoEntity qInfo = QSpiderInfoEntity.spiderInfoEntity;
        JPAQuery<SpiderInfoEntity> query = factory.selectFrom(qInfo);
        if (StrUtil.isNotBlank(spiderInfo.getSiteName())) {
            query = query.where(qInfo.siteName.like("%" + spiderInfo.getSiteName() + "%"));
        }

        QueryResults<SpiderInfoEntity> queryResults = query
                .offset(PageRequest.of(page.getPage() - 1, page.getLimit()).getOffset())
                .limit(page.getLimit())
                .orderBy(qInfo.id.desc())
                .fetchResults();

        List<SpiderInfoDto> list = queryResults.getResults().stream()
                .map(spiderInfoMapper::entityToDto)
                .collect(Collectors.toList());
        return new TabelData<>(list, (int) queryResults.getTotal());
    }

    /** Persists a new spider-site config. */
    @Override
    public void saveSiteInfo(SpiderInfoDto spiderInfo) {
        spiderInfoRepository.save(spiderInfoMapper.dtoToEntity(spiderInfo));
    }

    /** Updates an existing spider-site config (save performs an upsert). */
    @Override
    public void updateSite(SpiderInfoDto spiderInfo) {
        spiderInfoRepository.save(spiderInfoMapper.dtoToEntity(spiderInfo));
    }

    /**
     * Deletes the spider-site config with the given name, if it exists.
     *
     * @param siteName unique site name
     */
    @Override
    public void deleteBySiteName(String siteName) {
        SpiderInfoEntity entity = spiderInfoRepository.findBySiteName(siteName);
        // FIX: the original called delete(null) (NPE) when the site was unknown.
        if (entity != null) {
            spiderInfoRepository.delete(entity);
        }
    }

    /**
     * Loads a spider-site config by name.
     *
     * @return the DTO, or whatever the mapper yields for a missing entity
     */
    @Override
    public SpiderInfoDto getSiteBySiteName(String siteName) {
        SpiderInfoEntity entity = spiderInfoRepository.findBySiteName(siteName);
        return spiderInfoMapper.entityToDto(entity);
    }

    /**
     * Looks up already-persisted projects whose titles match the given crawled
     * projects (used to detect duplicates).
     *
     * @param spiderProjects freshly crawled projects, may be null/empty
     * @return matching persisted projects as DTOs; empty when none match
     */
    @Override
    public List<com.shyroke.daydayzhuanapi.dto.SpiderProjectDto> findByProjectNameList(List<SpiderProjectDto> spiderProjects) {
        // FIX: return an empty list instead of null so callers never NPE.
        if (CollectionUtil.isEmpty(spiderProjects)) {
            return Collections.emptyList();
        }
        List<String> titleList = spiderProjects.stream()
                .map(SpiderProjectDto::getTitle)
                .collect(Collectors.toList());
        List<SpiderProjectEntity> list = spiderProjectRepository.findByTitleIn(titleList);
        if (CollectionUtil.isEmpty(list)) {
            return Collections.emptyList();
        }
        return list.stream().map(spiderProjectMapper::entityToDto).collect(Collectors.toList());
    }

    /**
     * Pulls up to 50 random images of the given type from unsplash, uploads each
     * previously unseen one to OSS and records it; already-crawled URLs are
     * skipped.
     *
     * @param type image keyword appended to the unsplash query
     * @return {@code R.ok} on completion
     */
    @Override
    public R spiderImg(String type) {
        for (int i = 0; i < 50; i++) {
            HttpRequest request = HttpRequest.get("https://source.unsplash.com/210x160/?" + type);
            String result = StringEscapeUtils.unescapeHtml4(request.execute().body());
            Document document = Jsoup.parse(result);
            String url = document.select("a").attr("href");
            if (spiderImageService.selectByUrl(url) != null) {
                continue; // already crawled this image
            }
            // Name of the uploaded image (random UUID), stored under a per-day folder.
            String imgName = CommonUtil.getUUID();
            String path = OSSClientConstants.SPIDER_IMAGE_PATH
                    + CommonUtil.getDateByFormat(new Date(), "yyyy-MM-dd") + "/" + imgName + ".png";
            try {
                ossService.uploadImg(url, path, null);
            } catch (Exception e) {
                // FIX: log at error level with the stack trace — the original
                // "{}"-placeholder consumed the Throwable and dropped the trace.
                logger.error("oss上传图片失败", e);
            }
            SpiderImageEntity image = new SpiderImageEntity();
            image.setIsDelete(0);
            image.setType(type);
            image.setUrl(path);
            spiderImageRepository.save(image);
        }
        return R.ok("爬取成功！");
    }

    /**
     * Loads a spider project by primary key.
     *
     * @return the entity, or {@code null} when the id is unknown
     */
    @Override
    public SpiderProjectEntity findById(Integer spiderProjectId) {
        return spiderProjectRepository.findById(spiderProjectId).orElse(null);
    }

    /** Fills the crawl result fields of a project DTO and returns it. */
    private SpiderProjectDto setProjectInfo(SpiderProjectDto project, String title, String content, String url, String siteName, String siteURL, Date date) {
        project.setTitle(title);
        project.setContent(content);
        project.setUrl(url);
        // Source combines the display name and the listing URL.
        project.setSource(siteName + "," + siteURL);
        project.setDate(date);
        return project;
    }

    /**
     * Fetches an article detail page and returns its content HTML with emojis
     * and the excluded sub-elements stripped.
     *
     * @param url                 detail page URL
     * @param descCssQuery        selector for the content element
     * @param descExcludeCssQuery selectors to remove from the content, may be null
     * @throws Exception when the page cannot be fetched or the content element
     *                   is missing
     */
    private String getSiteDesc(String url, String descCssQuery, List<String> descExcludeCssQuery) throws Exception {
        Document document = fetchWithRetry(url);
        // FIX: the original used `assert document != null`, which is a no-op
        // unless the JVM runs with -ea; fail explicitly instead.
        if (document == null) {
            throw new RuntimeException("请求文章详情失败:" + url);
        }
        Element content = document.selectFirst(descCssQuery);
        // FIX: guard against a missing content element (the original NPE'd).
        if (content == null) {
            throw new RuntimeException("未找到文章内容，css=" + descCssQuery + ",url=" + url);
        }

        if (CollectionUtil.isNotEmpty(descExcludeCssQuery)) {
            for (String exclude : descExcludeCssQuery) {
                // select() never returns null; removing an empty selection is a no-op.
                content.select(exclude).remove();
            }
        }

        return EmojiUtil.filterEmoji(content.html());
    }

    /**
     * Fetches {@code url} with up to 5 attempts, sleeping 10 seconds between
     * failures (the target sites throttle aggressively).
     *
     * @return the parsed document, or {@code null} when every attempt failed
     * @throws InterruptedException when the back-off sleep is interrupted
     */
    private Document fetchWithRetry(String url) throws InterruptedException {
        for (int i = 0; i < 5; i++) {
            try {
                return Jsoup.connect(url).userAgent(SiteDto.CHORME).timeout(50000).get();
            } catch (Exception e) {
                logger.warn("请求{}失败，休息10秒后重试", url);
                Thread.sleep(10000);
            }
        }
        return null;
    }
}
