package com.hog.service.reptiles.impl;

import com.alibaba.fastjson.JSONObject;
import com.hog.entity.BlogArticleWeb;
import com.hog.entity.BlogArticleWebContent;
import com.hog.entity.BlogArticleWebHis;
import com.hog.mapper.BlogArticleWebContentMapper;
import com.hog.mapper.BlogArticleWebHisMapper;
import com.hog.mapper.BlogArticleWebMapper;
import com.hog.service.qiniu.IQiniuHandleService;
import com.hog.service.reptiles.IReptilesService;
import com.hog.utils.IdUtil;
import com.hog.utils.StringUtils;
import com.qiniu.api.io.PutRet;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * 爬虫Service — crawler service that scrapes article pages from mamicode.com,
 * re-hosts embedded images on Qiniu, and persists articles via the blog mappers.
 *
 * @author 唐晴
 * @date 2020年08月05日
 * @since JDK 1.8
 */
@Service
@Slf4j
public class ReptilesServiceImpl implements IReptilesService {

    /**
     * Qiniu image-style suffix appended to every re-hosted image URL:
     * quality 75 plus a base64-encoded text watermark (renders "爱上编程 www.tqcto.com").
     */
    private static final String WATERMARK_SUFFIX = "?imageView2/0/q/75|watermark/2/text/54ix5LiK57yW56iLIHd3dy50cWN0by5jb20=/font/5a6L5L2T/fontsize/340/fill/IzAwMDAwMA==/dissolve/100/gravity/SouthEast/dx/10/dy/10";

    /** SEO keywords stored with every crawled article (loop-invariant, hoisted out of the crawl loop). */
    private static final String KEYWORDS = "爱上编程,tqcto.com,互联网,软件开发,开源,asp,asp.net,android,apple,jsp,php,Windows,数据库,HTML,JS,CSS,技术文章,游戏,手机相关";

    /** Number of list pages to crawl per run. */
    private static final int PAGE_COUNT = 100;

    @Autowired
    private BlogArticleWebHisMapper webHisMapper;
    @Autowired
    private BlogArticleWebMapper webMapper;
    @Autowired
    private BlogArticleWebContentMapper contentMapper;
    @Autowired
    private IQiniuHandleService qiniuHandleService;

    /**
     * Entry point: crawls the mamicode "WEB开发" category.
     *
     * @return always {@code null} (interface contract; callers ignore the result)
     */
    @Override
    public String reptilesMaMi() {
        log.info("开始爬码迷站");
        // WEB development category
        getMamiWeb();
        return null;
    }

    /**
     * Deletes articles whose crawled content turned out to be empty, together
     * with their crawl-history rows so the same URLs can be re-crawled later.
     *
     * @return always {@code null} (interface contract; callers ignore the result)
     */
    @Override
    public String deleteErrList() {
        // Articles whose content column is empty.
        List<BlogArticleWeb> errList = webMapper.getErrList();
        log.info("内容为空的数据数量为: {}", errList.size());
        for (BlogArticleWeb bean : errList) {
            webMapper.delete(bean);
            // History is keyed by the original URL; removing it allows a re-crawl.
            webHisMapper.deleteByPrimaryKey(bean.getOriginalText());
            log.info("删除内容为空的数据成功: {}", bean.getId());
        }

        return null;
    }

    /**
     * Crawls list pages {@code info-list-2-1 .. info-list-2-100} of mamicode.com
     * and processes every article link found on each page. Failures are logged
     * and skipped so one bad page or article never aborts the whole run.
     */
    private void getMamiWeb() {
        for (int page = 1; page <= PAGE_COUNT; page++) {
            try {
                String listUrl = "http://www.mamicode.com/info-list-2-" + page + ".html";
                log.info("{} 爬取列表页: {}", page, listUrl);
                Document doc = Jsoup.connect(listUrl).get();
                Elements links = doc.body().getElementsByClass("listtitle");
                for (Element link : links) {
                    try {
                        crawlDetail(link);
                    } catch (Exception e) {
                        log.error(e.getMessage(), e);
                        log.info("爬取详情页异常, 再次重试....");
                    }
                }
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                log.info("爬取列表异常, 再次重试....");
            }
        }
    }

    /**
     * Fetches one article detail page, cleans it up and persists it.
     * Skips URLs that were already crawled and pages with empty content.
     *
     * @param link anchor element from a list page (class {@code listtitle})
     * @throws Exception on network or parse failure (handled by the caller's loop)
     */
    private void crawlDetail(Element link) throws Exception {
        String title = link.text();
        String href = String.format("http://www.mamicode.com%s", link.attr("href"));

        if (webHisMapper.selectByPrimaryKey(href) != null) {
            log.info("已存在爬取记录, 跳过.....");
            return;
        }

        log.info("{}\t {}", title, href);
        Document document = Jsoup.connect(href).get();
        Element content = document.getElementsByClass("detailcontennt").get(0);

        if (StringUtils.isEmpty(content.html())) {
            log.info("爬取内容为空, 跳过.....");
            return;
        }

        // Strip boilerplate paragraphs (source header, duplicated title, trailing tag lines).
        cleanTag(content.getElementsByTag("p"));
        // Re-host images on Qiniu; images that fail to upload are removed from the DOM.
        handleImg(content);

        saveArticle(title, href, document, content);
    }

    /**
     * Persists the crawl-history row, the article row and its content.
     *
     * <p>On any insert failure the partially written rows are rolled back —
     * including the history row, so the URL can be retried on a later run
     * (previously the history row was left behind, permanently skipping the
     * article via the duplicate check).
     *
     * @param title    article title
     * @param href     canonical source URL (primary key of the history table)
     * @param document full detail page, used for the meta description
     * @param content  cleaned article body
     */
    private void saveArticle(String title, String href, Document document, Element content) {
        BlogArticleWebHis webHis = new BlogArticleWebHis();
        BlogArticleWeb articleWeb = new BlogArticleWeb();
        BlogArticleWebContent webContent = new BlogArticleWebContent();

        try {
            webHis.setUrl(href);
            webHisMapper.insert(webHis);

            articleWeb.setId(IdUtil.generateDistributedId());
            articleWeb.setTitle(title);
            articleWeb.setKeywords(KEYWORDS);
            articleWeb.setCrtTime(new Date());
            articleWeb.setDescription(getDescription(document));
            articleWeb.setOriginalText(href);
            webMapper.insert(articleWeb);

            webContent.setId(IdUtil.generateDistributedId());
            webContent.setContent(content.html());
            webContent.setArticleId(articleWeb.getId());
            contentMapper.insert(webContent);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            log.info("添加到数据库异常, 清除相关数据....");
            // Roll back partial inserts; the history row first so a re-crawl is possible.
            webHisMapper.deleteByPrimaryKey(href);
            BlogArticleWeb deleteWeb = new BlogArticleWeb();
            deleteWeb.setId(articleWeb.getId());
            webMapper.delete(deleteWeb);
        }
    }

    /**
     * Re-hosts every {@code <img>} of the article body on Qiniu and rewrites its
     * {@code src} to the new URL plus the watermark suffix. Images whose upload
     * fails (exception or non-200 status) are collected first and detached from
     * the DOM afterwards, so the traversal is never mutated mid-iteration.
     *
     * @param content cleaned article body
     */
    private void handleImg(Element content) {
        List<Element> failedImgs = new ArrayList<>();

        for (Element img : content.getElementsByTag("img")) {
            String src = img.attr("src");
            try {
                PutRet putRet = qiniuHandleService.uploadImg(src);
                log.info("图片上传完成: {}", JSONObject.toJSONString(putRet));
                if (putRet.getStatusCode() != 200) {
                    failedImgs.add(img);
                    continue;
                }
                img.attr("src", String.format("http://image.tqcto.com/%s%s", putRet.getKey(), WATERMARK_SUFFIX));
            } catch (Exception e) {
                // Best-effort: a failed upload drops the image, never the article.
                log.info("图片上传失败, 删除元素");
                failedImgs.add(img);
            }
        }

        for (Element img : failedImgs) {
            img.remove();
        }
    }

    /**
     * Extracts the page's {@code <meta name="description">} content.
     *
     * @param document crawled detail page
     * @return the description text, or {@code null} when the page declares none
     */
    private String getDescription(Document document) {
        for (Element meta : document.getElementsByTag("meta")) {
            if ("description".equals(meta.attr("name"))) {
                return meta.attr("content");
            }
        }

        return null;
    }

    /**
     * Removes boilerplate paragraphs from the crawled body: the first two
     * {@code <p>} elements (source header and duplicated title) and up to two
     * trailing paragraphs whose text contains "标签" (the tag list).
     *
     * <p>{@code Element.remove()} detaches from the DOM but does not shrink the
     * {@code pList} collection, so the index arithmetic below stays valid.
     *
     * @param pList all {@code <p>} elements of the article body
     */
    private void cleanTag(Elements pList) {
        // Fix: guard short pages — the unguarded get(0)/get(1) used to throw
        // IndexOutOfBoundsException when the body had fewer than two paragraphs.
        if (pList.size() < 2) {
            return;
        }
        pList.get(0).remove();
        pList.get(1).remove();

        Element last = pList.get(pList.size() - 1);
        if (last.text().contains("标签")) {
            last.remove();
        }

        Element secondLast = pList.get(pList.size() - 2);
        if (secondLast.text().contains("标签")) {
            secondLast.remove();
        }
    }

    /**
     * Ad-hoc manual entry point for local debugging.
     *
     * <p>NOTE(review): instantiating the service with {@code new} bypasses Spring,
     * so every {@code @Autowired} field is {@code null} and each article fails
     * with an NPE — trigger the crawl through the Spring context instead.
     */
    public static void main(String[] args) {
        ReptilesServiceImpl service = new ReptilesServiceImpl();
        service.getMamiWeb();
    }
}