package com.chance.cc.crawler.development.scripts.baidu.zhishu;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;


/**
 * Crawler script for Baidu index ("zhishu") subject trend data.
 *
 * <p>Flow: an entry page ({@link #beginUrl}) yields a subject id, from which three
 * jsonp trend-API requests are generated (source=0 PC+mobile, source=1 PC, source=2
 * mobile). Each jsonp response is parsed in {@link #washPage} into one
 * {@code CrawlerData} per daily data point of the last 7 days of the window.
 */
public class BaiDuZhiShuCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(BaiDuZhiShuCrawlerScript.class);

    /** Entry url: a Baidu index subject page; the trailing digits are the subject id. */
    public static final String beginUrl = "http://index.baidu.com/v2/main/index.html#/subject/\\d*";

    /** Trend API url pattern matching any terminal source (jsonp callback has a random suffix). */
    public static final String Url = "http://insight.baidu.com/base/search/trend/general\\?id=\\d*&dateType=30&filterType=1&source=\\d*&callback=_jsonp\\S*";

    /** Trend API url pattern, source=0: PC + mobile combined. */
    public static final String PcAndMobileUrl = "http://insight.baidu.com/base/search/trend/general\\?id=\\d*&dateType=30&filterType=1&source=0&callback=_jsonp\\S*";

    /** Trend API url pattern, source=1: PC only. */
    public static final String PCUrl = "http://insight.baidu.com/base/search/trend/general\\?id=\\d*&dateType=30&filterType=1&source=1&callback=_jsonp\\S*";

    /** Trend API url pattern, source=2: mobile only. */
    public static final String mobileUrl = "http://insight.baidu.com/base/search/trend/general\\?id=\\d*&dateType=30&filterType=1&source=2&callback=_jsonp\\S*";

    /** Expected value of the "site" category tag for records this script accepts. */
    private static final String site = "zhishu";

    private static final String DOMAIN = "baidu";

    /** Biz-tag key counting how often a failed download has been re-queued. */
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";

    /** Maximum number of re-download attempts for a failed page. */
    private static final int MAX_RETRY = 10;

    @Override
    public String domain() {
        // Use the shared constant so the domain and the retry tag cannot drift apart.
        return DOMAIN;
    }

    @Override
    public void initUrlRegulars() {
        addUrlRegular(beginUrl);
        addUrlRegular(Url);
    }

    /**
     * Turns keyword support-source records into follow-up search requests.
     * Only support records whose url contains "keys" are consumed.
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (url.contains("keys")) {
                initKeyword(crawlerRequestRecord, supportSourceRecord, crawlerRecords);
            }
        }
        return crawlerRecords;
    }

    /**
     * Parses the keyword list from a support-source page and emits one turn-page
     * request per keyword, built from the "url" extra (a format template whose
     * single %s is replaced with the URL-encoded keyword).
     */
    private void initKeyword(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Map<String, Object> extras = crawlerRequestRecord.getHttpRequest().getExtras();
        String searchSourceUrl = (String) extras.get("url");
        if (StringUtils.isBlank(searchSourceUrl)) {
            log.error("search kw source url can not null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            try {
                String url = String.format(searchSourceUrl, URLEncoder.encode(keyword, "UTF-8"));
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(url)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .build();

                turnRecord.getHttpRequest().addExtra("keyword", keyword);
                crawlerRecords.add(turnRecord);
            } catch (UnsupportedEncodingException e) {
                // Keep the stack trace; losing it makes encoding failures undiagnosable.
                log.error("encode keyword [{}] failed", keyword, e);
            }
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Compare constant-first: null-safe when the "site" tag is absent.
        return site.equalsIgnoreCase(crawlerSite);
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parseLinksList = new ArrayList<>();

        if (httpPage.getStatusCode() != 200 || (!httpPage.isDownloadSuccess())) {
            log.error("download page url == {} error status is {}", httpPage.getRequest().getUrl(), httpPage.getStatusCode());
            // Download failed: re-queue the same url instead of washing a bad page.
            this.requestAgainCrawlerRecord(parseLinksList, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parseLinksList;
        }
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (url.matches(beginUrl)) {
            parseBeginLinks(crawlerRequestRecord, httpPage, parseLinksList);
        }

        return parseLinksList;
    }

    /**
     * From a subject entry page, emits the three trend-API requests
     * (source=0 PC+mobile, source=1 PC, source=2 mobile).
     */
    private void parseBeginLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parseLinksList) {
        String url = httpPage.getRequest().getUrl();
        // The subject id follows ".../subject/" ("subject" + '/' = 8 chars) in the entry url.
        String id = url.substring(url.indexOf("subject") + 8);
        // Random 10-char alphanumeric jsonp callback suffix, mimicking the browser request.
        String randomId = RandomStringUtils.randomAlphanumeric(10);

        for (String source : new String[]{"0", "1", "2"}) {
            parseLinksList.add(buildTrendRecord(crawlerRequestRecord, id, source, randomId));
        }
    }

    /** Builds one trend-API item request for the given subject id and terminal source. */
    private CrawlerRequestRecord buildTrendRecord(CrawlerRequestRecord parent, String id, String source, String randomId) {
        String trendUrl = "http://insight.baidu.com/base/search/trend/general?id=" + id
                + "&dateType=30&filterType=1&source=" + source + "&callback=_jsonp" + randomId;
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(trendUrl)
                .recordKey(trendUrl)
                .releaseTime(System.currentTimeMillis())
                .httpHead("Referer", "http://index.baidu.com/")
                .copyBizTags()
                .copyResultTags()
                .build();
        record.setNeedWashPage(true);
        // Marker consumed by washPage() to decide whether this page gets washed.
        record.tagsCreator().bizTags().addCustomKV("need", "need");
        return record;
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("need")) {
            return washInfo(crawlerRequestRecord, httpPage);
        }
        // Pages without the marker produce no data (framework treats null as "nothing").
        return null;
    }

    /**
     * Unwraps the jsonp payload of a trend-API response and emits one CrawlerData
     * per daily point whose timestamp falls within the last 7 days of the window.
     */
    private List<CrawlerData> washInfo(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        // The jsonp body arrives wrapped in html; strip down to the JSON inside "_jsonpXXX( ... )".
        String s = httpPage.getHtml().get();
        String text = s.substring(s.indexOf("("), s.lastIndexOf("</body>"));
        text = text.replaceAll("&amp;quot;", "\"");
        text = text.substring(text.indexOf("(") + 1, text.lastIndexOf(")"));
        JSONObject jsonObject = JSONObject.parseObject(text);
        String data = jsonObject.getString("data");

        JSONObject dataJson = JSONObject.parseObject(data);
        String keyWord = dataJson.getString("name");
        String results = dataJson.getString("results");

        JSONObject resultJson = JSONObject.parseObject(results);
        String current = resultJson.getString("current");
        String currDate = resultJson.getString("currDate");
        // currDate presumably looks like "start~end" — keep only the end date. TODO confirm format.
        currDate = currDate.substring(currDate.indexOf("~") + 1);
        long time = 0;
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
        try {
            Date date = simpleDateFormat.parse(currDate);
            // Cut-off = window end minus 7 days; long arithmetic to be overflow-proof.
            time = date.getTime() - 7L * 24 * 60 * 60 * 1000;
        } catch (ParseException e) {
            // Cut-off stays 0, so every point passes the filter below.
            log.error("parse currDate [{}] failed", currDate, e);
        }

        // These depend only on the request url, not on the loop variable — compute once.
        String url = httpPage.getRequest().getUrl();
        String id = url.substring(url.indexOf("id=") + 3, url.indexOf("&dateType"));

        List<?> list = JSONObject.parseObject(current, List.class);
        for (Object o : list) {
            JSONObject listJson = JSONObject.parseObject(o.toString());
            long releaseTime = Long.parseLong(listJson.getString("item"));
            String value = listJson.getString("value");

            if (time < releaseTime) {
                // The three url patterns are mutually exclusive (source=0/1/2).
                if (url.matches(PcAndMobileUrl)) {
                    crawlerDataList.add(buildCrawlerData(crawlerRequestRecord, httpPage, url, id, releaseTime, keyWord, value, "PC+移动"));
                } else if (url.matches(PCUrl)) {
                    crawlerDataList.add(buildCrawlerData(crawlerRequestRecord, httpPage, url, id, releaseTime, keyWord, value, "PC"));
                } else if (url.matches(mobileUrl)) {
                    crawlerDataList.add(buildCrawlerData(crawlerRequestRecord, httpPage, url, id, releaseTime, keyWord, value, "移动"));
                }
            }
        }

        return crawlerDataList;
    }

    /** Builds one trend data point; terminalType is the human-readable terminal label. */
    private CrawlerData buildCrawlerData(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage,
                                         String url, String id, long releaseTime,
                                         String keyWord, String value, String terminalType) {
        return CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), id))
                .url(url)
                .releaseTime(releaseTime)
                .addContentKV(Tag_Field_Keword, keyWord)
                .addContentKV(Field_Index_Value, value)
                .addContentKV(Field_Index_Terminal_Type, terminalType)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .build();
    }

    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {
        // No post-processing needed for this script.
    }

    /**
     * Re-queues a failed download, carrying a retry counter in the biz tags and
     * giving up after {@link #MAX_RETRY} attempts. Turn-page requests are replayed
     * as turn-page, item requests as item requests.
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= MAX_RETRY) {
                // Was "pcauto" — copy-paste from another script; this is the baidu domain.
                log.error("{} download page the number of retries exceeds the limit" +
                        ",request url {}", DOMAIN, crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord;
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    // Append the count so the de-dup record key differs per attempt.
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

}
