package com.jcl.cloud.services.crawler.search.task;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.jcl.cloud.common.service.util.HttpClientUtil;
import com.jcl.cloud.common.service.util.PageOps;
import com.jcl.cloud.services.brain.client.CrawlerBrainClient;
import com.jcl.cloud.services.crawler.search.model.SearchConfigModel;
import com.jcl.cloud.services.search.common.model.ZlzpBuyResumeModel;
import com.jcl.cloud.services.crawler.search.service.RedisService;
import com.jcl.cloud.services.crawler.search.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCookieStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.text.SimpleDateFormat;
import java.util.*;

@Service
public class ZlzpSearchTaskHandler implements SearchTaskHandler {

    private static final Logger logger = LoggerFactory.getLogger(ZlzpSearchTaskHandler.class);

    /** Consecutive page-list failures in grabUrl at which the cookie is declared dead. */
    private static final int GRAB_URL_FAIL_LIMIT = 4;
    /** Consecutive detail-fetch failures in urlToSearch at which the cookie is declared dead. */
    private static final int DETAIL_FAIL_LIMIT = 6;

    @Autowired
    private RedisService redisService;
    @Autowired
    private CrawlerBrainClient crawlerBrainClient;

    /**
     * Runs one scheduled cycle for the given crawler. The task alternates between two
     * phases persisted in Redis under "unicrawler:task:info:{crawlerId}" / "search-config":
     * GRAB_URL (collect resume URLs into the queue) and URL_TO_SEARCH (pop URLs and
     * fetch/upload resume details). If the cookie is still alive the task reschedules
     * itself ~20s later; otherwise the cookie failure is reported to the brain service.
     *
     * @param crawlerId id of the crawler whose state lives in Redis
     */
    @Override
    public void execute(String crawlerId) {

        SearchConfigModel searchConfigModel = null;
        try {
            String searchConfig = this.redisService.hget("unicrawler:task:info:" + crawlerId, "search-config");

            String phase;
            if (StringUtils.isBlank(searchConfig)) {
                // First run for this crawler: start by grabbing resume URLs.
                phase = "GRAB_URL";
                searchConfigModel = new SearchConfigModel();
            } else {
                searchConfigModel = JSON.parseObject(searchConfig, SearchConfigModel.class);
                phase = searchConfigModel.getPhase();
            }

            if ("GRAB_URL".equalsIgnoreCase(phase)) {
                this.grabUrl(crawlerId, searchConfigModel);
                phase = "URL_TO_SEARCH";
            } else if ("URL_TO_SEARCH".equalsIgnoreCase(phase)) {
                this.urlToSearch(crawlerId, searchConfigModel);
            }

            searchConfigModel.setPhase(phase);
        } catch (Exception e) {
            logger.error("智联招聘  execute发生异常：", e);
        } finally {
            if (searchConfigModel == null) {
                // hget/parse above failed before the model existed; without this guard the
                // finally block itself would NPE and mask the original exception.
                logger.error("智联招聘  searchConfigModel未初始化，crawlerId = {}", crawlerId);
            } else if (searchConfigModel.isAlive()) {

                // 判断一下队列的长度
                long len = this.redisService.llen("crawler:brain:search:tasks:" + crawlerId);
                if (len == 0) { // 如果url使用完了 — restart from the grab phase next cycle
                    searchConfigModel.setPhase("GRAB_URL");
                    searchConfigModel.setConfig(null);
                }
                // 上传配置
                this.redisService.hset("unicrawler:task:info:" + crawlerId, "search-config", JSON.toJSONString(searchConfigModel));

                // Reschedule this crawler ~20s from now.
                long nextExecuteTime = System.currentTimeMillis() + 20000;
                this.redisService.hset("unicrawler:task:info:" + crawlerId, "nextExecuteTime", String.valueOf(nextExecuteTime));
                // 释放任务
                this.redisService.lpush("unicrawler:tasks:all", crawlerId);

            } else {
                // 上报cookie失效
                this.crawlerBrainClient.reportCookieFail(crawlerId);
                logger.info("智联招聘   上报cookie失效的爬虫crawlerId = {}", crawlerId);
            }
        }
    }

    /**
     * Phase 1: searches Zhaopin with the keyword obtained from the jucailin API and
     * pushes one encoded entry per resume ("keyword@=resumeId@=resumeNo;k;t") onto the
     * Redis queue "crawler:brain:search:tasks:{crawlerId}" for later detail crawling.
     * Marks the model dead when {@link #GRAB_URL_FAIL_LIMIT} consecutive pages fail.
     *
     * @param crawlerId         crawler whose cookie store and queue are used
     * @param searchConfigModel model whose alive flag is updated in the finally block
     */
    private void grabUrl(String crawlerId, SearchConfigModel searchConfigModel) {

        int cookieFailTimes = 0;
        try {
            String cookieStoreStr = this.redisService.hget("unicrawler:task:info:" + crawlerId, "cookieStore");

            BasicCookieStore cookieStore = SerializeUtils.serializeToObj(cookieStoreStr);
            HttpClientContext context = HttpClientContext.create();
            context.setCookieStore(cookieStore);

            List<Cookie> cookieList = cookieStore.getCookies();
            String zpRouteMeta = extractZpRouteMeta(cookieList);
            // Keeps its leading "; " — getSearchv2Result is fed the raw form below.
            String rawCookieHeader = buildRawCookieHeader(cookieList);

            // 获取关键词
            String keyWordResult = HttpClientUtil.doGet("http://www.jucailin.com/api/searchresume/keywords?from=zlzp");
            logger.info("获取到的搜索关键词：{}", keyWordResult);
            String keyWord = JSON.parseObject(keyWordResult).getString("keywords");

            if (StringUtils.isNotBlank(keyWord)) {
                // 进行搜索 — warm-up request; its response is only logged.
                String result1 = PageOps.getSearchv2Result(rawCookieHeader, context);
                logger.info("result1===>{}", result1);

                // Crawl the first 5 result pages.
                for (int i = 1; i <= 5; i++) {

                    logger.info("智联招聘网站grabUrl爬取接口连续失败的次数：{}", ++cookieFailTimes);
                    logger.info("开始获取第{}页的简历...", i);

                    try {
                        // Random 0-9s pause to look less like a bot.
                        Thread.sleep((long) (Math.random() * 10) * 1000);

                        Condition condition = new Condition();
                        condition.setS_DATE_MODIFIED(buildLastMonthRange());
                        condition.setS_DISCLOSURE_LEVEL(2);
                        condition.setS_EDUCATION("4,1");
                        condition.setS_ENGLISH_RESUME("1");
                        condition.setS_EXCLUSIVE_COMPANY("上海泛微网络科技股份有限公司");
                        condition.setS_KEYWORD_JOBNAME(keyWord);
                        condition.setIsrepeat(1);
                        condition.setRows(60);
                        condition.setSort("complex");
                        condition.setStart(i);
                        condition.setS_CURRENT_CITY("538");

                        Map<String, String> smap = new HashMap<>();
                        smap.put("inJson", JSON.toJSONString(condition));
                        smap.put("zp_route_meta", zpRouteMeta);
                        smap.put("Cookie", rawCookieHeader.substring(1));
                        String result2 = PageOps.getResumeListV2(smap, context);
                        logger.info("result2===>{}", result2);

                        JSONArray dataListJA = JSON.parseObject(result2).getJSONObject("data").getJSONArray("dataList");

                        // Page fetched and parsed — reset the consecutive-failure counter.
                        cookieFailTimes = 0;

                        for (Object o : dataListJA) {
                            JSONObject jo = (JSONObject) o;
                            String resumeId = jo.getString("id");
                            String k = jo.getString("k");
                            String t = jo.getString("t");

                            // Encoded as keyword@=resumeId@=<resumeNo;k;t>; decoded in urlToSearch.
                            String url = keyWord + "@=" + resumeId + "@=" + resumeId + "_1_1%3B" + k + "%3B" + t;
                            // 上传resumeNo
                            this.redisService.lpush("crawler:brain:search:tasks:" + crawlerId, url);
                            Thread.sleep(300);
                        }

                    } catch (Exception e) {
                        logger.error("智联招聘  翻页发生异常：", e);
                    }
                }
            }
        } catch (Exception e) {
            logger.error("智联招聘  grabUrl发生异常：", e);
        } finally {
            // 4+ consecutive page failures ⇒ cookie presumed invalid.
            searchConfigModel.setAlive(cookieFailTimes < GRAB_URL_FAIL_LIMIT);
        }
    }

    /**
     * Phase 2: pops up to 21 queued entries, fetches each resume's detail from Zhaopin,
     * flattens it and uploads it (plus work/education/project sub-records) to the
     * jucailin API. Marks the model dead after {@link #DETAIL_FAIL_LIMIT} consecutive
     * failures.
     *
     * @param crawlerId         crawler whose cookie store and queue are used
     * @param searchConfigModel model whose alive flag is updated in the finally block
     */
    private void urlToSearch(String crawlerId, SearchConfigModel searchConfigModel) {

        int cookieFailTimes = 0;
        try {
            String accountKey = this.redisService.hget("unicrawler:task:info:" + crawlerId, "accountKey");
            String cookieStoreStr = this.redisService.hget("unicrawler:task:info:" + crawlerId, "cookieStore");

            BasicCookieStore cookieStore = SerializeUtils.serializeToObj(cookieStoreStr);
            HttpClientContext context = HttpClientContext.create();
            context.setCookieStore(cookieStore);

            List<Cookie> cookieList = cookieStore.getCookies();
            String zpRouteMeta = extractZpRouteMeta(cookieList);
            String rawCookieHeader = buildRawCookieHeader(cookieList);

            for (int n = 0; n <= 20; n++) {

                String url = this.redisService.rpop("crawler:brain:search:tasks:" + crawlerId);

                if (StringUtils.isBlank(url)) {
                    logger.info("智联招聘   无url爬取详细信息");
                    continue;
                }

                try {
                    logger.info("智联招聘网站urlToSearch爬取接口连续失败的次数：{}", cookieFailTimes++);

                    // Entry format produced by grabUrl: keyword@=resumeNo@=<resumeNo;k;t>.
                    String[] parts = url.split("@=");
                    String keyword = parts[0];
                    String resumeNo = parts[1];
                    String resumeNoKT = parts[2];

                    // Two random 0-4s pauses (a mark-read call used to sit between them).
                    Thread.sleep((long) (Math.random() * 5) * 1000);
                    Thread.sleep((long) (Math.random() * 5) * 1000);

                    //https://rdapi.zhaopin.com/rd/resume/detail?_=1531792700737&resumeNo=8oefM1pcdirV5VehAJogEg_1_1%3B347874DCD7601271260E9E9985A720B2%3B1531792171474
                    String detailUrl = "https://rd5.zhaopin.com/api/rd/resume/detail?_=" + new Date().getTime() + "&resumeNo=" + resumeNoKT;
                    Map<String, String> smap = new HashMap<>();
                    smap.put("url", detailUrl);
                    smap.put("zp_route_meta", zpRouteMeta);
                    smap.put("Cookie", rawCookieHeader.substring(1));
                    String referer = "https://rd5.zhaopin.com/resume/detail?keyword=java&resumeNo=" + resumeNoKT + "&openFrom=1";
                    smap.put("Referer", referer);
                    String result4 = PageOps.getResumeDetail(smap, context);
                    logger.info("result4===>{}", result4);
                    Thread.sleep((long) (Math.random() * 5) * 1000);

                    // 解析上传
                    JSONObject dataJO = JSON.parseObject(result4).getJSONObject("data");
                    Map<String, String> argsMap = buildResumeArgs(dataJO);

                    // Context needed later to buy this resume, serialized alongside it.
                    referer = "https://rd5.zhaopin.com/resume/detail?keyword=" + keyword + "&resumeNo=" + resumeNoKT + "&openFrom=1";
                    Map<String, String> buyResumeContextMap = new HashMap<>();
                    buyResumeContextMap.put("Referer", referer);
                    buyResumeContextMap.put("resumeNo_k_t", resumeNoKT);
                    buyResumeContextMap.put("resumeNo", resumeNo);
                    buyResumeContextMap.put("accountKey", accountKey);
                    argsMap.put("external_msg", JSON.toJSONString(buyResumeContextMap));

                    logger.info("external_msg = {}", JSON.toJSONString(buyResumeContextMap));

                    // Detail fetched and parsed — reset the consecutive-failure counter.
                    cookieFailTimes = 0;

                    // 上传
                    String result = HttpClientUtil.doPostJson("http://www.jucailin.com/api/searchresume/save", JSON.toJSONString(argsMap));
                    logger.info("智联招聘上传简历信息的返回结果为：{}", result);

                    String code = JSON.parseObject(result).getString("code");
                    if ("200".equals(code)) {
                        logger.info("上传数据成功");

                        String resumeId = String.valueOf(JSON.parseObject(result).getJSONObject("data").getInteger("id"));
                        logger.info("智联招聘的resume_id = {}", resumeId);

                        uploadSubRecords(resumeId, argsMap);
                    }
                    Thread.sleep((long) (Math.random() * 5) * 1000);
                } catch (Exception e) {
                    if (cookieFailTimes >= DETAIL_FAIL_LIMIT) {
                        // Was silently swallowed before — log why we give up on this round.
                        logger.error("智联招聘搜索爬取连续失败次数达到上限，提前结束本轮：", e);
                        break;
                    }

                    logger.error(String.format("智联招聘搜索爬取单个简历发生异常：%s", e.getMessage()), e);
                }
            }
        } catch (Exception e) {
            logger.error("智联招聘  urlToSearch发生异常：", e);
        } finally {
            // 6+ consecutive detail failures ⇒ cookie presumed invalid.
            searchConfigModel.setAlive(cookieFailTimes < DETAIL_FAIL_LIMIT);
        }
    }

    /**
     * Flattens the "detail" and "candidate" sections of a Zhaopin resume-detail payload
     * into the flat parameter map expected by the jucailin save API.
     *
     * @param dataJO the "data" object of the detail response
     * @return mutable map of upload parameters (caller adds "external_msg")
     * @throws Exception propagated from the StringUtil / ZlzpUtil converters
     */
    private Map<String, String> buildResumeArgs(JSONObject dataJO) throws Exception {
        JSONObject detailJO = dataJO.getJSONObject("detail");
        JSONObject candidateJO = dataJO.getJSONObject("candidate");

        Map<String, String> argsMap = new HashMap<>();
        argsMap.put("realname", candidateJO.getString("userName"));
        argsMap.put("sex", StringUtil.code2SexZlzp(detailJO.getString("Gender")));
        argsMap.put("mobile", candidateJO.getString("mobilePhone"));
        argsMap.put("email", candidateJO.getString("email"));
        argsMap.put("birthday", StringUtil.ymd2BirthDayZlzp(
                candidateJO.getString("birthYear"),
                candidateJO.getString("birthMonth"),
                candidateJO.getString("birthDay")));
        // "dateModified" is an epoch-millis string; rendered as yyyy-MM-dd.
        argsMap.put("update_date", new SimpleDateFormat("yyyy-MM-dd")
                .format(new Date(Long.valueOf(candidateJO.getString("dateModified")))));
        argsMap.put("current_salary", detailJO.getString("CurrentSalary"));

        Map<String, String> salaryMap = StringUtil.salary2MapZlzp(detailJO.getString("DesiredSalaryScope"));
        argsMap.put("desire_salary_start", salaryMap.get("desire_salary_start"));
        argsMap.put("desire_salary_end", salaryMap.get("desire_salary_end"));

        argsMap.put("evaluate", detailJO.getString("CommentContent"));
        argsMap.put("arrival_info", detailJO.getString("AvailableAfterDays"));
        // 户口 (household registration) was never mapped upstream; kept empty on purpose.
        argsMap.put("native_place", "");
        argsMap.put("workyear", StringUtil.workYearsRangeIdToWorkYearsZlzp(candidateJO.getString("workYearsRangeId")));
        argsMap.put("work_last_company", detailJO.getString("CurrentCompanyName"));
        argsMap.put("work_last_position", detailJO.getString("CurrentJobTitle"));
        argsMap.put("edu_last_education", StringUtil.code2EduLevel(detailJO.getString("CurrentEducationLevel")));
        argsMap.put("edu_last_school", detailJO.getString("GraduatedFrom"));
        argsMap.put("edu_last_major", detailJO.getString("CurrentMajorName"));
        argsMap.put("source_sub", "zhaopin");
        argsMap.put("current_city", ZlzpUtil.doCode2Name(candidateJO.getString("cityId"), "1"));
        argsMap.put("current_address", candidateJO.getString("Address1"));
        argsMap.put("work_detail", detailJO.getString("WorkExperience"));
        argsMap.put("edu_detail", detailJO.getString("EducationExperience"));
        argsMap.put("project_detail", detailJO.getString("ProjectExperience"));
        return argsMap;
    }

    /**
     * Uploads the work / education / project experience lists that belong to an
     * already-saved resume.
     *
     * @param resumeId id returned by the save API
     * @param argsMap  upload map whose *_detail entries hold the raw experience text
     * @throws Exception propagated from the converters / HTTP client
     */
    private void uploadSubRecords(String resumeId, Map<String, String> argsMap) throws Exception {
        // 上传工作经历
        List<Map<String, String>> worksList = StringUtil.wd2MapZlzp(argsMap.get("work_detail"));
        for (Map<String, String> workMap : worksList) {
            workMap.put("resume_id", resumeId);
            HttpClientUtil.doPostJson("http://www.jucailin.com/api/searchresume/worksave", JSON.toJSONString(workMap));
        }

        // 上传教育经历
        List<Map<String, String>> edusList = StringUtil.edu2MapZlzp(argsMap.get("edu_detail"));
        for (Map<String, String> eduMap : edusList) {
            eduMap.put("resume_id", resumeId);
            HttpClientUtil.doPostJson("http://www.jucailin.com/api/searchresume/edusave", JSON.toJSONString(eduMap));
        }

        // 上传项目经历
        List<Map<String, String>> projectsList = StringUtil.pd2MapZlzp(argsMap.get("project_detail"));
        for (Map<String, String> projectMap : projectsList) {
            projectMap.put("resume_id", resumeId);
            HttpClientUtil.doPostJson("http://www.jucailin.com/api/searchresume/projectsave", JSON.toJSONString(projectMap));
        }
    }

    /** Returns the value of the "zp-route-meta" cookie, or "" when absent. */
    private static String extractZpRouteMeta(List<Cookie> cookies) {
        for (Cookie c : cookies) {
            if ("zp-route-meta".equals(c.getName())) {
                return c.getValue();
            }
        }
        return "";
    }

    /**
     * Joins all cookies into a single header string. NOTE: the result deliberately keeps
     * its leading "; " because PageOps.getSearchv2Result consumes the raw form; callers
     * that need a clean "name=value; ..." header strip the first character via
     * substring(1), exactly as the original inline code did.
     */
    private static String buildRawCookieHeader(List<Cookie> cookies) {
        StringBuilder sb = new StringBuilder();
        for (Cookie c : cookies) {
            sb.append("; ").append(c.getName()).append('=').append(c.getValue());
        }
        return sb.toString();
    }

    /** Builds the "yyMMdd,yyMMdd" modified-date window covering the last month up to today. */
    private static String buildLastMonthRange() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyMMdd");
        Calendar calendar = Calendar.getInstance();
        Date end = calendar.getTime();
        //calendar.add(Calendar.DATE,-7);
        calendar.add(Calendar.MONTH, -1);
        Date start = calendar.getTime();
        return sdf.format(start) + "," + sdf.format(end);
    }
}
