package com.jcl.recruit.crawler.service;

import com.alibaba.fastjson.JSONObject;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.jcl.core.common.page.PageResult;
import com.jcl.core.constants.ChannelSourceConstant;
import com.jcl.core.constants.CrawlerConstant;
import com.jcl.core.utils.ChannelUtils;
import com.jcl.core.utils.CollectionUtils;
import com.jcl.core.utils.DateUtils;
import com.jcl.core.utils.StringUtils;
import com.jcl.core.web.model.CrawlerUserToken;
import com.jcl.recruit.crawler.logic.CrawlerDownloadResumeFeedbackLogic;
import com.jcl.recruit.crawler.vo.CrawlerResumeCompleteParam;
import com.jcl.recruit.crawler.logic.CrawlerDownloadTaskLogic;
import com.jcl.recruit.crawler.model.CrawlerDownloadTask;
import com.jcl.recruit.resume.logic.ResumePersInfoLogic;
import com.jcl.recruit.resume.model.ResumePersInfo;
import com.jcl.recruit.crawler.vo.CrawlerResumeDownloadListParam;
import com.jcl.recruit.wechat.logic.WechatPushCrawlerLogic;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * Service handling crawler resume download tasks and download-result callbacks.
 *
 * @ClassName CrawlerResumeService
 * @Author yyliu
 * @Date 2018/10/22 14:51
 * @Version 1.0
 **/
@Service
public class CrawlerResumeService {

    @Autowired
    private CrawlerDownloadTaskLogic crawlerDownloadTaskLogic;

    @Autowired
    private CrawlerDownloadResumeFeedbackLogic crawlerDownloadResumeFeedbackLogic;

    @Autowired
    private ResumePersInfoLogic resumePersInfoLogic;

    @Autowired
    private WechatPushCrawlerLogic wechatPushCrawlerLogic;

    /**
     * Pages through pending download tasks (TASK_STATUS_0) and converts each one into
     * the map structure the crawler front end consumes.
     *
     * @param param paging info plus optional corpCode / source filters; blank filter
     *              values are treated as "no filter"
     * @return one page of task maps together with the total matching row count
     */
    public PageResult downloadResumeList(CrawlerResumeDownloadListParam param) {
        // Blank filters must become null so the query layer skips the condition.
        if (StringUtils.isEmpty(param.getCorpCode())) {
            param.setCorpCode(null);
        }
        if (StringUtils.isEmpty(param.getSource())) {
            param.setSource(null);
        }

        int pageSize = param.getPageSize();
        PageHelper.startPage(param.getCurPage(), pageSize);
        Integer[] statusList = {CrawlerConstant.TASK_STATUS_0};
        List<CrawlerDownloadTask> taskList = crawlerDownloadTaskLogic.findListByDown(
                param.getCorpCode(), param.getSource(), statusList);
        // PageInfo must wrap the list returned right after startPage() to expose the
        // total row count. Typed to avoid the raw-type warning.
        PageInfo<CrawlerDownloadTask> info = new PageInfo<>(taskList);

        List<Map<String, Object>> tempList = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(taskList)) {
            for (CrawlerDownloadTask task : taskList) {
                tempList.add(contractMap(task));
            }
        }
        return new PageResult(param.getCurPage(), pageSize, info.getTotal(), tempList);
    }

    /**
     * Applies the crawler's download-result callback for a single resume: optionally
     * updates the stored contact details, stamps the task status, and (once per
     * status) pushes a WeChat notification and records the feedback.
     *
     * @param corpCode tenant code
     * @param userId   id of the user who requested the download
     * @param resumeId string form of the resume's numeric id
     * @param entity   callback payload (result code plus optional contact fields)
     * @return always 0 — callers do not distinguish outcomes
     */
    public int completeResume(String corpCode,
                              String userId,
                              String resumeId,
                              CrawlerResumeCompleteParam entity) {
        Integer _resumeId;
        try {
            _resumeId = Integer.valueOf(resumeId);
        } catch (NumberFormatException e) {
            // An unparsable id can never match a stored resume; treat as not found.
            return 0;
        }
        // The downloaded resume must already exist locally.
        ResumePersInfo persInfo = resumePersInfoLogic.findOne(corpCode, _resumeId);
        if (null == persInfo) {
            return 0;
        }
        // Fetch the matching download task; nothing to do without one.
        List<CrawlerDownloadTask> taskList = crawlerDownloadTaskLogic.findListByResumeId(corpCode, _resumeId);
        if (!CollectionUtils.isNotEmpty(taskList)) {
            return 0;
        }
        CrawlerDownloadTask task = taskList.get(0);
        // equals(), not ==: the status is a boxed Integer, and reference comparison is
        // only reliable inside the Integer cache (-128..127).
        if (Objects.equals(CrawlerConstant.TASK_STATUS_1, task.getStatus())) {
            // Already downloaded successfully — ignore duplicate callbacks.
            return 0;
        }
        // Map the callback code to an internal task status; unknown codes are ignored.
        Integer status = CrawlerConstant.codeStatus.get(entity.getCode());
        if (null == status) {
            return 0;
        }
        if (CrawlerConstant.CODE_DOWN_SUCCESS.equals(entity.getCode())) {
            updateResumeContact(persInfo, entity);
        }
        // Record the new download status on the task.
        String date = DateUtils.format(new Date(), DateUtils.DATE_FORMAT_YMDHMS);
        crawlerDownloadTaskLogic.updateTimer(date, status, corpCode, resumeId);
        // Notify + persist feedback only once per (task, status) pair.
        List<Integer> statusList =
                crawlerDownloadResumeFeedbackLogic.getStatusListByCrawlerId(corpCode, task.getId());
        if (!statusList.contains(status)) {
            wechatPushCrawlerLogic.pushDownloadResumeMessage(corpCode, userId, entity.getCode(), _resumeId);
            crawlerDownloadResumeFeedbackLogic.add(corpCode, task.getId(), _resumeId, status);
        }
        return 0;
    }

    /**
     * Copies any non-blank contact fields from the callback payload onto the stored
     * resume and persists it only if at least one field changed.
     */
    private void updateResumeContact(ResumePersInfo persInfo, CrawlerResumeCompleteParam entity) {
        int changed = 0;
        if (StringUtils.hasLength(entity.getName())) {
            persInfo.setName(entity.getName());
            changed++;
        }
        if (StringUtils.hasLength(entity.getMobile())) {
            persInfo.setMobile(entity.getMobile());
            changed++;
        }
        if (StringUtils.hasLength(entity.getEmail())) {
            persInfo.setEmail(entity.getEmail());
            changed++;
        }
        if (changed > 0) {
            resumePersInfoLogic.updateByPrimaryKey(persInfo);
        }
    }

    /**
     * Builds the front-end map representation of one pending download task:
     * decoded purchase context, mapped site code, and an encoded tenant token.
     */
    private Map<String, Object> contractMap(CrawlerDownloadTask crawlerDownloadTask) {
        Map<String, Object> data = new HashMap<>();
        data.put("taskType", "buy-resume");
        // externalMsg is stored encoded; decode before handing JSON to the client.
        String externalMsg = ChannelUtils.decodeResumeExternalMsg(crawlerDownloadTask.getExternalMsg());
        data.put("buyResumeContext", JSONObject.parse(externalMsg));
        data.put("siteCode", ChannelSourceConstant.crawlerSourceMap.get(crawlerDownloadTask.getChannelType()));
        data.put("tenantId", CrawlerUserToken.encodeTenantId(crawlerDownloadTask.getCorpCode(),
                crawlerDownloadTask.getUserId(), crawlerDownloadTask.getResumeId()));
        data.put("corpCode", crawlerDownloadTask.getCorpCode());
        return data;
    }
}
