package com.jcl.recruit.crawler.service;

import com.alibaba.fastjson.JSONObject;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.jcl.core.constants.ChannelSourceConstant;
import com.jcl.core.utils.CollectionUtils;
import com.jcl.core.utils.DateUtils;
import com.jcl.core.utils.StringUtils;
import com.jcl.recruit.channel.vo.ChannelResumeParamInfo;
import com.jcl.core.common.page.PageResult;
import com.jcl.core.security.CrawlerUserToken;
import com.jcl.recruit.crawler.logic.CrawlerDownloadTaskLogic;
import com.jcl.recruit.crawler.model.CrawlerDownloadTask;
import com.jcl.recruit.resume.logic.ResumeLogic;
import com.jcl.recruit.resume.model.Resume;
import com.jcl.recruit.resume.model.ResumeExternalMsg;
import com.jcl.recruit.wechat.logic.WechatPushCrawlerLogic;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * @ClassName CrawlerResumeService
 * @Description Service backing the crawler resume flow: hands out pending
 *              resume-download tasks to the crawler client and completes
 *              downloaded resumes (field patch, timer stamp, WeChat push).
 * @Author yyliu
 * @Date 2018/10/22 14:51
 * @Version 1.0
 **/
@Service
public class CrawlerResumeService {

    /** Page size used when the caller does not supply one. */
    private static final int DEFAULT_PAGE_SIZE = 50;

    @Autowired
    private CrawlerDownloadTaskLogic crawlerDownloadTaskLogic;

    @Autowired
    private ResumeLogic              resumeLogic;

    @Autowired
    private WechatPushCrawlerLogic   wechatPushCrawlerLogic;

    /**
     * Fetches the first page of pending resume-download tasks, converts each task
     * into the map payload expected by the crawler client, and marks the returned
     * tasks as dispatched (status 1) so they are not handed out twice.
     *
     * @param corpCode corporation code filter; blank is treated as "no filter"
     * @param source   channel source filter; blank is treated as "no filter"
     * @param pageSize maximum number of tasks to return; defaults to 50 when null
     * @return page 1 of task payload maps, with the total row count from PageHelper
     */
    public PageResult downloadResumeList(String corpCode, String source, Integer pageSize){

        // Normalize blank filters to null so the query skips those predicates.
        if(StringUtils.isEmpty(corpCode)){
            corpCode = null;
        }
        if(StringUtils.isEmpty(source)){
            source = null;
        }
        pageSize = null == pageSize ? DEFAULT_PAGE_SIZE : pageSize;
        PageHelper.startPage(1, pageSize);

        // PageInfo must wrap the list returned directly by the PageHelper-intercepted
        // query in order to pick up the total row count.
        List<CrawlerDownloadTask> taskList = crawlerDownloadTaskLogic.findListByDownLoad( corpCode, source,
                null);
        PageInfo<CrawlerDownloadTask> info = new PageInfo<>(taskList);

        List<Map<String, Object>> tempList = new ArrayList<>();
        if(CollectionUtils.isNotEmpty(taskList)){
            List<Integer> ids = new ArrayList<>(taskList.size());
            for(CrawlerDownloadTask crawlerDownloadTask: taskList){
                ids.add(crawlerDownloadTask.getId());
                tempList.add(contractMap(crawlerDownloadTask));
            }
            // Flip every dispatched task to status 1 so it is not returned again.
            crawlerDownloadTaskLogic.update(ids, 1);
        }

        return new PageResult(1, pageSize, info.getTotal(), tempList);
    }


    /**
     * Completes a downloaded resume: patches realname/mobile/email from the crawled
     * channel data when present, stamps the download-task timer, and pushes a WeChat
     * notification to the requesting user.
     *
     * @param corpCode corporation code the resume belongs to
     * @param userId   id of the user who triggered the download (target of the push)
     * @param resumeId primary key of the resume, as a decimal string
     * @param entity   crawled channel data; may be null
     * @return always 0 (kept for caller compatibility)
     * @throws NumberFormatException if {@code resumeId} is not a valid integer
     */
    public int completeResume(String corpCode,
                              String userId,
                              String resumeId,
                              ChannelResumeParamInfo entity){
        Integer _resumeId = Integer.valueOf(resumeId);
        Resume resume = resumeLogic.selectPrimarykeyByEntity(_resumeId);
        if(null == resume){
            // Unknown resume: nothing to patch, no timer, no push.
            return 0;
        }

        // Copy over any non-blank crawled fields; only hit the DB when something changed.
        int number = 0;
        if(null != entity){
            if(StringUtils.hasLength(entity.getName())){
                resume.setRealname(entity.getName());
                number ++;
            }
            if(StringUtils.hasLength(entity.getMobile())){
                resume.setMobile(entity.getMobile());
                number ++;
            }
            if(StringUtils.hasLength(entity.getEmail())){
                resume.setEmail(entity.getEmail());
                number ++;
            }
        }
        if(number > 0){
            resumeLogic.updateByPrimaryKeySelective(resume);
        }

        // Record the completion time on the download task.
        String date = DateUtils.format(new Date(), DateUtils.DATE_FORMAT_YMDHMS);
        crawlerDownloadTaskLogic.updateTimer(date, corpCode, resumeId);

        // WeChat push notifying the user that the resume download completed.
        wechatPushCrawlerLogic.pushDownloadResumeMessage(corpCode, userId, _resumeId);

        return 0;
    }

    /**
     * Builds the "buy-resume" task payload handed to the crawler client.
     *
     * @param crawlerDownloadTask the download task to convert
     * @return map holding the task type, the decoded purchase context, the site code
     *         for the task's channel, and an encoded tenant token
     */
    private Map<String, Object> contractMap(CrawlerDownloadTask crawlerDownloadTask){
        Map<String, Object> data = new HashMap<>();
        data.put("taskType", "buy-resume");
        // externalMsg is stored encoded; decode it before parsing as JSON.
        String externalMsg = ResumeExternalMsg.decode(crawlerDownloadTask.getExternalMsg());
        data.put("buyResumeContext", JSONObject.parse(externalMsg));
        data.put("siteCode", ChannelSourceConstant.crawlerSourceMap.get(crawlerDownloadTask.getChannelType()));
        data.put("tenantId", CrawlerUserToken.encodeTenantId(crawlerDownloadTask.getCorpCode(),
                crawlerDownloadTask.getUserId(), crawlerDownloadTask.getResumeId()));
        return data;
    }
}
