package com.jcl.recruit.resume.service;

import com.jcl.core.security.CrawlerUserToken;
import com.jcl.core.utils.CollectionUtils;
import com.jcl.recruit.channel.constants.ChannelDownloadConstants;
import com.jcl.recruit.channel.logic.ChannelNetLogic;
import com.jcl.recruit.channel.model.ChannelNet;
import com.jcl.recruit.core.constants.ResumeLogType;
import com.jcl.recruit.core.web.utils.RequestUtils;
import com.jcl.recruit.crawler.logic.CrawlerDownloadTaskLogic;
import com.jcl.recruit.crawler.model.CrawlerDownloadTask;
import com.jcl.recruit.log.logic.LoggerProxy;
import com.jcl.recruit.resume.logic.ResumeBasicInfoLogic;
import com.jcl.recruit.resume.logic.ResumeTransferLogic;
import com.jcl.recruit.resume.model.ResumeBasicInfo;
import com.jcl.recruit.server.elastic.SearchResumeServer;
import com.jcl.recruit.server.elastic.vo.SearchResumeSimpleResultEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @ClassName ResumeDownloadService
 * @Description Downloads an external-channel resume for the current corp/user: resolves the
 *              local resume record, verifies the channel binding, triggers the crawler download
 *              and records a {@code CrawlerDownloadTask}.
 * @Author yyliu
 * @Date 2018/9/17 11:03
 * @Version 1.0
 **/
@Service
public class ResumeDownloadService {
    private static final Logger logger = LoggerFactory.getLogger(ResumeDownloadService.class);

    @Autowired
    private ResumeBasicInfoLogic resumeBasicInfoLogic;

    @Autowired
    private ChannelNetLogic channelNetLogic;

    @Autowired
    private ResumeTransferLogic resumeTransferLogic;

    @Autowired
    private CrawlerDownloadTaskLogic crawlerDownloadTaskLogic;

    @Autowired
    private SearchResumeServer searchResumeServer;

    @Autowired
    private LoggerProxy loggerProxy;

    /**
     * Downloads the resume identified by {@code resumeId} from its external channel for the
     * current user/corp (taken from the request context).
     * <p>
     * Flow: resolve the local resume record (by id, or by external key; when found locally the
     * search id is taken from {@code applyOtherJid}), check the corp has a channel binding for
     * the resume's source channel, refuse duplicates, transfer-in the resume if it is new,
     * trigger the crawler download and persist a {@link CrawlerDownloadTask}.
     *
     * @param resumeId local resume id, or an external search-resume id when no local record exists
     * @return an empty map on success; on failure a map with keys {@code "code"}
     *         (a {@link ChannelDownloadConstants} exception code) and {@code "msg"}
     */
    public Map<String, String> download(Integer resumeId) {
        Map<String, String> dataMap = new HashMap<>();

        Integer userId = Integer.valueOf(RequestUtils.getUserId());
        String corpCode = RequestUtils.getCorpCode();
        Integer searchResumeId = resumeId;

        ResumeBasicInfo resume = resumeBasicInfoLogic.findOne(resumeId);
        if (null == resume) {
            resume = resumeBasicInfoLogic.findOneByExternalKey(corpCode, resumeId.toString());
        } else {
            // Local record exists: the search index is keyed by the external id.
            // NOTE(review): assumes applyOtherJid is always a numeric string — confirm upstream.
            searchResumeId = Integer.valueOf(resume.getApplyOtherJid());
        }

        SearchResumeSimpleResultEntity entity = searchResumeServer.findSimpleEntity(searchResumeId);
        if (null == entity) {
            // Fix: previously NPE'd on entity.getSourceSub() when the search index had no entry.
            return failure(dataMap, ChannelDownloadConstants.EXCEPTION_CODE_3001, "当前简历无法关联外网信息");
        }
        String channelType = entity.getSourceSub();

        ChannelNet channelNet = findChannelNet(corpCode, channelType);
        if (null == channelNet) {
            return failure(dataMap, ChannelDownloadConstants.EXCEPTION_CODE_3001, "当前简历无法关联外网信息");
        }

        if (null != resume) {
            // Already a local record: refuse a second download of the same resume.
            List<CrawlerDownloadTask> downloadTaskList = crawlerDownloadTaskLogic.findListByResumeId(corpCode,
                    resume.getId());
            if (CollectionUtils.isNotEmpty(downloadTaskList)) {
                return failure(dataMap, ChannelDownloadConstants.EXCEPTION_CODE_3003, "当前简历已经下载");
            }
        } else {
            // New resume: transfer it in from the search index and log the action.
            try {
                resume = resumeTransferLogic.transferResumeById(searchResumeId,
                        ResumeLogType.TYPE_ADD_DOWNLOAD, corpCode);
                if (null != resume) {
                    loggerProxy.addResumeLog(resume.getId(), ResumeLogType.TYPE_ADD_DOWNLOAD);
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
            if (null == resume) {
                // Fix: the original swallowed the transfer failure and then NPE'd on
                // resume.getId() below; report the failure to the caller instead.
                return failure(dataMap, ChannelDownloadConstants.EXCEPTION_CODE_3001, "当前简历无法关联外网信息");
            }
        }

        String tenantId = CrawlerUserToken.encodeTenantId(corpCode, userId, resume.getId());

        // Ask the search server to perform the actual channel download.
        Map<String, Object> data = searchResumeServer.downloadResume(searchResumeId, corpCode, tenantId);

        if (null == data || null == data.get("context")) {
            // Fix: also guard the "context" payload — data.get("context").toString() NPE'd
            // when the key was missing from the download response.
            return failure(dataMap, ChannelDownloadConstants.EXCEPTION_CODE_3001, "当前简历无法关联外网信息");
        }

        crawlerDownloadTaskLogic.add(buildDownloadTask(channelType, corpCode, resume.getId(),
                data.get("context").toString(), userId));

        // Empty map signals success to callers (no "code"/"msg" entries).
        return dataMap;
    }

    /**
     * Looks up the corp's channel binding for the given channel type, or {@code null} if the
     * corp has no binding for it.
     */
    private ChannelNet findChannelNet(String corpCode, String channelType) {
        List<ChannelNet> channelNetList = channelNetLogic.selectList(corpCode, null);
        if (CollectionUtils.isNotEmpty(channelNetList)) {
            for (ChannelNet channelNet : channelNetList) {
                if (null != channelType && channelType.equals(channelNet.getChannelType())) {
                    return channelNet;
                }
            }
        }
        return null;
    }

    /** Builds a pending (status 0) download-task record for persistence. */
    private CrawlerDownloadTask buildDownloadTask(String channelType, String corpCode,
                                                  Integer resumeId, String externalMsg, Integer userId) {
        CrawlerDownloadTask paramInfo = new CrawlerDownloadTask();
        paramInfo.setChannelType(channelType);
        paramInfo.setStatus(0);
        paramInfo.setCorpCode(corpCode);
        paramInfo.setResumeId(resumeId);
        paramInfo.setExternalMsg(externalMsg);
        paramInfo.setUserId(userId);
        paramInfo.setCreateTime(new Date());
        return paramInfo;
    }

    /** Populates and returns the standard error response map. */
    private Map<String, String> failure(Map<String, String> dataMap, String code, String msg) {
        dataMap.put("code", code);
        dataMap.put("msg", msg);
        return dataMap;
    }
}
