package com.jcl.elasticsearch.thread;

import com.jcl.core.utils.HttpRequest;
import com.jcl.elasticsearch.core.thread.ResumeCrawlerContext;
import com.jcl.elasticsearch.core.utils.CrawlerConfigUtils;
import com.jcl.elasticsearch.core.web.SpringContext;
import com.jcl.elasticsearch.resume.logic.RecruitElasticLogic;
import com.jcl.elasticsearch.resume.logic.ResumeCommonLogic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * @ClassName ResumeDownloadTask
 * @Description Fires a background thread that POSTs a resume-download request
 *              to the crawler service at the configured download URL.
 * @Author yyliu
 * @Date 2018/11/13 17:33
 * @Version 1.0
 **/
public class ResumeDownloadTask {

    // Crawler download endpoint, resolved once at class load.
    // NOTE(review): assumes CrawlerConfigUtils is usable at class-init time — confirm.
    private static final String url = CrawlerConfigUtils.downloadUrl();

    /**
     * Starts a one-shot background thread that sends the given context
     * as a JSON POST to the configured download URL.
     *
     * @param context request payload sent as the JSON body of the POST
     */
    public static void start(Map<String, Object> context) {
        ResumeDownloadThread thread = new ResumeDownloadThread(context);
        thread.start();
    }


    /** Worker thread that performs a single HTTP POST and logs the response. */
    public static class ResumeDownloadThread extends Thread {

        private static final Logger logger = LoggerFactory.getLogger(ResumeDownloadThread.class);

        // Request payload forwarded to the crawler service.
        private Map<String, Object> context;

        /**
         * @param data request payload to POST; stored as this thread's context
         */
        public ResumeDownloadThread(Map<String, Object> data) {
            // BUG FIX: was "this.context = context" — a self-assignment that ignored
            // the constructor argument and left the field null, so the POST body
            // was always null. Assign the parameter instead.
            this.context = data;
        }

        @Override
        public void run() {
            logger.debug("download start ...");
            handle();
            logger.debug("download end . ");
        }

        /** Posts the context as JSON to the download endpoint and logs the reply. */
        private void handle() {
            String message = HttpRequest.httpPost(url, HttpRequest.JSON_CONTENT_TYPE, this.context);
            // BUG FIX: was System.out.println(message); use the SLF4J logger with a
            // parameterized message so output goes through the configured log system.
            logger.debug("download response: {}", message);
        }

        public Map<String, Object> getContext() {
            return context;
        }

        public void setContext(Map<String, Object> context) {
            this.context = context;
        }
    }


}
