package com.djhu.newscollector.crawel.base;

import com.djhu.newscollector.core.Holder.JobHolder;
import com.djhu.newscollector.core.dispatch.JobDispatcher;
import com.djhu.newscollector.core.entity.Response;
import com.djhu.newscollector.core.job.BaseJob;
import com.djhu.newscollector.core.worker.Worker;
import com.djhu.newscollector.crawel.core.Url;
import com.djhu.newscollector.crawel.imp.dispacth.JmsJobDispatcher;
import com.djhu.newscollector.crawel.imp.holder.RedisJobHolder;
import com.google.common.util.concurrent.RateLimiter;
import org.apache.activemq.broker.scheduler.Job;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.awt.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

/**
 * Created by zw on 2018/2/11.
 */
public class Crawler implements Worker {

    private static final Logger logger = LogManager.getLogger(Crawler.class);

    /** Supplies jobs to process and accepts failed jobs back for retry. */
    private JobDispatcher jobDispatcher;

    /** Records job ids already crawled so duplicates are skipped. */
    private JobHolder jobHolder;

    /** Delegate that performs the actual work for each dispatched job. */
    private Worker worker;

    /** Throttle: at most 2 job acquisitions per second. */
    private final RateLimiter rateLimiter = RateLimiter.create(2);

    /**
     * Continuously pulls jobs from the dispatcher and processes them with the
     * configured worker. Jobs already seen by the holder are skipped; jobs
     * whose processing fails are removed from the holder and returned to the
     * dispatcher for retry. Loops until the thread is interrupted.
     *
     * @param baseJob ignored — jobs are pulled from {@code jobDispatcher},
     *                not taken from this argument
     * @return {@code null} when the loop exits after an interruption
     */
    @Override
    public Response process(BaseJob baseJob) {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                rateLimiter.acquire();
                BaseJob job = jobDispatcher.dispatcher();
                if (job == null) {
                    logger.info("receive a null msg!!!");
                    continue;
                }
                if (jobHolder.contains(job)) {
                    logger.warn("id has been crawled already: {}", job.id());
                    continue;
                }

                jobHolder.add(job);
                logger.debug("job id {}", job.id());

                Response response = worker.process(job);
                if (response.success()) {
                    logger.info("job run success!!! {}", response.id());
                } else {
                    // Failed: forget it locally and hand it back for a retry.
                    jobHolder.remove(job);
                    jobDispatcher.retJob(job);
                }
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so the loop condition sees it
                // and the crawler shuts down cleanly.
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                // Log and keep crawling; a single bad job must not kill the loop.
                logger.error("crawl loop error", e);
            }
        }
        return null;
    }

    public JobDispatcher getJobDispatcher() {
        return jobDispatcher;
    }

    public void setJobDispatcher(JobDispatcher jobDispatcher) {
        this.jobDispatcher = jobDispatcher;
    }

    public JobHolder getJobHolder() {
        return jobHolder;
    }

    public void setJobHolder(JobHolder jobHolder) {
        this.jobHolder = jobHolder;
    }

    public Worker getWorker() {
        return worker;
    }

    public void setWorker(Worker worker) {
        this.worker = worker;
    }

    /** Trivial worker for debugging: prints the job name and reports success. */
    public static class PrintWorker implements Worker {

        @Override
        public Response process(BaseJob job) {
            System.out.println(job.name());
            return new Response() {
                @Override
                public String id() {
                    return job.id();
                }

                @Override
                public Exception exception() {
                    return null;
                }

                @Override
                public boolean success() {
                    return true;
                }
            };
        }
    }

    /**
     * Seeds the JMS queue with 100 listing-page URLs for the crawler to pick up.
     */
    public static void main(String[] args) {
        JmsJobDispatcher dispatcher = new JmsJobDispatcher("failover:(tcp://127.0.0.1:61616)", "news");
        for (int i = 1; i < 101; i++) {
            Url url = new Url();
            String link = "http://cl.57w2.pw/thread0806.php?fid=16&search=&page=" + i;
            url.setUrl(link);
            dispatcher.retJob(url);
        }
    }

}