/*
 * Author:   W.SH
 */
package com.wgg.crawler.parser;

import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.http.HttpStatus;

import com.wgg.crawler.core.HttpSpiderRequest;
import com.wgg.crawler.core.HttpSpiderResponse;
import com.wgg.crawler.core.SpiderRequest;
import com.wgg.crawler.model.CrawlerDatum;
import com.wgg.crawler.scheduler.BloomDepthScheduler;
import com.wgg.crawler.scheduler.Scheduler;
import com.wgg.crawler.scheduler.SchedulerMonitor;
import com.wgg.crawler.scheduler.thread.ThreadPool;
import com.wgg.crawler.util.Config;
import com.wgg.util.StringUtils;


/**
 *  Page fetch-and-parse driver: pulls URLs from the scheduler and dispatches
 *  fetch tasks to a worker pool.<br>
 *
 *  @author W.SH
 */
public class Fetcher extends FetcherMonitor {
    
    private static final Logger LOG = Logger.getLogger(Fetcher.class.getName());
    
    private Scheduler scheduler;
    private Handler handler;
    private SpiderRequest request;
    private int depth;
    
    /** Fetch loop is actively polling the scheduler for URLs. */
    private static final int RUNNING = 0;
    /** Fetch loop has been told to stop; no further tasks are submitted. */
    private static final int EXIT = 1;
    protected volatile int runStatus = RUNNING;
    
    private ThreadPool threadPool;
    protected volatile int poolSize = Config.THREAD_COUNT;
    
    protected ReentrantLock reentrantLock = new ReentrantLock();
    protected Condition newUrlCondition = reentrantLock.newCondition();
    
    public Fetcher(Scheduler scheduler, Handler handler) {
        this(scheduler, handler, 0);
    }
    
    /**
     * @param scheduler URL queue to poll from and feed discovered links back into
     * @param handler   callback notified of each fetch success/failure
     * @param depth     crawl depth this fetcher operates at
     */
    public Fetcher(Scheduler scheduler, Handler handler, int depth) {
        this.scheduler = scheduler;
        this.handler = handler;
        this.depth = depth;
    }
    
    /**
     * Main fetch loop. Polls URLs from the scheduler and hands each one to the
     * worker pool, until the thread is interrupted, the scheduler stays empty
     * for {@code Config.GET_SEED_MAX_TRY} consecutive polls, or the fetched-page
     * limit {@code Config.LIMIT_FETCHED_COUNT} is exceeded.
     */
    public void doFetche() {
        init();
        initScheduler();
        
        while (!Thread.currentThread().isInterrupted() && runStatus == RUNNING) {
            try {
                String url = null;
                int nullCount = 0;
                // Poll the queue; on a miss wait up to 3s. After
                // Config.GET_SEED_MAX_TRY consecutive misses, shut down.
                while ((url = scheduler.poll()) == null) {
                    waitNewUrl();
                    nullCount++;
                    if (nullCount == Config.GET_SEED_MAX_TRY) {
                        runStatus = EXIT;
                        threadPool.close();
                        break;
                    }
                }
                if (runStatus == EXIT) {
                    break;
                }
                LOG.log(Level.INFO, "\t scheduler had taked request count : " + ((SchedulerMonitor)scheduler).getRequestedCount() + "\n");
                LOG.log(Level.INFO, "\t scheduler now,  all request count  : " + ((SchedulerMonitor)scheduler).getTotalRequestCount() + "\n");
                
                if (fetchedCount.get() > Config.LIMIT_FETCHED_COUNT) {
                    // NOTE(review): exiting via the fetch limit never calls
                    // threadPool.close(); confirm whether the pool leaks here
                    // and whether close() is safe to invoke on this path too.
                    runStatus = EXIT;
                }
                
                threadPool.execute(new FetcherThread(url, request));
            } catch (InterruptedException e) {
                // BUGFIX: was an empty catch that swallowed the interrupt.
                // Restore the flag so the outer while-condition observes it.
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Creates the worker pool and the shared HTTP request object. */
    public void init() {
        threadPool = new ThreadPool(poolSize);
        request = new HttpSpiderRequest().setPoolSize(Config.HTTP_MAX_CONNECT);
    }
 
    /**
     * For depth-aware schedulers: once the scheduler's depth counter reaches
     * this fetcher's depth, merge the current queue with the next-depth queue.
     */
    public void initScheduler() {
        if (scheduler instanceof BloomDepthScheduler) {
            if (((BloomDepthScheduler) scheduler).getDepthCount() <= depth) {
                // Merge the current queue and the next-depth queue.
                ((BloomDepthScheduler) scheduler).merge();
            }
        }
    }
 
    /**
     * Blocks up to 3 seconds waiting for new URLs. If all worker threads are
     * idle, first tries to promote the next-depth queue via
     * {@link #initScheduler()} so the poll has something to return.
     */
    private void waitNewUrl() {
        reentrantLock.lock();
        try {
            if (runStatus == EXIT) {
                return;
            }
            if (threadPool.getActivceThread().get() == 0) {
                initScheduler();
            }
            newUrlCondition.await(3000, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            LOG.log(Level.WARNING, "waitNewUrl exception: " + e);
            // BUGFIX: restore the interrupt flag so callers can observe it.
            Thread.currentThread().interrupt();
        } finally {
            reentrantLock.unlock();
        }
    }
    
    /** Worker task: fetches one URL and feeds the result to the handler. */
    class FetcherThread implements Runnable {
        private String url;
        private SpiderRequest request;
        
        public FetcherThread(String url, SpiderRequest request) {
            this.request = request;
            this.url = url;
        }

        @Override
        public void run() {
            try {
                // Keep draining queued work even after EXIT is signalled.
                if (runStatus != EXIT || ((ThreadPoolExecutor) threadPool.getExecutorService()).getQueue().size() != 0) {
                    HttpSpiderResponse response = null;
                    if (runStatus == RUNNING && !StringUtils.isBlank(url)) {
                        LOG.log(Level.INFO, "\t" + Thread.currentThread().getName() + "start fetch " + url);
                        CrawlerDatum datum = new CrawlerDatum(url);
                        response = (HttpSpiderResponse) request.getResponse(datum);
                        datum.setLastFetchTime(System.currentTimeMillis()); // time of this (latest) fetch
                    }
                    // handler implementations are not assumed thread-safe;
                    // serialize all callbacks on the shared handler instance.
                    synchronized (handler) {
                        if (response == null || response.getStatusLine() == null || response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                            handler.onFail(response);
                            // BUGFIX: response may be null in this branch (that is
                            // one of the tested conditions above); log the task's
                            // url field instead of dereferencing response (NPE).
                            LOG.log(Level.INFO, "\t fetcher this url failed" + url);
                        } else {
                            handler.onSuccess(response);
                            fetchedCount.incrementAndGet();
                            LOG.log(Level.INFO, "\t now fetched url count" + fetchedCount.get() + "\n");
                            List<String> urlList = handler.handleAndGetLinks(response);
                            // Was System.out.println debug residue; demoted to FINE.
                            if (LOG.isLoggable(Level.FINE)) {
                                LOG.log(Level.FINE, response.getUrl() + " extracted links: " + urlList);
                            }
                            addSeed(urlList);
                            LOG.log(Level.INFO, "\t" + Thread.currentThread().getName() + "\t fetched sucess");
                        }
                    }
                }
            } catch (Exception e) {
                // BUGFIX: pass the throwable so the stack trace is actually
                // logged (the original promised one but logged only e.toString()).
                LOG.log(Level.WARNING, "\t fetch task excpetion： " + e, e);
            }
        }
    }
    
    /**
     * Queues newly discovered links for crawling; lazily creates a default
     * {@link BloomDepthScheduler} if no scheduler was supplied.
     */
    public void addSeed(List<String> urlList) {
        if (scheduler == null) {
            scheduler = new BloomDepthScheduler();
        }
        scheduler.offer(urlList);
    }

    /** @return current loop state: {@code RUNNING} (0) or {@code EXIT} (1). */
    public int getRunStatus() {
        return runStatus;
    }
}
