package crawler.handler;

import crawler.common.PostData;
import crawler.common.SiteData;
import crawler.impl.PostCrawlerImpl;
import crawler.itf.PostParserItf;
import crawler.model.CNewsModel;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;

/**
 * Spawns and manages background crawler threads. Each {@link Worker} repeatedly
 * walks the configured sites, crawls new posts, stores them via {@link CNewsModel},
 * and then sleeps for one hour before the next pass.
 *
 * <p>Lifecycle: call {@link #init(List)} once to start the workers; call
 * {@link #stopAll()} (also registered as a JVM shutdown hook) to interrupt and
 * join them.
 */
public class CrawlHandler2 {
    private static final Logger logger = Logger.getLogger(CrawlHandler2.class);

    /** Number of crawler threads. (Misspelling of "WORKER" kept for binary compatibility with existing callers.) */
    public static final int WORDER_COUNT = 1;

    /** Pause between crawl passes: 1 hour in milliseconds. */
    private static final long SLEEP_BETWEEN_PASSES_MS = 1L * 3600L * 1000L;

    private static Worker[] workers = null;

    /**
     * Starts {@link #WORDER_COUNT} worker threads crawling the given sites and
     * registers a shutdown hook that stops them when the JVM exits.
     *
     * @param site list of sites to crawl; shared (not copied) by all workers
     */
    public static void init(List<SiteData> site) {
        try {
            // The hook must interrupt BEFORE joining: the workers loop forever,
            // so a bare join() would block JVM shutdown indefinitely.
            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
                @Override
                public void run() {
                    stopAll();
                }
            }));

            workers = new Worker[WORDER_COUNT];
            for (int i = 0; i < workers.length; i++) {
                workers[i] = new Worker("Worker " + i, site);
                workers[i].start();
            }
        } catch (Exception ex) {
            logger.error("CrawlHandler2.init: " + ex, ex);
        }
    }

    /** One crawl thread: loops over all sites, fetches new posts, then sleeps. */
    private static class Worker extends Thread {
        // Shared site list; never reassigned after construction.
        private final List<SiteData> lstSite;

        public Worker(String name, List<SiteData> lstSite) {
            super(name);
            this.lstSite = lstSite;
        }

        @SuppressWarnings("SleepWhileInLoop")
        @Override
        public void run() {
            // Exit cleanly when interrupted by stopAll() / the shutdown hook.
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    int newsCount = crawlAllSites();
                    logger.info("Crawl xong " +  " luc "  + new Date() +" - them "+ newsCount +" bai viet moi");
                    Thread.sleep(SLEEP_BETWEEN_PASSES_MS);
                } catch (InterruptedException ex) {
                    logger.error("InterruptedException: " + ex.getMessage(), ex);
                    // Restore the interrupt flag for any outer observers.
                    Thread.currentThread().interrupt();
                    break;
                } catch (RuntimeException ex) {
                    // One bad site/parse must not kill the crawler thread;
                    // log and retry on the next pass.
                    logger.error("Crawl pass failed: " + ex, ex);
                }
            }
        }

        /**
         * Runs one full crawl pass over every configured site.
         *
         * @return number of new posts stored during this pass
         */
        private int crawlAllSites() {
            int newsCount = 0;
            for (SiteData site : lstSite) {
                PostParserItf parser = site.getParser();
                CNewsModel cNewsModel = new CNewsModel();
                PostCrawlerImpl postCrawler = new PostCrawlerImpl();
                Document listTitle = postCrawler.getDocument(site.getUrl());
                if (listTitle == null) continue; // site unreachable; skip this pass
                List<PostData> lstPostData = parser.getListTitle(listTitle);
                for (PostData postData : lstPostData) {
                    if (null == postData) {
                        continue;
                    }
                    // Skip posts already persisted (dedup by URL).
                    boolean exists = cNewsModel.checkPostExists(postData.getUrl());
                    if (!exists) {
                        // getDetail() returns a fresh PostData without the list-page
                        // thumbnail, so preserve it across the re-parse.
                        String thumb = postData.getThumb();
                        Document detail = postCrawler.getDocument(postData.getUrl());
                        if (detail == null) continue;

                        postData = parser.getDetail(detail);
                        if (postData != null) {
                            postData.setThumb(thumb);
                            postData.setCategoryCode(site.getCategoryCode());
                            cNewsModel.addNewPost(postData);
                            cNewsModel.thriftNewPost(postData);
                            newsCount++;
                        }
                    }
                }
            }
            return newsCount;
        }
    }

    /**
     * Interrupts every worker (waking it from sleep and ending its loop), then
     * waits for each to terminate. Safe to call more than once.
     */
    public static void stopAll() {
        if (workers == null || workers.length == 0) return;
        // Phase 1: signal all workers first so they wind down in parallel.
        for (Worker worker : workers) {
            if (worker != null) {
                worker.interrupt();
            }
        }
        // Phase 2: wait for each to finish its current pass and exit.
        for (Worker worker : workers) {
            if (worker == null) continue;
            try {
                worker.join();
            } catch (InterruptedException ex) {
                logger.error("Cannot stop All: " + ex.getMessage(), ex);
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
}
