package com.xiaoyao.novel.task;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimerTask;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

import com.xiaoyao.novel.crawl.BaseCrawl;
import com.xiaoyao.novel.crawl.BbqugeCrawl;
import com.xiaoyao.novel.crawl.BiqiCrawl;
import com.xiaoyao.novel.crawl.BiqugeCrawl;
import com.xiaoyao.novel.crawl.Buy773Crawl;
import com.xiaoyao.novel.crawl.ChangxiangCrawl;
import com.xiaoyao.novel.crawl.ChuangshiCrawl;
import com.xiaoyao.novel.crawl.Du5Crawl;
import com.xiaoyao.novel.crawl.FengyunCrawl;
import com.xiaoyao.novel.crawl.Go114Crawl;
import com.xiaoyao.novel.crawl.IreaderCrawl;
import com.xiaoyao.novel.crawl.K16Crawl;
import com.xiaoyao.novel.crawl.K17Crawl;
import com.xiaoyao.novel.crawl.PiaotianCrawl;
import com.xiaoyao.novel.crawl.PinnongCrawl;
import com.xiaoyao.novel.crawl.QidianCrawl;
import com.xiaoyao.novel.crawl.QidianMMCrawl;
import com.xiaoyao.novel.crawl.QiuwuCrawl;
import com.xiaoyao.novel.crawl.QududuCrawl;
import com.xiaoyao.novel.crawl.QuduwuCrawl;
import com.xiaoyao.novel.crawl.ShuhuangCrawl;
import com.xiaoyao.novel.crawl.SilukeCrawl;
import com.xiaoyao.novel.crawl.TaduCrawl;
import com.xiaoyao.novel.crawl.TianjinCrawl;
import com.xiaoyao.novel.crawl.ZonghengCrawl;
import com.xiaoyao.novel.crawl.Zw69Crawl;
import com.xiaoyao.novel.pojo.Book;
import com.xiaoyao.novel.pojo.Chapter;
import com.xiaoyao.novel.service.BookService;
import com.xiaoyao.novel.service.ChapterService;
import com.xiaoyao.novel.service.StaticService;
import com.xiaoyao.novel.service.WebInfoService;
import com.xiaoyao.novel.util.ClassUtil;
import com.xiaoyao.novel.util.FileUtil;
import com.xiaoyao.novel.velocity.PageVelocityStatic;
import com.xiaoyao.novel.velocity.PhoneVelocityStatic;

/**
 * Background crawl task: fans out a pool of worker threads that repeatedly pull
 * books from the database, run the site-specific crawler for each book, persist
 * any newly found chapters, and regenerate the static HTML pages.
 *
 * Thread-safety: one instance drives all workers; the shared round state
 * ({@link #index}, {@link #outerIndex}, {@link #bookList}) is only touched
 * inside the synchronized {@link #produceBook()}.
 */
public class BookCrawlTask extends TimerTask {
	private static Logger logger = Logger.getLogger(BookCrawlTask.class);
	/** Registry: Book.crawler type key -> crawler implementation for that source site. */
	private static Map<String, BaseCrawl> crawlMap = new HashMap<String, BaseCrawl>();
	private BookService bookService = new BookService();
	private ChapterService chapterService = new ChapterService();
	// Referenced only by the disabled staticTopNChapter(); kept for when it is re-enabled.
	private static StaticService staticService = new StaticService();
	private static WebInfoService webInfoService = new WebInfoService();
	static {
		// Register one crawler per supported source site (key = Book.TYPE_* constant).
		crawlMap.put(Book.TYPE_QIDIAN, new QidianCrawl()); // Qidian
		crawlMap.put(Book.TYPE_QIDIANMM, new QidianMMCrawl()); // Qidian female channel
		crawlMap.put(Book.TYPE_SHUHUANG, new ShuhuangCrawl()); // Shuhuang
		crawlMap.put(Book.TYPE_ZONGHENG, new ZonghengCrawl()); // Zongheng
		crawlMap.put(Book.TYPE_CHUANGSHI, new ChuangshiCrawl()); // QQ Chuangshi
		crawlMap.put(Book.TYPE_BIQUGE, new BiqugeCrawl()); // Biquge
		crawlMap.put(Book.TYPE_K16, new K16Crawl()); // 16Kbook
		crawlMap.put(Book.TYPE_K17, new K17Crawl()); // 17K
		crawlMap.put(Book.TYPE_PIAOTIAN, new PiaotianCrawl()); // Piaotian
		crawlMap.put(Book.TYPE_TADU, new TaduCrawl()); // Tadu
		crawlMap.put(Book.TYPE_CHANGXIANG, new ChangxiangCrawl()); // Changxiang
		crawlMap.put(Book.TYPE_SILUKE, new SilukeCrawl()); // Siluke
		crawlMap.put(Book.TYPE_QUDUDU, new QududuCrawl());
		crawlMap.put(Book.TYPE_QUDUWU, new QuduwuCrawl());
		crawlMap.put(Book.TYPE_DU5, new Du5Crawl());
		crawlMap.put(Book.TYPE_PINNONG, new PinnongCrawl());
		crawlMap.put(Book.TYPE_ZW69, new Zw69Crawl());
		crawlMap.put(Book.TYPE_BIQI, new BiqiCrawl());
		crawlMap.put(Book.TYPE_GO114, new Go114Crawl());
		crawlMap.put(Book.TYPE_BBQUGE, new BbqugeCrawl());
		crawlMap.put(Book.TYPE_QIUWU, new QiuwuCrawl());
		crawlMap.put(Book.TYPE_FENGYUN, new FengyunCrawl());
		crawlMap.put(Book.TYPE_TIANJIN, new TianjinCrawl());
		crawlMap.put(Book.TYPE_IREADER, new IreaderCrawl());
		crawlMap.put(Book.TYPE_773BUY, new Buy773Crawl());
	}
	public static int INTERVAL_SECOND = 10;// polling interval in seconds (refreshed from web config)
	public static int THREAD_NUM = 2; // number of crawl worker threads

	public static int index = 0; // position of the next book to hand out in the current round

	/**
	 * Crawl entry point: refreshes the interval from the web configuration and
	 * starts {@link #THREAD_NUM} worker threads that loop forever.
	 */
	public void crawlBooks() {
		INTERVAL_SECOND = webInfoService.getStaticWebInfo().getCrawlInterval();
		for (int i = 0; i < THREAD_NUM; i++) {
			new Thread(new SearchThread()).start();
		}
	}

	// ///////////////////////////////////////////
	// Books being processed in the current round; reloaded when exhausted.
	List<Book> bookList = new ArrayList<Book>();
	static int outerIndex = 0;// number of completed rounds
	final static int PERIOD_INDEX_STATIC = 10;// regenerate index/category pages every N rounds

	/**
	 * Hands the next book to a worker thread. When a round is exhausted the
	 * book list is reloaded from the database, and every
	 * {@link #PERIOD_INDEX_STATIC} rounds the home and category pages are
	 * regenerated.
	 *
	 * @return the next book to crawl, or {@code null} when the database
	 *         currently has no books (callers should back off and retry)
	 */
	public synchronized Book produceBook() {
		int size = bookList.size();
		// Round exhausted: refresh the list and maybe re-render the static site pages.
		if (index >= size) {
			outerIndex++;
			if (outerIndex % PERIOD_INDEX_STATIC == 0) { // every PERIOD_INDEX_STATIC rounds
				staticIndex(); // regenerate home page
				staticCategory(); // regenerate category pages
			}
			index = 0;
			bookList = bookService.getAll();
			size = bookList.size();
			logger.warn("爬完一轮了,重新计数!本次需要更新的书籍本数: " + size + ",线程数:" + THREAD_NUM);
		}
		// BUGFIX: an empty book list used to hit bookList.get(0) and throw
		// IndexOutOfBoundsException on every call, spinning the workers hot.
		if (size == 0) {
			return null;
		}
		Book book = bookList.get(index);
		logger.warn(".正在分析第 " + index + "/" + size + "个. [" + book.bookName + "]");
		index++;
		return book;
	}

	/**
	 * 搜索线程 — worker thread: endlessly pulls books, crawls them and persists
	 * the results. Any per-book failure is logged and the loop continues.
	 *
	 * @author 旷明爱
	 * @date Aug 30, 2012 4:04:01 PM
	 */
	class SearchThread implements Runnable {
		public SearchThread() {
			logger.info("new thread :" + Thread.currentThread().getId() + "," + this);
		}
		public void run() {
			while (true) {
				Book book = produceBook();
				if (book == null) {
					// Nothing to crawl right now; back off instead of busy-spinning.
					try {
						Thread.sleep(INTERVAL_SECOND * 1000L);
					} catch (InterruptedException ie) {
						Thread.currentThread().interrupt();
						return;
					}
					continue;
				}
				if (StringUtils.isNotEmpty(book.getWebId())) { // empty web ID: nothing to crawl
					try {
						Thread.sleep(100); // brief pause to stay polite to source sites
						crawlBook(book);
						submit(book);
					} catch (Throwable e) {
						// BUGFIX: pass the throwable so the stack trace is logged,
						// not just e.toString().
						logger.error(e.getMessage(), e);
					}
				}
			}
		}
	}

	// ///////////////////////////////////////////

	/**
	 * Crawls one book's metadata and chapter list using the crawler registered
	 * for its source site. If the cached cover image file no longer exists on
	 * disk, picPath is cleared so the crawler fetches it again.
	 *
	 * @param book the book to crawl; mutated in place by the crawler
	 */
	public void crawlBook(Book book) {
		logger.warn("==========爬虫:" + book.crawler + " 书籍:" + book.bookName + " 网站ID:" + book.webId + " 开始");
		BaseCrawl crawl = crawlMap.get(book.crawler);
		if (crawl == null) {
			logger.error("配置crawler有问题!" + book.crawler);
			return;
		}
		try {
			// Verify the cover image still exists; clear the path if it is gone.
			String filePath = FileUtil.getWebRoot() + book.picPath;
			if (!new File(filePath).exists()) {
				book.setPicPath("");
			}
			crawl.crawlBook(book);
		} catch (Exception e) {
			// BUGFIX: include the exception so the failure cause is not swallowed.
			logger.error("爬虫有点问题:" + book.crawler, e);
		}
	}

	/**
	 * Persists the crawl result: merges the crawled data into the freshest DB
	 * record, inserts new chapters (stopping at the first one with empty
	 * content), updates the book row, and re-renders the affected static pages.
	 *
	 * @param book the freshly crawled book
	 */
	public void submit(Book book) {
		// Re-read the latest DB record so fields edited elsewhere are not clobbered.
		Book newBook = bookService.getById(book.bookId);
		// BUGFIX: isBlank is null-safe; getAuthorName().trim() NPE'd on null columns.
		if (StringUtils.isBlank(newBook.getAuthorName())) {
			newBook.setAuthorName(book.authorName);
		}
		if (StringUtils.isBlank(newBook.getDescription())) {
			newBook.setDescription(book.description);
		}
		newBook.setPicPath(book.picPath);
		newBook.setKeywords(book.keywords);
		newBook.setList(book.list);
		book = newBook;
		// Insert newly crawled chapters, stopping at the first empty-content one
		// so a partially failed crawl is retried from that point next round.
		int chapterCount = book.savedCount;
		int size = book.list.size();
		int newSize = size - chapterCount;
		for (int i = book.savedCount; i < size; i++) {
			Chapter chapter = book.list.get(i);
			if (ClassUtil.isEmpty(chapter.chapterContent)) {
				break;
			}
			chapterCount++;
			chapterService.insert(chapter);
		}
		if (size > 0) {
			book.newChapter = book.list.get(size - 1).chapterTitle;
		}
		book.savedCount = chapterCount;
		logger.warn(book.bookName + ",新增章节数:" + newSize + ",总数:" + chapterCount);
		if (newSize > 20) {
			newSize = 20; // cap how many chapter pages are re-rendered per pass
		}
		if (newSize > 0) {
			bookService.updateCrawl(book);
			staticBook(newBook, newSize + 1); // re-render book page + latest chapters
			staticTopNChapter(book.bookId, newSize + 1); // latest N chapters, from chapter one
		}
		book.list.clear(); // release chapter content memory
	}

	// ///////////////////////////////////////////
	// ///////////////////////////////////////////
	// // Static page generation wrappers

	/** Regenerates the desktop and mobile home pages. */
	public static void staticIndex() {
		PageVelocityStatic.staticIndex();
		PhoneVelocityStatic.staticIndex();
	}

	/** Regenerates the desktop and mobile category pages. */
	public static void staticCategory() {
		PageVelocityStatic.staticCategory();
		PhoneVelocityStatic.staticCategory();
	}

	/** Regenerates a book's detail page plus its latest {@code topN} chapter pages. */
	public static void staticBook(Book book, int topN) {
		PageVelocityStatic.staticBookAndChapters(book, topN);
		PhoneVelocityStatic.staticBookAndChapters(book, topN);
	}

	/** No-op: the StaticService call was disabled in the original — confirm before re-enabling. */
	public static void staticTopNChapter(int bookId, int size) {
		// staticService.staticTopNChapter(bookId, size);
	}

	// ///////////////////////////////////////////
	// ///////////////////////////////////////////
	public static void main(String[] args) throws Exception {
		new BookCrawlTask().crawlBooks();
	}

	/** Starts crawling immediately (the old timer-based scheduling was retired). */
	public void startTask() {
		crawlBooks();
	}

	/**
	 * TimerTask entry point: kicks off a crawl pass when updating is enabled
	 * in the web configuration.
	 */
	@Override
	public void run() {
		if (webInfoService.getStaticWebInfo().getIsRun()) {
			crawlBooks();
		}
	}
}
