package com.eric.r2d2.r;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import org.apache.log4j.PropertyConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.eric.MysqlConfig;
import com.eric.r2d2.pageProcessor.NewsProcessor;
import com.eric.r2d2.pageProcessor.SeoInfoExtractor;
import com.eric.r2d2.pipline.JSONFilePipeline;
import com.eric.r2d2.pipline.MongoPipeline;
import com.eric.r2d2.pipline.XMLFilePipeline;
import com.eric.r2d2.scheduler.MongoScheduler;

import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.pipeline.FilePipeline;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.scheduler.QueueScheduler;
import us.codecraft.webmagic.scheduler.RedisScheduler;
import us.codecraft.webmagic.scheduler.Scheduler;

public class Crawler {

	// --- settings loaded from the properties file (see Config()) ---
	static String debug;           // "1" => log crawl results to console only
	static String starting_url;    // seed URL handed to the spider
	static int threads;            // spider worker thread count
	static String mongohost;       // mongo pipeline connection
	static String mongodb;
	static String mongocoll;
	static String mongohost_scheduler;   // mongo scheduler connection
	static String mongodb_scheduler;
	static String mongocoll_scheduler;
	static String fileLoc;         // output location for file-based pipelines
	static String scheduler_def;   // "mongo" | "queue" | "redis"
	static String store;           // "file" | "jsonfile" | "mongo" | "xmlfile"
	static String crawlExternal;   // "1" => follow off-site links
	static String recrawl;         // "1" => re-visit already-crawled pages
	String path2conf;              // path to the properties file
	static String redishost;       // redis scheduler host

	// Whitelist of crawlable sites / URL patterns, loaded from MySQL.
	private static List<String> approvedUrlList = new ArrayList<String>();

	private static Set<String> urlPattern = new HashSet<String>();

	/**
	 * Entry point: loads configuration, wires up pipeline and scheduler,
	 * then starts the crawl.
	 *
	 * @param args optional; args[0] overrides the default config file path
	 * @throws Exception if configuration or crawl startup fails
	 */
	public static void main(String[] args) throws Exception {
		Logger logger = LoggerFactory.getLogger(Crawler.class);

		Crawler crawler = new Crawler();

		// Read the optional config-path argument BEFORE loading the config.
		// (Bug fix: the original read args[0] after Config() had already
		// loaded the default file, so the override was silently ignored.)
		if (args.length > 0)
			crawler.path2conf = args[0];

		crawler.Config();

		// Decide whether to follow external links / recrawl known pages.
		// Constant-first equals so a missing property cannot throw an NPE.
		SeoInfoExtractor pp = new SeoInfoExtractor();
		if ("1".equals(crawlExternal))
			pp.crawlExternal();
		if ("1".equals(recrawl))
			pp.recrawl();

		Pipeline pipe = buildPipeline(logger);
		Scheduler scheduler = buildScheduler();

		// Assemble and run the spider.
		Spider me = Spider.create(new NewsProcessor(approvedUrlList, urlPattern))
				.addPipeline(pipe)
				.setScheduler(scheduler)
				.addUrl(starting_url)
				.thread(threads);

		me.run();
	}

	/**
	 * Chooses the output pipeline from the "debug"/"store" settings.
	 * Falls back to console output for unknown store types (the original
	 * left the pipeline null and crashed later with an NPE).
	 */
	private static Pipeline buildPipeline(Logger logger) {
		if ("1".equals(debug)) {
			return new ConsolePipeline();
		}
		String stamp = new SimpleDateFormat("yyyyMMdd").format(new Date());
		if ("file".equals(store)) {
			logger.info("Using File Pipeline And Saving To " + fileLoc);
			return new FilePipeline(fileLoc);
		} else if ("jsonfile".equals(store)) {
			logger.info("Using JSON File Pipeline And Saving To " + "data/r2d2_crawl_" + stamp + ".json");
			return new JSONFilePipeline();
		} else if ("mongo".equals(store)) {
			logger.info(
					"Using Mongodb Pipeline: Host:" + mongohost + ",DB:" + mongodb + ",Collection:" + mongocoll);
			return new MongoPipeline(mongohost, mongodb, mongocoll);
		} else if ("xmlfile".equalsIgnoreCase(store)) {
			logger.info("Using XML File Pipeline And Saving To " + "data/r2d2_crawl_" + stamp + ".xml");
			return new XMLFilePipeline(fileLoc);
		}
		logger.warn("Unknown store type '" + store + "', falling back to console output");
		return new ConsolePipeline();
	}

	/**
	 * Chooses the URL scheduler; an in-memory queue is the default.
	 * (Bug fix: the original tested scheduler_def.equals(...) before the
	 * scheduler_def == null check, which NPE'd when the property was absent.)
	 */
	private static Scheduler buildScheduler() {
		if (scheduler_def == null || "queue".equals(scheduler_def)) {
			return new QueueScheduler();
		} else if ("mongo".equals(scheduler_def)) {
			return new MongoScheduler(mongohost_scheduler, mongodb_scheduler, mongocoll_scheduler);
		} else if ("redis".equals(scheduler_def)) {
			return new RedisScheduler(redishost);
		}
		return new QueueScheduler(); // unknown value: safe in-memory default
	}

	/**
	 * Loads all crawl settings from the properties file at path2conf, then
	 * fills the URL whitelists from MySQL.
	 */
	private void Config() {
		PropertyConfigurator.configure("resources/log4j.properties");
		// Honor a caller-supplied path (set from args[0]); default only when unset.
		if (path2conf == null)
			path2conf = "resources/config";

		Properties p = new Properties();
		// try-with-resources: the original leaked the FileInputStream.
		// IOException also covers FileNotFoundException, so one catch suffices.
		try (FileInputStream in = new FileInputStream(new File(path2conf))) {
			p.load(in);
		} catch (IOException e) {
			e.printStackTrace();
		}

		starting_url = p.getProperty("starting_url");

		mongohost = p.getProperty("mongohost");
		mongodb = p.getProperty("mongodb");
		mongocoll = p.getProperty("mongocoll");

		mongohost_scheduler = p.getProperty("mongohost_scheduler");
		mongodb_scheduler = p.getProperty("mongodb_scheduler");
		mongocoll_scheduler = p.getProperty("mongocoll_scheduler");

		// Default to one worker if "threads" is missing (parseInt(null) NPE'd).
		threads = Integer.parseInt(p.getProperty("threads", "1"));

		store = p.getProperty("store");
		scheduler_def = p.getProperty("scheduler_def");

		fileLoc = p.getProperty("file_loc");
		debug = p.getProperty("debug");
		crawlExternal = p.getProperty("crawlExternal");
		recrawl = p.getProperty("recrawl");

		redishost = p.getProperty("redishost");

		loadApprovedUrls();
	}

	/**
	 * Fills approvedUrlList / urlPattern from the crawl database.
	 * SECURITY NOTE(review): database credentials are hard-coded below —
	 * move them into the config file or environment and rotate the
	 * exposed password.
	 */
	private static void loadApprovedUrls() {
		MysqlConfig.setHost("120.25.87.76");
		MysqlConfig.setUser("root");
		MysqlConfig.setPass("wih486eev");
		MysqlConfig.setDB("crawl");
		MysqlConfig.connect();
		try {
			ResultSet rs = MysqlConfig.executeSql("select * from `indie_sites`");
			while (rs.next()) {
				approvedUrlList.add(rs.getString(2)); // column 2 holds the site URL
			}
			rs.close(); // original leaked both result sets
			ResultSet rs2 = MysqlConfig.executeSql("select * from `approved_url_pattern`");
			while (rs2.next()) {
				urlPattern.add((rs2.getString(2)));
			}
			rs2.close();
		} catch (SQLException e) {
			e.printStackTrace();
		}
	}

}
