package com.novel.service;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import com.alibaba.fastjson.JSON;
import com.novel.bean.BookBean;
import com.novel.bean.CrawlerBean;
import com.novel.bean.ResultDomain;
import com.novel.bean.SectionBean;
import com.novel.dao.NovelDao;
import com.novel.task.CrawlerSectionTask;
import com.novel.utils.CacheUtil;
import com.novel.utils.Database;
import com.novel.utils.StringUtils;

/**
 * Enum-based singleton service that crawls novel index pages with Jsoup,
 * extracts the per-chapter links, persists book metadata through
 * {@link NovelDao}, and schedules background {@link CrawlerSectionTask}s
 * that download the chapter contents.
 *
 * <p>Crawl state (running tasks and their rules) is kept in
 * {@link CacheUtil} so an interrupted crawl can be resumed via
 * {@link #continueBook(long)}.
 */
public enum CrawlerService {
	crawlerService;

	private CrawlerService() {}

	/**
	 * Parses the book index page referenced by {@code bean}, builds the list
	 * of chapter links, persists the book row and starts a background task
	 * that crawls every chapter.
	 *
	 * @param bean crawl rule: book URL, CSS selectors, base URL, and the
	 *             number of leading "preface" entries to skip
	 * @return success result while the crawl continues asynchronously, or a
	 *         failure result if the index page could not be read/parsed
	 */
	public ResultDomain singleBook(CrawlerBean bean) {
		ResultDomain ret = new ResultDomain();
		try {
			Document doc = fetch(bean.getBookurl());
			Elements eles = doc.select(bean.getSectionelement());
			List<SectionBean> sectionList = new LinkedList<>();
			// getPresection() entries at the head of the list are skipped
			// (e.g. "latest chapters" duplicates before the real index).
			int preSec = bean.getPresection();
			for (int i = 1 + preSec; i <= eles.size(); i++) {
				SectionBean sec = new SectionBean();
				sec.setNum(i - preSec);
				// Section "content" temporarily holds the chapter URL; the
				// background task later replaces it with the chapter text.
				sec.setContent(resolveLink(eles.get(i - 1).select("a[href]").attr("href"), bean));
				sectionList.add(sec);
			}
			long bookid = saveBook(doc, bean);
			startSectionTask(bookid, sectionList, bean);
			ret.setSuccess("解析书籍链接成功,爬取中...");
		} catch (Exception e) {
			System.out.println("读取书籍链接发生异常" + e.toString());
			ret.setFail("读取书籍链接发生异常", e.toString());
		}
		return ret;
	}

	/**
	 * Persists the book metadata parsed from {@code doc}.
	 *
	 * @return the generated book id
	 * @throws Exception if parsing or the database insert fails
	 */
	private long saveBook(Document doc, CrawlerBean bean) throws Exception {
		BookBean book = CrawlerHelper.getBookInfo(doc, bean);
		Connection conn = Database.DB.getConnection();
		try {
			return NovelDao.novelDAO.insertBook(conn, book);
		} finally {
			// Always return the connection, even when the insert throws.
			Database.DB.close(conn);
		}
	}

	/**
	 * Dry-run of a crawl rule: parses the index page and fetches only the
	 * first chapter so the caller can verify the selectors before committing
	 * to a full crawl. Nothing is persisted.
	 *
	 * @param bean crawl rule to test
	 * @return on success, a map with keys {@code book} (parsed metadata),
	 *         {@code section} (first chapter's content) and
	 *         {@code sectionnum} (total chapter count)
	 */
	public ResultDomain singleTest(CrawlerBean bean) {
		ResultDomain ret = new ResultDomain();
		try {
			Document doc = fetch(bean.getBookurl());
			Elements eles = doc.select(bean.getSectionelement());
			List<String> sectionList = new LinkedList<>();
			int preSec = bean.getPresection();
			for (int i = 1 + preSec; i <= eles.size(); i++) {
				sectionList.add(resolveLink(eles.get(i - 1).select("a[href]").attr("href"), bean));
			}
			BookBean book = CrawlerHelper.getBookInfo(doc, bean);
			// get(0) throws if the selector matched nothing; the catch below
			// reports that as a parse failure.
			String sectionContent = CrawlerHelper.getFirstSec(sectionList.get(0), bean);
			Map<String, Object> map = new HashMap<>();
			map.put("book", book);
			map.put("section", sectionContent);
			map.put("sectionnum", sectionList.size());
			ret.setSuccess("解析书籍链接成功,爬取中...", map);
		} catch (Exception e) {
			System.out.println("读取书籍链接发生异常" + e.toString());
			ret.setFail("读取书籍链接发生异常", e.toString());
		}
		return ret;
	}

	/**
	 * Persists the unfinished section list and its crawl rule as JSON so the
	 * crawl can be resumed later by {@link #continueBook(long)}.
	 *
	 * @return {@code true} on success, {@code false} if the insert failed
	 */
	public boolean saveContinue(long bookid, List<SectionBean> list, CrawlerBean bean) {
		String sql = "INSERT INTO continuebook (bookid, listjson, rulejson) VALUES (?,?,?)";
		Connection conn = Database.DB.getConnection();
		try (PreparedStatement ps = conn.prepareStatement(sql)) {
			ps.setLong(1, bookid);
			ps.setString(2, StringUtils.transfer(JSON.toJSONString(list)));
			ps.setString(3, StringUtils.transfer(JSON.toJSONString(bean)));
			ps.executeUpdate();
			return true;
		} catch (SQLException e) {
			System.out.println("持久化书籍未完成章节出错" + e.toString());
			return false;
		} finally {
			Database.DB.close(conn);
		}
	}

	/**
	 * Resumes a previously interrupted crawl: loads the persisted section
	 * list and rule for {@code id}, restarts the background task, then
	 * deletes the checkpoint row.
	 *
	 * @param id book id whose crawl should be resumed
	 * @return {@code false} only on SQL failure; missing checkpoint rows are
	 *         treated as success (nothing to resume)
	 */
	public boolean continueBook(long id) {
		boolean ret = true;
		String sql = "select * from continuebook where bookid=?";
		String sql1 = "delete  from continuebook where bookid=?";
		Connection conn = Database.DB.getConnection();
		try (PreparedStatement ps = conn.prepareStatement(sql);
				PreparedStatement ps1 = conn.prepareStatement(sql1)) {
			ps.setLong(1, id);
			try (ResultSet rs = ps.executeQuery()) {
				if (rs.next()) {
					String listjson = readBlob(rs.getBlob("listjson"));
					String rulejson = readBlob(rs.getBlob("rulejson"));
					List<SectionBean> list = JSON.parseArray(listjson, SectionBean.class);
					CrawlerBean bean = JSON.parseObject(rulejson, CrawlerBean.class);
					startSectionTask(id, list, bean);
				}
			}
			// Checkpoint consumed (or absent): remove the row either way.
			ps1.setLong(1, id);
			ps1.executeUpdate();
		} catch (SQLException e) {
			System.out.println("恢复爬取失败" + e.toString());
			ret = false;
		} catch (IOException e1) {
			e1.printStackTrace();
		} finally {
			Database.DB.close(conn);
		}
		return ret;
	}

	/**
	 * Fetches a page with a 30s timeout and a randomly chosen user agent to
	 * reduce the chance of being blocked by the target site.
	 */
	private Document fetch(String url) throws IOException {
		String ua = USERAGENTS[ThreadLocalRandom.current().nextInt(USERAGENTS.length)];
		return Jsoup.connect(url).timeout(30000).cookie("111", "111").userAgent(ua).get();
	}

	/**
	 * Resolves a possibly-relative chapter href: absolute URLs pass through,
	 * root-relative paths are prefixed with the site base URL, and other
	 * relative paths with the book index URL.
	 */
	private String resolveLink(String href, CrawlerBean bean) {
		if (href.startsWith("http")) {
			return href;
		}
		return href.startsWith("/") ? bean.getBaseurl() + href : bean.getBookurl() + href;
	}

	/**
	 * Registers a section-crawling task in the cache (so it can be monitored
	 * and checkpointed) and starts it on a new thread.
	 */
	private void startSectionTask(long bookid, List<SectionBean> sectionList, CrawlerBean bean) {
		CrawlerSectionTask task = new CrawlerSectionTask(bookid, 1000, sectionList, bean, bean.getBooknameelement());
		Map<Long, CrawlerSectionTask> secTask = CacheUtil.getInstance().getCrawlerSectionTask();
		if (secTask == null) {
			secTask = new HashMap<>();
		}
		secTask.put(bookid, task);
		CacheUtil.getInstance().addCrawlerSectionTask(secTask);
		CacheUtil.getInstance().add(bookid + "crawler", bean);
		new Thread(task).start();
	}

	/**
	 * Reads a BLOB column fully into a UTF-8 string.
	 *
	 * <p>Replaces the previous implementation, which sized its buffer with
	 * {@code InputStream.available()} and appended the whole buffer per
	 * {@code read()} call — corrupting the JSON on any partial read — and
	 * used the platform default charset.
	 */
	private static String readBlob(Blob blob) throws SQLException, IOException {
		try (InputStream in = blob.getBinaryStream();
				ByteArrayOutputStream out = new ByteArrayOutputStream()) {
			byte[] buf = new byte[8192];
			int n;
			while ((n = in.read(buf)) != -1) {
				out.write(buf, 0, n);
			}
			return new String(out.toByteArray(), StandardCharsets.UTF_8);
		}
	}

	// Rotated user agents for fetch(); indexed by USERAGENTS.length so
	// entries can be added or removed safely (the old code hard-coded 17).
	private static final String[] USERAGENTS = new String[]{
			"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
			"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
			"Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
			"Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
			"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
			"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
			"Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
			"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
			"Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
			"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
			"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
			"Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
			"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
			"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
			"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
			"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
			"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0"
	};
}
