package com.spider.huxiu;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Scanner;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.google.gson.Gson;
import com.spider.bean.Article;
import com.spider.bean.ResponseResule;
import com.spider.dao.ArticleDao;

/**
 * Crawler for the HuXiu news site (https://www.huxiu.com).
 *
 * <p>Scrapes the home page plus the paginated article-list endpoint, pushes article
 * URLs onto a bounded blocking queue, and lets a small fixed thread pool consume that
 * queue. Discovered links (title + URL) are appended to
 * {@code HUXIU_ARTICLE_LIST_SAVE_PATH}. Database persistence exists but is commented
 * out — to enable it, restore {@code article2DB}/{@code ARTICLE_DAO} and adjust the DB
 * host, password and table name (the table must exist beforehand). If the packaged
 * version changes, update the mainClass entry in pom.xml.
 *
 * @author Administrator
 * @version 1.0.5
 */
public class HuXiuSpiderQueueDeskTop {

	private static Set<String> article_ids = new HashSet<String>();
	private static int total_page = 10;// total page count, refreshed from each server response
	private static int article_total_count = 0;// running total of article links discovered
	private static int pageing_article_count = 0;// links written to the output file so far
	private static int default_pick_total_page = 50;// crawl this many pages before asking whether to continue
	private static long write_new_list_start_time;// file-write timing: start, ms
	private static long write_new_list_end_time;// file-write timing: end, ms

	private static String nextLine = "xxc";// sentinel: neither "y" nor "n" until the user answers
	private static File article_list_file = null;
	private static Runnable runnable = null;// reads one console line; run on a fresh Thread per prompt

	private static final int THREAD_POOL_COUNT = 5;// number of consumer threads
	private static final int ARTICLE_IDS_SIZE = 20;// nominal batch size for file writes (see printEveryOnePageInfo)
	private static final long DEFAULT_WAIT_TIME = 15000L;// ms to wait for a y/n answer before assuming "y"
	private static final String HUXIU_HUXIU_HASH_CODE = "bc1acc4ae8cc354069293a255b8140fc";// required POST token
	private static final String HUXIU_URL = "https://www.huxiu.com";// site root
	private static final String HUXIU_ARTICLE_LIST_SAVE_PATH = "C:/HUXIU_ARTICLE_LIST_UTF-8.txt";// output file
	private static final ArrayBlockingQueue<String> ARRAY_BLOCKING_QUEUE = new ArrayBlockingQueue<String>(399);
	private static final ExecutorService ARTICLE_THREAD_POOL = Executors.newFixedThreadPool(THREAD_POOL_COUNT);
	// private static final ArticleDao ARTICLE_DAO = new ArticleDao();
	private static final Scanner SCANNER = new Scanner(System.in);

	static {
		try {
			runnable = new Runnable() {
				@Override
				public void run() {
					nextLine = SCANNER.nextLine();
				}
			};

			// Start each run with an empty output file: drop any previous run's
			// leftovers, then (re)create the file.
			article_list_file = new File(HUXIU_ARTICLE_LIST_SAVE_PATH);
			if (article_list_file.exists()) {
				article_list_file.delete();
			}
			article_list_file.createNewFile();

		} catch (IOException e) {
			e.printStackTrace();
		}

	}

	public static void main(String[] args) {
		try {

			/**
			 * Spawn the consumer threads; each one blocks on the queue and
			 * processes article links as they arrive.
			 */
			String data_last_dateline = pickHUXIU_HomePage();
			for (int i = 1; i <= THREAD_POOL_COUNT; i++) {
				ARTICLE_THREAD_POOL.execute(new Runnable() {
					public void run() {
						while (true) {
							pickArticleDetailAndSave2DB();
						}
					}
				});
			}

			// Page 1 is the home page (already fetched above); the paginated
			// endpoint starts at page 2. The loop bound is re-read every
			// iteration, so bumping default_pick_total_page extends the crawl.
			for (int pageIndex = 2; pageIndex <= default_pick_total_page; pageIndex++) {
				data_last_dateline = pickPageingUrlByHttpPostAndPasreResultJson(data_last_dateline, pageIndex + "");
			}
		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否畅通..");
		}
	}

	/**
	 * Appends every entry currently in {@code article_ids} to the output file,
	 * logging per-link and total timings. No-op when the set is empty.
	 */
	private static void saveHUXIUNewList2FileByArticleIds() {
		try {
			if (article_ids.size() >= 1) {
				long start = System.currentTimeMillis();
				// try-with-resources: the old code leaked the writer when a
				// write threw before close().
				try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(article_list_file, true))) {
					long begin = 0L;
					long end = 0L;
					for (String articleUrl : article_ids) {
						pageing_article_count++;
						begin = System.currentTimeMillis();
						bufferedWriter.write(articleUrl);
						bufferedWriter.flush();
						bufferedWriter.newLine();
						end = System.currentTimeMillis();
						System.out.println("线程小弟:" + Thread.currentThread().getName() + "(写入第" + pageing_article_count
								+ "篇文章链接信息成功,耗时:" + (end - begin) + "毫秒)");
					}
				}
				long close = System.currentTimeMillis();
				System.out.println("\t线程小弟:" + Thread.currentThread().getName() + "(写入" + article_ids.size()
						+ "篇文章链接信息成功总耗时:" + (close - start) + "毫秒) ");
			}

		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否正常A...");
		}
	}

	/**
	 * Fetches the HuXiu home page, queues every article link found in the news
	 * flow and records "title\turl" pairs in {@code article_ids}.
	 *
	 * @return the page's data-last_dateline attribute (cursor for the paginated
	 *         endpoint), or null on failure
	 */
	private static String pickHUXIU_HomePage() {
		try {
			Document document = pickPagingArticleByHttpGet(HUXIU_URL);
			Elements elements = document.select(".mod-info-flow>div");
			String homePage_data_last_dateline = document.select("div.get-mod-more").get(0).attr("data-last_dateline")
					.trim();
			if (homePage_data_last_dateline != null && elements.size() >= 1) {
				// h2 > a carries both the article href and its title
				Elements selects = null;
				// relative article path; prefixed with the site root below
				String articleHref = null;

				String articleTitle = null;

				// walk each article DIV in the flow
				for (Element element : elements) {

					// "hot" entries carry an extra leading div, shifting the
					// h2 one slot further down
					Elements hotTitle = element.select("div:eq(0)").select("div:eq(0)");

					String text = hotTitle.text();

					if (text.startsWith("热")) {
						selects = element.select("div:eq(3)").select("h2").select("a");
					} else {
						selects = element.select("div:eq(2)").select("h2").select("a");
					}
					articleHref = selects.attr("href");
					articleTitle = selects.text().trim();
					String articleUrl = HUXIU_URL + articleHref;
					article_ids.add(articleTitle + "\t" + articleUrl);
					// hand the link to the consumer threads (blocks if full)
					ARRAY_BLOCKING_QUEUE.put(articleUrl);
				}
			}
			return homePage_data_last_dateline;

		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否正常B...");
		}
		return null;
	}

	/**
	 * Consumer-side worker: blocks until an article URL is available on the
	 * queue, then processes it. DB persistence is currently commented out.
	 */
	private static void pickArticleDetailAndSave2DB() {

		try {
			// BUG FIX: the old code polled size() before take(), which made the
			// callers' while(true) loops busy-spin on an empty queue; a bare
			// take() blocks until work arrives.
			String articleUrl = ARRAY_BLOCKING_QUEUE.take();
			System.out.println("线程小弟:" + Thread.currentThread().getId() + "正在处理文章链接:" + articleUrl);

			// persist to the database
			// article2DB(articleUrl.trim());

		} catch (InterruptedException e) {
			// restore the interrupt flag so the owning thread can observe it
			Thread.currentThread().interrupt();
		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否正常C...");
		}

	}

	/**
	 * Fetches one article page, extracts author/time/title/body and builds an
	 * {@link Article}. The DAO save is commented out; re-enable to persist.
	 *
	 * @param articleUrl absolute article URL, e.g. https://www.huxiu.com/article/231450.html
	 */
	private static void article2DB(String articleUrl) {
		try {
			Document document = pickPagingArticleByHttpGet(articleUrl);
			// the numeric article id is the 6 digits after the last '/'
			int lastIndexOf = articleUrl.lastIndexOf("/") + 1;
			String id = articleUrl.substring(lastIndexOf, lastIndexOf + 6);
			Elements elements = document.select(".article-wrap");

			// text() returns all descendant text; ownText() only the element's own
			String articleAuthName = elements.get(0).select(".author-name").text().trim();

			String articlePublishTime = elements.get(0).select(".article-time").text().trim();
			String articleTitle = elements.get(0).select(".t-h1").text().trim();

			Elements articleContent = elements.get(0).select(".article-content-wrap>p");

			// sanitize characters that break downstream storage
			articleTitle = articleTitle.replace(" ", "").replace("?", "？").replace("\\", "分");

			Article article = new Article();
			article.setArticleTitle(articleTitle);
			article.setArticleAuthName(articleAuthName);
			article.setArticleContent(articleContentToString(articleContent));
			article.setArticlePublishTime(articlePublishTime);
			article.setArticleUrl(articleUrl);
			article.setId(id);

			// persist to the database
			// ARTICLE_DAO.save(article);

		} catch (Exception e) {
			System.out.println("请检查你的数据库链接是否正常!");

		}

	}

	/**
	 * Concatenates the trimmed text of every paragraph element into one string.
	 *
	 * @return the joined body text, or null when the input is null or on error
	 */
	private static String articleContentToString(Elements articleContent) {
		try {
			if (articleContent != null) {
				StringBuilder stringBuilder = new StringBuilder();
				for (Element element : articleContent) {
					stringBuilder.append(element.text().trim());
				}
				return stringBuilder.toString();
			}
		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否正常D...");
		}

		return null;
	}

	/**
	 * HTTP-GETs the given URL (with a browser User-Agent, which the site
	 * requires) and parses the response body as a Jsoup document.
	 *
	 * @return the parsed document, or null on failure
	 */
	private static Document pickPagingArticleByHttpGet(String url) {
		// try-with-resources: the old code leaked both the client and the
		// response on every call.
		try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
			HttpGet httpGet = new HttpGet(url);
			// the UA header is mandatory or the site rejects the request
			httpGet.addHeader("User-Agent",
					"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36");
			try (CloseableHttpResponse response = httpClient.execute(httpGet)) {
				String html = EntityUtils.toString(response.getEntity(), Charset.forName("UTF-8"));
				return Jsoup.parse(html);
			}
		} catch (Exception e) {
			System.out.println("请检查你的网络连接是否正常F...");
			return null;
		}

	}

	/**
	 * POSTs to the article_list endpoint for one page, queues every article
	 * link in the JSON payload, and — when the page limit is reached — asks the
	 * user whether to crawl 50 more pages (auto-continues after
	 * {@code DEFAULT_WAIT_TIME} ms of silence).
	 *
	 * @param data_last_dateline pagination cursor from the previous response
	 * @param pageIndex          1-based page number as a string
	 * @return the next pagination cursor, or null on failure
	 */
	private static String pickPageingUrlByHttpPostAndPasreResultJson(String data_last_dateline, String pageIndex) {
		try {
			printEveryOnePageInfo(pageIndex);

			if (default_pick_total_page == Integer.parseInt(pageIndex)) {
				System.out.println("\t总页数有:" + total_page + ",已爬取至" + pageIndex + "页,是否继续爬取?(y/n)");
				long waitingTimeStart = System.currentTimeMillis();

				// BUG FIX: a Thread may only be started once, so reusing a
				// single stored Thread threw IllegalThreadStateException the
				// second time this prompt fired. Spawn a fresh listener per prompt.
				new Thread(runnable).start();

				while (true) {
					if ("y".equalsIgnoreCase(nextLine)) {

						// extend the crawl window; the main loop re-reads this
						// bound, so no recursive re-fetch is needed (the old
						// recursive call re-crawled the current page because of
						// a post-increment that never advanced the index)
						default_pick_total_page += 50;
						if (default_pick_total_page >= total_page) {
							default_pick_total_page = total_page;
						}
						// reset the sentinel so the next prompt waits again
						nextLine = "xxc";
						break;
					} else if ("n".equalsIgnoreCase(nextLine)) {
						long time = write_new_list_end_time - write_new_list_start_time;
						// BUG FIX: time / 1000 * 1.0 truncated before the
						// ceil; divide by 1000.0 instead
						System.out.println("\t线程小弟:" + Thread.currentThread().getName() + "(写入" + article_total_count
								+ "篇文章链接信息成功总耗时:" + time + "毫秒,约" + Math.ceil(time / 1000.0) + "秒)");
						System.out.println(article_ids.size() + "篇文章链接信息成功写入到" + HUXIU_ARTICLE_LIST_SAVE_PATH);
						System.out.println("@熊骑士\t@2018年1月30日\t@V1.0.5");
						System.out.println("虎嗅爬虫已退出!");
						SCANNER.close();
						System.exit(0);
					} else {
						// no answer yet: after DEFAULT_WAIT_TIME assume "y"
						long waitingTimeEnd = System.currentTimeMillis();
						if ((waitingTimeEnd - waitingTimeStart) >= DEFAULT_WAIT_TIME) {
							nextLine = "y";
						}
					}
				}

			}

			String pageingUrl = "https://www.huxiu.com/v2_action/article_list";
			// try-with-resources: the old code leaked the client and response
			try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
				HttpPost httpPost = new HttpPost(pageingUrl);
				ArrayList<BasicNameValuePair> parameters = new ArrayList<BasicNameValuePair>();
				parameters.add(new BasicNameValuePair("huxiu_hash_code", HUXIU_HUXIU_HASH_CODE));
				parameters.add(new BasicNameValuePair("page", pageIndex));
				parameters.add(new BasicNameValuePair("last_dateline", data_last_dateline));
				httpPost.setEntity(new UrlEncodedFormEntity(parameters));
				// the UA header is mandatory or the site rejects the request
				httpPost.addHeader("User-Agent",
						"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36");

				String responseResultJson;
				try (CloseableHttpResponse response = httpClient.execute(httpPost)) {
					responseResultJson = EntityUtils.toString(response.getEntity(), Charset.forName("UTF-8"));
				}

				String last_dateline = null;
				String articleDivDatas = null;

				if (responseResultJson != null) {
					Gson gson = new Gson();
					ResponseResule responseResule = gson.fromJson(responseResultJson, ResponseResule.class);
					if (responseResule != null) {
						last_dateline = responseResule.getLast_dateline();
						articleDivDatas = responseResule.getData();
						total_page = Integer.parseInt(responseResule.getTotal_page());

					}
				}
				// the "data" field is an HTML fragment; parse it with Jsoup
				if (articleDivDatas != null) {

					Document documents = Jsoup.parse(articleDivDatas);

					Elements elements = documents.select(".mob-ctt");
					String articleUrl = null;
					String articleTitle = null;
					if (elements.size() >= 1) {

						for (Element element : elements) {

							articleUrl = element.select("h2").select("a").get(0).attr("href").trim();
							articleTitle = element.select("h2").get(0).select("a").text().trim();
							// hand the link to the consumer threads (blocks if full)
							ARRAY_BLOCKING_QUEUE.put(HUXIU_URL + articleUrl);
							String url = articleTitle + "\t" + HUXIU_URL + articleUrl;
							article_ids.add(url);
						}
					}
				}

				return last_dateline;
			}
		} catch (Exception e) {
			e.printStackTrace();
			System.out.println("请检查你的网络连接是否正常G...");
			return null;
		}
	}

	/**
	 * Logs per-page progress, flushes the collected links to the output file
	 * and clears the in-memory set for the next page.
	 *
	 * @param pageIndex page number about to be fetched (the links in
	 *                  {@code article_ids} belong to the previous page)
	 */
	private static void printEveryOnePageInfo(String pageIndex) throws InterruptedException {
		article_total_count += article_ids.size();
		System.out.println("\t\t第" + (Integer.parseInt(pageIndex) - 1) + "页,本页共" + article_ids.size() + "条链接,一共"
				+ article_total_count + "条链接 ");
		System.out.println("\t正在写入文章链接信息到" + HUXIU_ARTICLE_LIST_SAVE_PATH);
		System.out.println("\t\t@熊骑士\t@2018年1月30日\t@V1.0.5");

		write_new_list_start_time = System.currentTimeMillis();
		// BUG FIX: the old code only wrote when size >= ARTICLE_IDS_SIZE but
		// cleared the set unconditionally, silently dropping any page that
		// yielded fewer than ARTICLE_IDS_SIZE links. Always flush what we have
		// (the save method itself no-ops on an empty set).
		saveHUXIUNewList2FileByArticleIds();
		article_ids.clear();
		// brief pause between pages so the site is not hammered
		Thread.sleep(500);

		write_new_list_end_time = System.currentTimeMillis();
	}

}
