package com.zxl.util.search;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException;

public class Run {

	// Configuration values, loaded once at class initialization.
	static String search_url = ConfigPropertiesUtils.getStringValue("search_url");
	static String keywords_url = ConfigPropertiesUtils.getStringValue("keywords_url");
	static String keywords_file = ConfigPropertiesUtils.getStringValue("keywords_file");
	static String path = FileUtil.getResourcePath();
	static String fileName = path + keywords_file;
	static String content_file = ConfigPropertiesUtils.getStringValue("content_file");
	static String content_fileName = path + content_file;
	static String hotwords_url = ConfigPropertiesUtils.getStringValue("hotwords_url");
	
	// Referer of the last fetched Sogou result page (written by getSougouContent).
	static String referer = "";
	static String user_agent = ConfigPropertiesUtils.getStringValue("user_agent");
	static String page_url = ConfigPropertiesUtils.getStringValue("page_url");
	
	/**
	 * Entry point: picks random keywords and drives the configured search
	 * engines. Only the Sogou branch is currently active; the Baidu and Soso
	 * branches are intentionally disabled.
	 */
	public static void main(String[] args) {
		
		String engines[] = search_url.split(",");
		String keyword = GetWebContent.getWebCon(keywords_url);
		
		int pageNum = 1;
		
		// Fall back to the directory given by -Dexe.path when the keyword
		// file is not found at the resource path.
		if(!new File(fileName).exists()){
			String exePath = System.getProperty("exe.path");
			// Guard: exe.path may be unset, which would otherwise yield a bogus "null..." path.
			if(exePath != null && new File(exePath + keywords_file).exists()){
				fileName = exePath + keywords_file;
				content_fileName = exePath + content_file;
			}
		}
		
		if(keyword != null && !keyword.equals("")){
			// Persist freshly fetched keywords when they differ from the cached copy.
			String keywordByFile = FileUtil.readFileByChars(fileName);
			if(keywordByFile == null || !keyword.trim().equals(keywordByFile.trim())){
				FileUtil.writeFile(fileName, keyword, false);
			}
		}else{
			// Remote fetch failed or was empty: reuse the cached keyword file.
			keyword = FileUtil.readFileByChars(fileName);
		}
		// Nothing usable from either the remote source or the cache: bail out
		// instead of throwing a NullPointerException on split().
		if(keyword == null || keyword.equals("")){
			System.err.println("No keywords available; nothing to do.");
			return;
		}
		String keywords[] = keyword.split("&");
		
		// Alternative keyword source: Baidu hot-search words via
		// getBaiduHotwords(hotwords_url, "GB2312") — currently unused.
		
		Random r = new Random();
		
		for(int i = 0; i < 2; i++){
			String kw = keywords[r.nextInt(keywords.length)];
			System.out.println(kw);
			
			for(int j = 0; j < engines.length; j++){
				String eg = engines[j];
				if(eg.indexOf("baidu") != -1){
					// Baidu crawling intentionally disabled (see getBaiduContent).
				}else if(eg.indexOf("soso") != -1){
					// Soso crawling intentionally disabled (see getSosoContent).
				}else if(eg.indexOf("sogou") != -1){
					// Warm up the simulated browser by loading the configured pages first.
					String pages[] = page_url.split(",");
					for(int p = 0; p < pages.length; p++){
						try {
							GetWebContent.getHomeTile(pages[p]);
						} catch (FailingHttpStatusCodeException e) {
							e.printStackTrace();
						} catch (MalformedURLException e) {
							e.printStackTrace();
						} catch (IOException e) {
							e.printStackTrace();
						}
					}
					
					// Query each result page; page 1 uses no explicit page parameter.
					for(int k = 1; k <= pageNum; k++){
						referer = "";
						if(k == 1){
							getSougouContent(eg + kw + ConfigPropertiesUtils.getStringValue("sogou_params"), "GB2312");
						}else{
							getSougouContent(eg + kw + "&page=" + k + ConfigPropertiesUtils.getStringValue("sogou_params"), "GB2312");
						}
					}
				}
			}
		}

	}
	
	/**
	 * Collects capture group {@code group} of every case-insensitive match of
	 * {@code reg} in {@code content}.
	 *
	 * @param content page HTML to scan; may be null (returns an empty list)
	 * @param reg     regular expression with at least {@code group} capture groups
	 * @param group   1-based index of the capture group holding the link
	 * @return the extracted strings, in match order; never null
	 */
	private static List<String> extractMatches(String content, String reg, int group){
		List<String> result = new ArrayList<String>();
		if(content == null) return result;
		
		Pattern p = Pattern.compile(reg, Pattern.CASE_INSENSITIVE);
		Matcher m = p.matcher(content);
		while (m.find()) {
			result.add(m.group(group));
			// Preserved from the original loops: stop once the scan has
			// touched the end of the input.
			if(m.hitEnd()) break;
		}
		return result;
	}
	
	/**
	 * Sleeps a random 3-10 seconds to mimic a human reader between page visits.
	 * Restores the thread's interrupt status if the sleep is interrupted.
	 */
	private static void randomPause(Random r){
		int s = r.nextInt(8) + 3; // nextInt(8) yields 0-7, so the pause is 3-10 s inclusive
		try {
			Thread.sleep(s * 1000L);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // do not swallow the interrupt
		}
	}
	
	/**
	 * Scrapes the result links from a Baidu search page and visits each one,
	 * pausing randomly between visits.
	 *
	 * @param url    Baidu result-page URL
	 * @param encode character encoding used to fetch the pages
	 */
	public static void getBaiduContent(String url, String encode){
		
		String content = GetWebContent.getPage(url, encode); // fetch the result page HTML
		
		// Regex matching Baidu result entries; group 2 is the target href.
		String reg = "<h3 class=\"t\">" + "<a (.*?) href=\"(.*?)\".*?>(.*?)</a>(.*?)</h3>";
		List<String> urlList = extractMatches(content, reg, 2);
		
		Random r = new Random();
		for(int i = 0; i < urlList.size(); i++){
			GetWebContent.getPage(urlList.get(i), encode); // visit the second-level page
			randomPause(r);
		}
	}
	
	/**
	 * Scrapes the result links from a Soso search page and visits each one,
	 * pausing randomly between visits.
	 *
	 * @param url    Soso result-page URL
	 * @param encode character encoding used to fetch the pages
	 */
	public static void getSosoContent(String url, String encode){
		
		String content = GetWebContent.getPage(url, encode); // fetch the result page HTML
		
		// Regex matching Soso result entries; group 1 is the target href.
		String reg = "<h3><a href=\"(.*?)\".*?>(.*?)</a>(.*?)</h3>";
		List<String> urlList = extractMatches(content, reg, 1);
		
		Random r = new Random();
		for(int i = 0; i < urlList.size(); i++){
			GetWebContent.getPage(urlList.get(i), encode); // visit the second-level page
			randomPause(r);
		}
	}
	
	/**
	 * Scrapes the result links from a Sogou search page, records the page as
	 * the current referer, and visits ONE randomly chosen result (unlike the
	 * Baidu/Soso variants, which visit every result).
	 *
	 * @param url    Sogou result-page URL
	 * @param encode character encoding used to fetch the result page
	 *               (second-level pages are always fetched as GB2312)
	 */
	public static void getSougouContent(String url, String encode){
		
		String content = GetWebContent.getPage(url, encode); // fetch the result page HTML
		referer = url;
		
		// Regex matching Sogou result entries; group 3 is the target href.
		String reg = "<h3 class=\"pt\">(.*?)<a (.*?) href=\"(.*?)\".*?>(.*?)</a>(.*?)</h3>";
		List<String> urlList = extractMatches(content, reg, 3);
		
		if(urlList.size() > 0){
			Random r = new Random();
			// Visit a single random second-level page.
			GetWebContent.getPage(urlList.get(r.nextInt(urlList.size())), "GB2312");
			randomPause(r);
		}
	}
	
	/**
	 * Scrapes the Baidu hot-search page for trending keywords.
	 *
	 * @param url    hot-search page URL
	 * @param encode character encoding used to fetch the page
	 * @return the hot keywords (anchor text of each list entry); never null
	 */
	public static List<String> getBaiduHotwords(String url, String encode){
		// Group 3 of this regex is the anchor text, i.e. the keyword itself.
		String reg = "<a class=\"list-title\" (.*?) href=\"(.*?)\">(.*?)</a>";
		String content = GetWebContent.getPage(url, encode); // fetch the page HTML
		return extractMatches(content, reg, 3);
	}

}
