package com.CreditCrawler.moduleImpl;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.jsoup.Jsoup;
import org.jsoup.Connection.Method;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import com.CreditCrawler.DataPicker;
import com.CreditCrawler.ModuleCrawler;
import com.CreditCrawler.datapickerImpl.PersonDataPickerImpl;
import com.CreditCrawler.util.CreateRootFile;
import com.CreditCrawler.vo.PageBaseInfoVO;


/**
 * Crawls the personal-credit ("个人") section of zjcredit.gov.cn: fetches the
 * category listing page, creates one local directory per
 * (category, sub-category) pair under {@code rootPath}, and returns the page
 * metadata that {@link PersonDataPickerImpl} consumes.
 */
public class PersonModuleImpl implements ModuleCrawler {

	/** Non-breaking space as it appears in the parsed page text; stripped from all names. */
	private static final String NBSP = Jsoup.parse("&nbsp;").text();

	/** Extracts the record id from an anchor href such as {@code ...jsp?id=123}. */
	private static final Pattern ID_PATTERN = Pattern.compile("id=(.+)");

	String href = "";
	String name = null;

	/** Local root directory under which crawled data is stored. */
	private final String rootPath;

	public PersonModuleImpl() {
		rootPath = CreateRootFile.getRootFileName();
	}

	/**
	 * Fetches the personal-credit tips page, mirrors its category/sub-category
	 * structure as local directories, and returns one {@link PageBaseInfoVO}
	 * per sub-category link.
	 *
	 * @return the collected page infos; possibly partial or empty on network
	 *         failure (the IOException is logged, not propagated)
	 */
	public List<PageBaseInfoVO> getInfoList() {
		List<PageBaseInfoVO> listP = new ArrayList<PageBaseInfoVO>();
		try {
			Document doc = Jsoup
					.connect(
							"http://www.zjcredit.gov.cn/xyzj2015/html/personCreditTips2.jsp")
					.timeout(8000).method(Method.POST).post();
			// Category header tables (e.g. provincial high court sections).
			Elements firstNode = doc.select("table.listf3[width=150]");
			for (int i = 0; i < firstNode.size(); i++) {
				String category = clean(firstNode.get(i).text());
				// Sub-category links live two sibling elements after the header table.
				Elements secondNode = firstNode.get(i).nextElementSibling()
						.nextElementSibling().select("a");
				for (int j = 0; j < secondNode.size(); j++) {
					String ids = extractId(secondNode.get(j).attr("href"));
					String sumText = clean(secondNode.get(j).parent()
							.nextElementSibling()
							.select(".listf5>font[color=#0a9ce9]").text())
							.replaceAll(",", "");
					int sums;
					try {
						sums = Integer.parseInt(sumText);
					} catch (NumberFormatException nfe) {
						// A malformed/missing record count would otherwise escape the
						// IOException catch below and abort the whole crawl; skip this entry.
						System.err.println("Unparsable record count '" + sumText
								+ "' for id=" + ids + ", skipping entry");
						continue;
					}
					String subCategory = clean(secondNode.get(j).text());
					// rootPath/个人/<category>/<subCategory>  ("个人" = "personal").
					// File.separator instead of a hard-coded "\\" keeps paths portable.
					File dir = new File(rootPath + File.separator + "个人"
							+ File.separator + category
							+ File.separator + subCategory);
					dir.mkdirs();
					PageBaseInfoVO pbi = new PageBaseInfoVO(ids, name, href, sums);
					// NOTE(review): "name" is immediately overwritten with the output
					// directory path — presumably PersonDataPickerImpl uses it as the
					// save location; confirm against that class.
					pbi.setName(dir.getPath());
					listP.add(pbi);
				}
			}
		} catch (IOException e) {
			// Network/parse failure: return whatever was collected so far.
			e.printStackTrace();
		}
		return listP;
	}

	/** Strips the page's non-breaking spaces from {@code text}. */
	private static String clean(String text) {
		return text.replaceAll(NBSP, "");
	}

	/**
	 * Returns the value of the last {@code id=} parameter found in
	 * {@code href}, or {@code null} if none is present.
	 */
	private static String extractId(String href) {
		Matcher m = ID_PATTERN.matcher(href);
		String ids = null;
		while (m.find()) {
			ids = m.group(1);
		}
		return ids;
	}

	/** Runs the crawl: one {@link PersonDataPickerImpl} pass per collected page. */
	public void startCrawler() {
		for (PageBaseInfoVO pi : getInfoList()) {
			DataPicker dp = new PersonDataPickerImpl(pi);
			dp.createJSONList();
		}
	}
}

