package crawler.script;

import java.util.ArrayList;
import java.util.List;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSONObject;

import crawler.client.sdk.task.java.CrawlerExtractInfo;
import crawler.client.sdk.task.java.CrawlerTaskInfo;
import crawler.utility.http.HttpClientResponse;
import crawler.utility.http.HttpClientUtil;

public class Step2 {
	
	// SLF4J logger for this crawler step; final per convention.
	private static final Logger log = LoggerFactory.getLogger(Step2.class);
	
	
	/**
	 * Crawls a JD.com product-list page and emits one child crawl task per product.
	 *
	 * <p>Fetches the task URL, parses the list markup ({@code div#plist > ul > li > div}),
	 * and for each product anchor under {@code div.p-name} builds a {@link CrawlerTaskInfo}
	 * carrying the absolute product URL, the accumulated JSON information (parent info plus
	 * {@code nameStep3} and {@code urlStep2}), and the next task name
	 * {@code crawler.script.Step3}.
	 *
	 * @param crawlerTaskInfo the incoming task; its task URL is fetched and its
	 *                        information string is parsed as JSON and propagated
	 * @return a {@link CrawlerExtractInfo} wrapping the list of child tasks
	 *         (empty list when the page yields no matching product anchors)
	 * @throws Exception if the HTTP fetch or JSON parsing fails
	 */
	public static CrawlerExtractInfo crawler(CrawlerTaskInfo crawlerTaskInfo) throws Exception {
		String nextTaskName = "crawler.script.Step3";
		
		String url = crawlerTaskInfo.getTaskUrl();
		String preInfo = crawlerTaskInfo.getInformation();
		JSONObject preInfoJSON = JSONObject.parseObject(preInfo);
		// Parameterized logging: no string concatenation when INFO is disabled.
		log.info("Step2  preInfoJSON : {}", preInfoJSON);
		
		HttpClientUtil httpClientUtil = new HttpClientUtil();
		httpClientUtil.setUrl(url);
		
		HttpClientResponse responseBean = httpClientUtil.get();
		
		String html = responseBean.getContent();
		
		Document doc = Jsoup.parse(html);
		Elements divElements = doc.select("div#plist > ul > li > div");
		Elements nameElements = divElements.select("div.p-name > a");
		
		List<CrawlerTaskInfo> crawleOutput = new ArrayList<CrawlerTaskInfo>();
		
		// List-page hrefs are protocol-relative ("//item.jd.com/..."); prepend the scheme.
		String startStr = "https:";
		
		for (int i = 0; i < nameElements.size(); i++) {
			Element nameElement = nameElements.get(i);
			
			String childUrl = startStr + nameElement.attr("href");
			// Product title lives in an <em> inside the anchor; fall back when absent.
			Element emElement = nameElement.select("em").first();
			String name = "no title";
			if (emElement != null) {
				name = emElement.text();
			}
			
			CrawlerTaskInfo crawlerURL = new CrawlerTaskInfo();
			crawlerURL.setChildUrl(childUrl);
			
			// BUG FIX: fastjson's JSONObject(Map) constructor WRAPS the given map rather
			// than copying it, so the old `new JSONObject(preInfoJSON)` shared its backing
			// map with preInfoJSON — each iteration's put() mutated the parent and every
			// previously emitted task. Build an independent per-task copy instead.
			JSONObject infoObj = new JSONObject();
			infoObj.putAll(preInfoJSON);
			infoObj.put("nameStep3", name);
			// BUG FIX: urlStep2 was previously written into the shared preInfoJSON instead
			// of the per-task infoObj, leaking one task's URL into all the others.
			infoObj.put("urlStep2", childUrl);
			crawlerURL.setInformation(infoObj.toJSONString());
			
			crawlerURL.setTaskName(nextTaskName);
			
			crawleOutput.add(crawlerURL);
		}
		
		return new CrawlerExtractInfo(crawleOutput);
	}
	
	
	/**
	 * Ad-hoc manual test: seeds a task with Step1-style information, runs
	 * {@link #crawler(CrawlerTaskInfo)} against a live JD.com list page, and
	 * prints each resulting child task.
	 */
	public static void main(String[] args) throws Exception {
		JSONObject jsonObject = new JSONObject();
		jsonObject.put("nameStep1", "nameStep1");
		jsonObject.put("urlStep1", "urlStep1");
		
		CrawlerTaskInfo crawlerTaskInfo = new CrawlerTaskInfo();
		crawlerTaskInfo.setTaskUrl("https://list.jd.com/list.html?cat=737,794,12392");
		crawlerTaskInfo.setInformation(jsonObject.toJSONString());
		
		CrawlerExtractInfo crawlerExtractInfo = crawler(crawlerTaskInfo);
		List<CrawlerTaskInfo> crawlerTaskInfos = crawlerExtractInfo.getCrawlerTaskInfos();
		for (int i = 0; i < crawlerTaskInfos.size(); i++) {
			System.out.println(crawlerTaskInfos.get(i));
		}
	}
}
