package crawler.script;

import java.util.List;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

import crawler.client.sdk.task.java.CrawlerExtractInfo;
import crawler.client.sdk.task.java.CrawlerTaskInfo;
import crawler.utility.http.HttpClientResponse;
import crawler.utility.http.HttpClientUtil;

public class Exam4 {

	/**
	 * Example 4: fetch a page, extract every anchor tag's text and href into
	 * a JSON array, and hand the result off to the next crawler task.
	 *
	 * @param crawlerTaskInfo task descriptor carrying the URL to fetch
	 * @return a {@code CrawlerExtractInfo} wrapping the follow-up task
	 * @throws Exception if the HTTP fetch fails or the response has no body
	 */
	public static CrawlerExtractInfo crawler(CrawlerTaskInfo crawlerTaskInfo) throws Exception {
		String url = crawlerTaskInfo.getTaskUrl();

		HttpClientUtil httpClientUtil = new HttpClientUtil();
		httpClientUtil.setUrl(url);

		HttpClientResponse responseBean = httpClientUtil.get();

		String html = responseBean.getContent();
		// Fail fast with context instead of an opaque NPE inside Jsoup.parse.
		if (html == null) {
			throw new IllegalStateException("Empty response body for url: " + url);
		}

		Document doc = Jsoup.parse(html);
		Elements elements = doc.select("a");

		// One JSON object per link: {"name": <anchor text>, "url": <href>}.
		JSONArray jsonArray = new JSONArray();
		for (Element element : elements) {
			JSONObject result = new JSONObject(); // scoped to this iteration
			result.put("name", element.text());
			result.put("url", element.attr("href"));
			jsonArray.add(result);
		}

		CrawlerTaskInfo crawlerOutput = new CrawlerTaskInfo();
		crawlerOutput.setChildUrl("https://www.exam.com/");
		// Tasks in the same Group do not need these two values set.
		// Calling across Groups is strongly discouraged.
		crawlerOutput.setGroupName("NextGroupName");
		crawlerOutput.setTaskName("NextClassName");
		// Pass values between classes (e.g. directory structure) that later
		// steps may need; also usable for validation and debugging.
		crawlerOutput.setInformation(JSONArray.toJSONString(jsonArray));

		return new CrawlerExtractInfo(crawlerOutput);
	}

	/**
	 * Manual smoke test: crawl baidu.com and print the follow-up tasks
	 * produced by {@link #crawler(CrawlerTaskInfo)}.
	 */
	public static void main(String[] args) throws Exception {
		CrawlerTaskInfo crawlerTaskInfo = new CrawlerTaskInfo();
		crawlerTaskInfo.setTaskUrl("https://www.baidu.com/");
		CrawlerExtractInfo crawlerExtractInfo = crawler(crawlerTaskInfo);
		for (CrawlerTaskInfo taskInfo : crawlerExtractInfo.getCrawlerTaskInfos()) {
			System.out.println(taskInfo);
		}
	}
}
