package com.lee.crawler.gov.strategy.list;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.stereotype.Component;

import com.lee.crawler.gov.strategy.FuzhouArticleStrategy;
import com.lee.crawler.gov.util.HttpClientUtil;

@Component
public class FuzhouListStrategy implements ListStrategy {

	// Paged search endpoints; %d is substituted with the 1-based page number.
	private static final String WJJ_PAGE_URL = "http://wjj.fuzhou.gov.cn/was5/web/search?channelid=290792&templet=docs.jsp&sortfield=-docorderpri,-docreltime&classsql=chnlid=5914&page=%d&prepage=20";
	private static final String JAQ_PAGE_URL = "http://jaq.fuzhou.gov.cn/was5/web/search?channelid=290792&templet=advsch.jsp&sortfield=-docorderpri,-docreltime&classsql=chnlid=8630&prepage=15&page=%d";

	// Extracts the total page count from the search response, e.g. "pagenum":"12".
	private static final Pattern PAGE_NUM_PATTERN = Pattern.compile("\"pagenum\":\"(\\d+)\"");

	// Extracts article detail-page links, e.g. "url":"http://....htm".
	private static final Pattern URL_PATTERN = Pattern.compile("\"url\":\"(http.*?\\.htm)\"");

	@Autowired
	private FuzhouArticleStrategy fuzhouArticleStrategy;

	/**
	 * Ad-hoc entry point: boots the Spring context, runs one full crawl, then
	 * blocks on stdin so the JVM stays alive for inspection.
	 */
	public static void main(String[] args) {
		ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("spring/applicationContext.xml",
				"spring/applicationContext-datasource.xml", "spring/applicationContext-redis.xml");
		try {
			context.start();
			Arrays.asList(context.getBeanDefinitionNames()).forEach(System.out::println);

			FuzhouListStrategy bean = (FuzhouListStrategy) context.getBean("fuzhouListStrategy");
			bean.saveCrawlerArticles();

			// Keep the process alive until the operator presses Enter.
			System.in.read();
		} catch (IOException e) {
			e.printStackTrace(); // TODO(review): replace with a proper logger
		} finally {
			// Close in finally so the context is released even if the crawl throws,
			// not only when System.in.read() returns (the original leaked on failure).
			context.close();
		}
	}

	@Override
	public String setCategroyUrl() { // method name (incl. typo) fixed by the ListStrategy interface
		// This strategy hard-codes both endpoint URLs; no single category URL applies.
		return null;
	}

	/**
	 * Crawls every result page of both endpoints: fetches page 1 to learn the
	 * total page count, then walks pages 1..pagenum and persists each page's
	 * articles via {@link #savePage(String)}.
	 */
	@Override
	public void saveCrawlerArticles() {
		Stream.of(WJJ_PAGE_URL, JAQ_PAGE_URL).forEach(pageUrl -> {
			String contentStr = HttpClientUtil.doGet(String.format(pageUrl, 1));
			if (contentStr == null) {
				return; // fetch failed; skip this endpoint instead of NPE-ing on matcher()
			}
			Matcher matcher = PAGE_NUM_PATTERN.matcher(contentStr);
			if (matcher.find()) {
				int pagenum = Integer.parseInt(matcher.group(1));
				for (int page = 1; page <= pagenum; page++) {
					savePage(String.format(pageUrl, page));
				}
			}
		});
	}

	/**
	 * Fetches one result page, extracts every article link and hands each to the
	 * article strategy. A failure on a single article must not abort the rest of
	 * the page, so exceptions are caught per link.
	 */
	private void savePage(String pageUrl) {
		String contentStr = HttpClientUtil.doGet(pageUrl);
		if (contentStr == null) {
			return; // fetch failed; nothing to parse
		}
		Matcher matcher = URL_PATTERN.matcher(contentStr);
		while (matcher.find()) {
			String link = matcher.group(1);
			try {
				fuzhouArticleStrategy.saveCrawlerArticle(link);
			} catch (Exception e) {
				e.printStackTrace(); // TODO(review): replace with a proper logger
			}
		}
	}

}
