package helpers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import entities.AutoSite;

/**
 * Helper that fetches a page with jsoup and extracts news-article links
 * using the CSS selector configured on an {@link AutoSite}.
 *
 * <p>Relative hrefs are made absolute by prefixing
 * {@code "http://" + UrlHelper.fomatDomains(autoSite.getUrl())};
 * absolute http/https hrefs are kept as-is.
 */
public class CrawlerHelper {

	/**
	 * Matches absolute http or https URLs. Compiled once instead of calling
	 * Pattern.matches(..) per link. Note: the original pattern only accepted
	 * "http://", which mangled absolute https links by re-prefixing them.
	 */
	private static final Pattern ABSOLUTE_URL = Pattern.compile("^https?://[\\w\\W]+$");

	/** Utility class — no instances. */
	private CrawlerHelper() {
	}

	/**
	 * Fetches the site's main page and returns the extracted news links.
	 * (Method name spelling is kept for backward compatibility with callers.)
	 *
	 * @param autoSite site configuration providing the URL and link selector
	 * @return list of absolute link URLs (possibly empty), or {@code null}
	 *         if fetching/parsing failed — callers must null-check
	 */
	public static List<String> getLastestNews(AutoSite autoSite) {
		return extractLinks(autoSite, autoSite.getUrl());
	}

	/**
	 * Fetches the site's paginated listing page (base page URL plus current
	 * page index) and returns the extracted news links.
	 *
	 * @param autoSite site configuration providing page URL, page index and selector
	 * @return list of absolute link URLs (possibly empty), or {@code null}
	 *         if fetching/parsing failed — callers must null-check
	 */
	public static List<String> getPageNews(AutoSite autoSite) {
		return extractLinks(autoSite, autoSite.getSitePage() + autoSite.getCurrentPage());
	}

	/**
	 * Shared implementation: connects to {@code pageUrl}, selects link elements
	 * with the site's configured selector, and normalizes each href to an
	 * absolute URL.
	 *
	 * @param autoSite site configuration (selector and base domain)
	 * @param pageUrl  page to fetch
	 * @return extracted links, or {@code null} on any failure (behavior
	 *         preserved from the original implementation)
	 */
	private static List<String> extractLinks(AutoSite autoSite, String pageUrl) {
		List<String> listNews = new ArrayList<String>();
		try {
			Document doc = Jsoup.connect(pageUrl).get();
			Elements links = doc.select(autoSite.getRegLink());
			for (int i = 0; i < links.size(); i++) {
				// Read the href once per element instead of twice.
				String href = links.get(i).attr("href");
				if (ABSOLUTE_URL.matcher(href).matches()) {
					listNews.add(href);
				} else {
					// Relative link: make it absolute against the site's domain.
					listNews.add("http://" + UrlHelper.fomatDomains(autoSite.getUrl()) + href);
				}
			}
		} catch (Exception e) {
			// Preserved contract: signal failure with null (callers null-check).
			listNews = null;
			e.printStackTrace();
		}
		return listNews;
	}

}
