package extractorCode;



import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
//import org.jsoup.*;


/**
 * Interactive scraper for TalkingPointsMemo weekly archive pages.
 *
 * <p>For each year/month the operator is prompted (on stdin) for the week-start
 * day number; the program then fetches up to 8 paginated archive pages, extracts
 * every {@code <div class="entry_text">...</div>} block, strips markup and
 * non-ASCII characters, and writes each entry to its own file under
 * {@code c:\newData\TPM\}.
 */
public class TPMBlogText {

	private final static int INITIAL_BUF_SIZE = 65536;

	// Compiled once instead of per page fetch (Pattern.compile is not cheap in a loop).
	private final static Pattern ENTRY_TEXT =
			Pattern.compile("<div class=\"entry_text\">(.*?)</div>", Pattern.CASE_INSENSITIVE);

	public static void main(String args[]) throws Exception
	{
		// NOTE(review): 12 looks like a resume point from a previous run (the month
		// loop starts at December 2006); later years are reset to January below.
		int i = 12;

		// One reader around System.in for the whole session; the original re-wrapped
		// System.in on every iteration.
		BufferedReader weekInput = new BufferedReader(new InputStreamReader(System.in));

		// For each year, for each month, vary page numbers and capture text.
		for (int k = 2006; k <= 2007; k++)
		{
			while (i <= 12)
			{
				System.out.print("The year under process is" + k + " the month is " + i + "enter the week start number");
				String wStart = weekInput.readLine();
				for (int l = 0; l <= 7; l++)
				{
					System.out.println("page" + l);

					// Zero-pad single-digit months to match the archive URL scheme.
					String month = (i < 10) ? ("0" + Integer.toString(i)) : Integer.toString(i);
					String page = fetchPage("http://talkingpointsmemo.com/archives/week_" + k + "_" + month + "_" + wStart + ".php?page=" + l);

					// Collapse inter-tag whitespace and strip control characters so the
					// non-DOTALL regex below can match across what were line breaks.
					// (The original's extra replaceAll("\r\n", "") was dead code: \r and
					// \n were already removed individually.)
					page = page.replaceAll(">\\s*<", "><");
					page = page.replaceAll("[\t\b\r\n]", "");

					Matcher fullText = ENTRY_TEXT.matcher(page);
					int fileCount = 0;
					while (fullText.find())
					{
						String text = fullText.group(1);
						text = text.replaceAll("\\<.*?>", "");          // drop any tags nested in the entry
						text = text.replaceAll("[^\\p{ASCII}]", "");    // drop non-ASCII residue
						System.out.println(text);

						// try-with-resources: the writer is closed even if write() throws
						// (the original leaked the file handle on failure).
						try (BufferedWriter wr = new BufferedWriter(new FileWriter(
								"c:\\newData\\TPM\\week_" + k + "_" + month + "_" + wStart + "\\page_" + l + " " + (fileCount++) + ".txt")))
						{
							wr.write(text);
						}
					}
				}
				// Operator advances to the next month by entering a value > 31.
				if (Integer.parseInt(wStart) > 31) i++;
			}
			// Bug fix: the original never reset i, so every year after the first was
			// skipped entirely (while (i <= 12) was immediately false).
			i = 1;
		}
	}

	/**
	 * Downloads the page at {@code urlString} and returns its bytes as a String
	 * (each byte cast to char, i.e. effectively Latin-1; callers strip non-ASCII
	 * anyway). On any I/O error the partial content read so far is returned and
	 * the error is printed — best-effort, matching the original behavior.
	 */
	private static String fetchPage(String urlString) {
		StringBuilder buffer = new StringBuilder(INITIAL_BUF_SIZE);
		// try-with-resources closes the stream on every path; the original leaked it.
		// MalformedURLException is an IOException, so one catch covers both.
		try (BufferedInputStream reader = new BufferedInputStream(new URL(urlString).openStream())) {
			int temp;
			while ((temp = reader.read()) != -1) {
				buffer.append((char) temp);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return buffer.toString();
	}

}
