package geturl;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Minimal breadth-first web crawler: starting from the Chinese Wikipedia main
 * page, it repeatedly dequeues a URL from a {@link LinkQueue}, downloads the
 * page, extracts every {@code <a href="...">} target, enqueues the targets for
 * later crawling and appends them (one per line) to {@code a.txt}.
 */
public class geturl {

	/** Matches a lowercase opening anchor tag; group 1 is the whole tag. */
	private static final Pattern ANCHOR_TAG = Pattern.compile("(<a\\s*href=[^>]*>)");

	/**
	 * Entry point: seeds the crawl queue and loops until {@code urlMax} links
	 * have been written.
	 *
	 * NOTE(review): if the queue drains before urlMax is reached, getPageURL
	 * returns without progress and this loop spins; confirm whether LinkQueue
	 * exposes an "is empty" query that should also gate the loop.
	 */
	public static void main(String[] args) throws IOException {
		String seed = "http://zh.wikipedia.org/wiki/%E9%A6%96%E9%A1%B5";
		String filename = "a.txt";

		int urlcount = 0;
		final int urlMax = 100000000;

		LinkQueue lq = new LinkQueue();
		lq.addUnvisitedUrl(seed);

		while (urlcount < urlMax) {
			urlcount = getPageURL(lq, filename, urlcount);
		}
	}

	/**
	 * Dequeues one URL, downloads the page, extracts all anchor links,
	 * enqueues them for later crawling and appends them to the output file.
	 *
	 * @param lq       crawl queue holding unvisited/visited URLs
	 * @param filename file the discovered links are appended to
	 * @param urlcount number of links written so far
	 * @return the updated link count (unchanged when the queue is empty or the
	 *         page could not be fetched)
	 * @throws IOException if the output file cannot be created
	 */
	public static int getPageURL(LinkQueue lq, String filename, int urlcount) throws IOException {
		File file = new File(filename);
		if (!file.exists()) {
			file.createNewFile();
		}
		String nextLine = System.getProperty("line.separator");

		String path = (String) lq.unVisitedUrlDeQueue();
		// Fix: an empty queue yields null here; the original dereferenced it
		// unconditionally and crashed with a NullPointerException.
		if (path == null) {
			return urlcount;
		}

		if (path.startsWith("http")) {
			// Fix: URL construction and connection now sit inside the handler,
			// so one malformed or unreachable scraped link no longer escapes
			// as an uncaught exception and kills the whole crawl.
			try {
				URL url = new URL(path);
				String charset = charsetOf(url.openConnection());
				// Fix: try-with-resources closes the stream, reader and writer
				// on every path; the original leaked all of them whenever
				// reading or charset decoding threw.
				try (InputStream in = url.openStream();
						BufferedReader br = new BufferedReader(new InputStreamReader(in, charset));
						FileWriter writer = new FileWriter(file, true)) {
					String line;
					while ((line = br.readLine()) != null) {
						Iterator<String> it = getinnerURL(line);
						while (it.hasNext()) {
							String link = it.next();
							lq.addUnvisitedUrl(link);
							writer.write(nextLine + link);
							urlcount++;
						}
					}
					// close() flushes, so the per-link flush() was redundant.
				}
			} catch (Exception e) {
				// Best-effort crawl: report and skip unreadable pages instead
				// of swallowing the failure silently as before.
				System.err.println("Skipping " + path + ": " + e);
			}
		}
		lq.removeVisitedUrl(path);
		return urlcount;
	}

	/**
	 * Extracts the charset token from a Content-Type header such as
	 * "text/html; charset=UTF-8", defaulting to gb2312 when absent.
	 */
	private static String charsetOf(URLConnection con) {
		String contentType = con.getContentType();
		if (contentType != null) {
			String[] parts = contentType.split("=");
			if (parts.length > 1) {
				return parts[1].trim();
			}
		}
		return "gb2312";
	}

	/**
	 * Extracts all href targets from anchor tags on one line of HTML and
	 * returns them normalized to absolute URLs via {@link #modifyurl}.
	 *
	 * NOTE(review): only lowercase {@code <a href} tags with a double-quoted
	 * value are recognized; uppercase tags and unquoted attributes are skipped.
	 *
	 * @param s one line of page text
	 * @return iterator over the absolute URLs found on the line
	 */
	public static Iterator<String> getinnerURL(String s) {
		ArrayList<String> links = new ArrayList<String>();
		Matcher m = ANCHOR_TAG.matcher(s);
		// The pattern is now compiled once (static field) instead of per line.
		while (m.find()) {
			// The href value is the text between the first pair of quotes.
			String[] pieces = m.group(1).split("\"");
			if (pieces.length > 1) {
				links.add(modifyurl(pieces[1]));
			} else {
				System.out.println(m.group(1));
			}
		}
		return links.iterator();
	}

	/**
	 * Normalizes a raw href value to an absolute http URL: protocol-relative
	 * values ("//host/...") get an "http:" scheme, absolute URLs pass through,
	 * and site-absolute or page-relative paths are resolved against
	 * zh.wikipedia.org.
	 *
	 * @param s raw href attribute value
	 * @return an absolute http URL
	 */
	public static String modifyurl(String s) {
		if (s.startsWith("//")) {
			return "http:" + s;
		}
		if (s.startsWith("http")) {
			return s;
		}
		if (s.startsWith("/")) {
			return "http://zh.wikipedia.org" + s;
		}
		return "http://zh.wikipedia.org/" + s;
	}
}

