package unrelated;
import java.util.*;
import java.io.*;
/*
 * Test code for getting links from wiki page.
 * Gets all links to other wiki pages and put them into a linkedlist
 * Test page being used is "Robot"
 * Will extract all wiki links; needs adjustments for use with other functions or pieces of code
 * to feed it new pages to extract links from
 * */
public class LinkGetter {
	/**
	 * Reads the HTML of "Robot.htm" from the working directory, extracts every
	 * wiki-article link, prints each absolute URL, then prints the total count.
	 */
	public static void main(String[] args){
		// Read the whole file into one string. try-with-resources closes the
		// Scanner even if reading fails partway (the original leaked it).
		StringBuilder htmlCode = new StringBuilder();
		try (Scanner sc = new Scanner(new File("Robot.htm"))) {
			while (sc.hasNextLine()){
				htmlCode.append(sc.nextLine());
			}
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		}

		List<String> links = extractLinks(htmlCode.toString());

		// prints the links then the total number of wiki links on the page
		for (String link : links){
			System.out.println(link);
		}
		System.out.println("The total number of Wiki links on this page is " + links.size() + ".");
	}

	/**
	 * Extracts every link of the form {@code <a href="/wiki/...">} from the
	 * given HTML and returns them as absolute en.wikipedia.org URLs, in
	 * document order.
	 *
	 * @param htmlCode the page HTML as a single string
	 * @return the wiki links found (possibly empty, never null)
	 */
	static List<String> extractLinks(String htmlCode){
		final String key = "<a href=\"/wiki/"; // the term we are searching for
		List<String> links = new LinkedList<String>();
		int index = 0; // where the next search starts

		while (index < htmlCode.length()){
			int start = htmlCode.indexOf(key, index);
			// Original tested `start > 0`, which missed a match at index 0
			// and then looped forever (neither branch advanced `index`).
			if (start < 0){
				break;
			}
			// Article name runs from just past the key to the closing quote.
			// key.length() replaces the magic constant 15.
			int end = htmlCode.indexOf('"', start + key.length());
			if (end < 0){
				break; // malformed/truncated HTML: no closing quote
			}
			links.add("http://en.wikipedia.org/wiki/" + htmlCode.substring(start + key.length(), end));
			index = end;
		}
		return links;
	}
}
