import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.net.MalformedURLException;
import java.util.*;

/**
 * Created by Bella on 15/04/2014.
 *
 * A miniature search engine over Simple-English Wikipedia:
 * <ul>
 *   <li>{@link #crawl(PageNode)} BFS-crawls up to ~100 pages, building a link
 *       graph ({@code webGraph}) and an inverted index ({@code invertedIndex});</li>
 *   <li>{@link #hits(WebGraph)} ranks the pages with the HITS
 *       (hubs-and-authorities) algorithm;</li>
 *   <li>{@link #TA(List, int)} merges per-keyword score lists with Fagin's
 *       Threshold Algorithm;</li>
 *   <li>{@link #main(String[])} wires everything together behind a console prompt.</li>
 * </ul>
 */
public class Search {

    static WebGraph webGraph = new WebGraph();
    // Snapshot of each page's HITS scores from the previous iteration, keyed
    // by node id; used by numChanges() to detect convergence.
    static Map<String, HitsData> prevWebGraph = new HashMap<String, HitsData>();
    static InvertedIndex invertedIndex = new InvertedIndex();

    //The threshold influences the number of iterations of the hits function.
    //After checking the change of hubness and authority values between every two successive iterations,
    //I defined the threshold in order to determine whether the change is significant or not.
    //If the changes for every pageNode are smaller than the threshold - stop iterating; otherwise continue
    //until this level of change is reached.
    //The value chosen for the threshold causes 37 iterations; when n->infinity the number of iterations->37,
    //therefore more iterations wouldn't change the rank result or make it more precise.
    static public double threshold = 1 / Math.pow(2, 100);

    // --- Threshold Algorithm (TA) state; reset by updateThreshold() ---
    private static double TA_threshold;                                    // current TA stopping threshold
    private static List<ItemData> topKItems = new LinkedList<ItemData>();  // items seen since the last threshold update
    private static int index;                                              // scan position inside the TA results list
    private static int aboveTH;                                            // running count of results scoring >= TA_threshold

    /**
     * BFS-crawls the web starting at {@code pageNode} until the graph holds
     * more than 100 pages (or the frontier is exhausted), then indexes every
     * page that made it into the graph.
     *
     * Link discovery: a page token of the form {@code ...href=.../wiki/...}
     * is treated as an internal Wikipedia link and re-assembled into an
     * absolute URL.
     *
     * @param pageNode the seed page
     * @throws MalformedURLException if a discovered link cannot be parsed
     */
    public static void crawl(PageNode pageNode) throws MalformedURLException {

        Queue<PageNode> queue = new ArrayDeque<PageNode>();
        webGraph.addWebPage(pageNode.getNodeId(), pageNode);
        queue.add(pageNode);

        while (webGraph.getWebGraph().size() <= 100) {

            PageNode p = queue.poll();
            if (p == null) {
                break; // frontier exhausted before reaching the size cap
            }

            // Scan the page's tokens for wiki links and enqueue every linked
            // page. Indexing of the words themselves happens in a single pass
            // AFTER the crawl (see below), so pages that were discovered but
            // never polled get indexed too.
            for (Map.Entry<String, Integer> entry : p.getWordsInPage().entrySet()) {

                if (webGraph.getWebGraph().size() > 100)
                    break;

                String[] parts = entry.getKey().split("/");
                if (parts.length <= 1 || !parts[0].contains("href=") || !parts[1].equals("wiki")) {
                    continue; // not an internal wiki link
                }

                // Re-assemble the absolute address from the split token.
                StringBuilder newLink = new StringBuilder("http://simple.wikipedia.org");
                for (int i = 1; i < parts.length - 1; i++) {
                    newLink.append('/').append(parts[i]);
                }

                // The last path segment carries a trailing '"' (and possibly
                // extra markup after an embedded '"'): drop the final char and
                // cut at the first quote that remains.
                String last = parts[parts.length - 1];
                if (last.isEmpty()) {
                    continue; // malformed token - nothing to link to
                }
                String segment = last.substring(0, last.length() - 1);
                int quote = segment.indexOf('"');
                if (quote >= 0) {
                    segment = segment.substring(0, quote);
                }
                newLink.append('/').append(segment);

                PageNode linkedPageNode = new PageNode(newLink.toString());
                // NOTE(review): this runs even when the URL was already seen;
                // assumes WebGraph.addWebPage de-duplicates by node id - confirm.
                webGraph.addWebPage(linkedPageNode.getNodeId(), linkedPageNode);
                p.addLink(linkedPageNode);
                // Record both directions of the edge for the HITS computation.
                linkedPageNode.addInPageNode(p);
                p.addOutPageNode(linkedPageNode);

                queue.add(linkedPageNode);
            }
        }

        // Index every page in the final graph. This single pass replaces the
        // per-poll indexing the loop above used to do: that version credited
        // each polled page's words to the SEED page's node id (a bug) and
        // would then have double-counted them here.
        for (PageNode page : webGraph.getWebGraph().values()) {
            TreeMap<String, Integer> wordsInPage = page.getWordsInPage();
            for (Map.Entry<String, Integer> e : wordsInPage.entrySet()) {
                invertedIndex.addWord(e.getKey(), page.getNodeId(), e.getValue(), page.getPageLength());
            }
        }
    }


    /**
     * Iterates the HITS algorithm over {@code webGraph} until no page's hub or
     * authority score changes by more than {@link #threshold} between two
     * successive iterations, then returns the pages sorted by decreasing
     * authority.
     *
     * Each iteration:
     * <ol>
     *   <li>auth(p) = sum of hub scores of the pages linking TO p, then the
     *       authority vector is normalised to unit length;</li>
     *   <li>hub(p) = sum of authority scores of the pages p links to, then the
     *       hub vector is normalised the same way.</li>
     * </ol>
     *
     * @param webGraph the link graph built by {@link #crawl(PageNode)}
     * @return one {@link OutputHitsData} per page, sorted by authority
     */
    public static List<OutputHitsData> hits(WebGraph webGraph) {

        while (!isConverged(webGraph)) {

            // --- authority update ---
            double norm = 0.0;
            for (PageNode pageNode : webGraph) {
                double sumOfHubs = 0.0;
                for (PageNode inPageNode : pageNode.getInPageNode()) {
                    sumOfHubs += inPageNode.getHitsData().getHubness();
                }
                pageNode.getHitsData().setAuthority(sumOfHubs);
                norm += sumOfHubs * sumOfHubs; // accumulate squared values for normalisation
            }
            norm = Math.sqrt(norm);
            if (norm > 0.0) { // guard (fix): an all-zero vector would otherwise yield NaN scores
                for (PageNode pageNode : webGraph) {
                    pageNode.getHitsData().setAuthority(pageNode.getHitsData().getAuthority() / norm);
                }
            }

            // --- hub update ---
            norm = 0.0;
            for (PageNode pageNode : webGraph) {
                double sumOfAuthority = 0.0;
                for (PageNode outPageNode : pageNode.getOutPageNode()) {
                    sumOfAuthority += outPageNode.getHitsData().getAuthority();
                }
                pageNode.getHitsData().setHubness(sumOfAuthority);
                norm += sumOfAuthority * sumOfAuthority;
            }
            norm = Math.sqrt(norm);
            if (norm > 0.0) {
                for (PageNode pageNode : webGraph) {
                    pageNode.getHitsData().setHubness(pageNode.getHitsData().getHubness() / norm);
                }
            }
        }

        return results(webGraph);
    }

    /**
     * Builds the output list: one entry per page carrying its node id and
     * final authority score, sorted in decreasing authority order.
     */
    private static List<OutputHitsData> results(WebGraph webGraph) {

        List<OutputHitsData> results = new LinkedList<OutputHitsData>();
        for (PageNode pageNode : webGraph.getWebGraph().values()) {
            results.add(new OutputHitsData(pageNode.getNodeId(), pageNode.getHitsData().getAuthority()));
        }
        // OutputHitsData's natural ordering is assumed to be decreasing
        // authority - TODO confirm against its compareTo.
        Collections.sort(results);
        return results;
    }

    /** @return true when no page's scores moved by more than the threshold. */
    private static boolean isConverged(WebGraph webGraph) {
        return numChanges(webGraph) == 0;
    }

    /**
     * Counts the pages whose authority or hubness changed by more than
     * {@link #threshold} since the previous call, and snapshots the current
     * scores into {@code prevWebGraph} for the next comparison. A page seen
     * for the first time always counts as a change.
     *
     * @param webGraph the graph whose current scores are compared
     * @return the number of pages still above the convergence threshold
     */
    private static int numChanges(WebGraph webGraph) {

        int counter = 0;

        for (PageNode pageNode : webGraph.getWebGraph().values()) {

            HitsData previous = prevWebGraph.get(pageNode.getNodeId());
            if (previous == null) {
                // First sighting: record the scores and count it as changed.
                previous = new HitsData(pageNode.getNodeId());
                previous.setAuthority(pageNode.getHitsData().getAuthority());
                previous.setHubness(pageNode.getHitsData().getHubness());
                prevWebGraph.put(previous.getId(), previous);
                counter++;
            } else {
                double hubDelta = Math.abs(previous.getHubness() - pageNode.getHitsData().getHubness());
                double authDelta = Math.abs(previous.getAuthority() - pageNode.getHitsData().getAuthority());
                if (hubDelta > threshold || authDelta > threshold) {
                    counter++;
                }
                // Snapshot the current scores for the next iteration.
                previous.setHubness(pageNode.getHitsData().getHubness());
                previous.setAuthority(pageNode.getHitsData().getAuthority());
            }
        }

        return counter;
    }

    /**
     * The Threshold Algorithm (Fagin): performs round-robin sorted access over
     * the per-category score lists, fully scoring each newly seen item via
     * random access, until at least {@code k} merged results score at or above
     * the running threshold. Aggregation uses {@link #maxAggregate(List)}.
     *
     * @param categories one sorted score list per keyword (plus HITS ranks)
     * @param k          number of results required above the threshold
     * @return the merged results sorted by score, or {@code null} when
     *         {@code categories} is empty
     */
    public static List<ItemData> TA(List<CategoryData> categories, int k) {

        if (categories.isEmpty())
            return null;
        topKItems.clear();

        Map<String, ItemData> results = new HashMap<String, ItemData>();
        List<ItemData> res = new LinkedList<ItemData>(results.values());
        index = aboveTH = 0;

        int exhausted = 0; // consecutive sorted accesses that returned nothing

        // Sorted access, round-robin over the categories.
        for (int i = 0; numResultsAboveThreshold(res) < k; i++) {

            if (i == categories.size()) {
                // A full round of sorted access finished: refresh the threshold.
                updateThreshold();
                i = 0;
            }

            // Next-best item of the current category.
            ItemData topItem = categories.get(i).next();
            if (topItem == null) {
                // Guard (fix): once every category is drained no new result can
                // ever appear, so stop instead of looping forever.
                if (++exhausted >= categories.size()) {
                    break;
                }
                continue;
            }
            exhausted = 0;

            topKItems.add(topItem);

            if (results.containsKey(topItem.getName()))
                continue; // already fully scored via random access

            // Random access: collect this item's score in every category.
            List<Double> scores = new LinkedList<Double>();
            for (CategoryData category : categories) {
                ItemData found = category.find(topItem.getName());
                if (found != null) {
                    scores.add(found.getScore());
                }
            }

            // Aggregate the scores and record the item.
            ItemData newItem = new ItemData(topItem.getName(), maxAggregate(scores));
            results.put(newItem.getName(), newItem);
            res = new LinkedList<ItemData>(results.values());
        }

        return results(results.values());
    }

    /**
     * Counts how many results score at or above {@code TA_threshold}. The
     * count and scan position live in static fields so only results added
     * since the previous call are examined; both are reset by
     * {@link #updateThreshold()}.
     *
     * NOTE(review): the caller rebuilds {@code results} from a HashMap on
     * every insert, so positions below {@code index} may map to different
     * items across calls - the incremental scan assumes a stable ordering.
     *
     * @param results the current merged result list
     * @return the running number of results above the threshold
     */
    private static int numResultsAboveThreshold(List<ItemData> results) {
        int i;
        for (i = index; i < results.size(); i++) {
            if (results.get(i).getScore() >= TA_threshold) {
                aboveTH++;
            }
        }
        index = i;
        return aboveTH;
    }


    /**
     * Recomputes the TA stopping threshold as the aggregate of the scores seen
     * during the last round of sorted access, then resets the per-round state
     * so the whole result list is re-checked against the new threshold.
     */
    private static void updateThreshold() {
        List<Double> scores = new LinkedList<Double>();
        for (ItemData item : topKItems) {
            scores.add(item.getScore());
        }

        TA_threshold = maxAggregate(scores);
        topKItems.clear();
        index = aboveTH = 0;
    }

    /**
     * The aggregation function I chose is the Max function.
     * In order to give the most accurate result to the key word, the most
     * relevant page to the search is the one with the biggest score on the
     * top-k list. The score is bigger when the rank is bigger.
     *
     * @param scores the per-category scores of one item
     * @return the largest score, or 0.0 for an empty list
     */
    private static double maxAggregate(List<Double> scores) {

        double max = 0.0;
        for (double d : scores) {
            max = Math.max(max, d);
        }
        return max;
    }

    /** Copies {@code items} into a list sorted by ItemData's natural order. */
    private static List<ItemData> results(Collection<ItemData> items) {
        List<ItemData> list = new LinkedList<ItemData>(items);
        Collections.sort(list);
        return list;
    }


    /**
     * Crawls from a seed page, dumps the URL list and the top HITS ranks to
     * disk, then answers keyword queries from the console until "exit".
     */
    public static void main(String[] args) throws Exception {

        crawl(new PageNode("http://simple.wikipedia.org/wiki/Albert_einstein"));

        // Dump the crawled URLs (FileWriter creates the file if needed).
        writeToFile("urls.txt", webGraph.toString());

        List<OutputHitsData> list = hits(webGraph);

        // Write the top pages by authority
        // (fix: don't assume at least 5 pages were crawled).
        StringBuilder rank = new StringBuilder();
        int top = Math.min(5, list.size());
        for (int i = 0; i < top; i++) {
            rank.append(list.get(i).toString()).append(System.getProperty("line.separator"));
        }
        writeToFile("rank.txt", rank.toString());

        Scanner in = new Scanner(System.in);
        System.out.println("Enter Keywords");
        String input;
        while (!(input = in.nextLine()).equals("exit")) {

            // One TA category per keyword that appears in the inverted index.
            List<CategoryData> categories = new LinkedList<CategoryData>();
            for (String keyword : input.split(" ")) {
                Map<String, Double> wordScore = invertedIndex.getUrlAndScoreForWord(keyword);
                if (wordScore == null) {
                    continue; // keyword not indexed
                }

                List<ItemData> items = new LinkedList<ItemData>();
                for (Map.Entry<String, Double> entry : wordScore.entrySet()) {
                    items.add(new ItemData(entry.getKey(), entry.getValue()));
                }
                categories.add(new CategoryData(keyword, items));
            }

            if (categories.isEmpty()) {
                System.out.print("No Results, Please try again.\n");
                System.out.println("\nEnter Keywords");
                continue;
            }

            // Add the HITS authority ranks as one more category, so page
            // quality influences the merged ranking alongside keyword scores.
            List<ItemData> items = new LinkedList<ItemData>();
            for (OutputHitsData resHitsData : list) {
                items.add(new ItemData(resHitsData.getId(), resHitsData.getRank()));
            }
            categories.add(new CategoryData("HITS", items));

            List<ItemData> result = Search.TA(categories, 5);
            if (result.size() > 0) {
                System.out.print("Search Results: \n");

                for (int i = 0; (i < 5) && (i < result.size()); i++) {
                    System.out.print(result.get(i).toString() + "\n");
                }
            } else {
                System.out.print("No Results, Please try again.\n");
            }

            System.out.println("\nEnter Keywords");
        }
        in.close();
    }

    /**
     * Writes {@code content} to {@code fileName}, closing the writer even when
     * the write fails (fix: the original leaked the writer on an I/O error).
     */
    private static void writeToFile(String fileName, String content) throws java.io.IOException {
        BufferedWriter bw = new BufferedWriter(new FileWriter(new File(fileName).getAbsoluteFile()));
        try {
            bw.write(content);
        } finally {
            bw.close();
        }
    }
}