package Image;

import Data.Settings;
import WebExample.DownloadPage;
import WebExample.MySQLjdbc;
import edu.uci.ics.crawler4j.crawler.Page;
import edu.uci.ics.crawler4j.parser.HtmlParseData;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;

public class filterHTML {

    /*
     * Extracts the configured div/tag text of a crawled page, scans it for
     * keywords from FILTER_CONFIG, and records newly flagged pages in the
     * database (plus writes them to file via DownloadPage).
     *
     * Returns true when the page content contains at least one keyword.
     */
    public static boolean filterThreadPosts(Page page) throws IOException, ClassNotFoundException, SQLException {

        int docid = page.getWebURL().getDocid();
        String url = page.getWebURL().getURL();
        String parentUrl = page.getWebURL().getParentUrl();

        // Filtering logic (which divs/tags to inspect) lives in FILTER_LOGIC.
        Properties logicProps = loadProperties(Settings.FILTER_LOGIC);

        // Reuse the HTML crawler4j already fetched instead of issuing a new
        // request to the target server via Jsoup.connect().
        Document doc = Jsoup.parseBodyFragment(((HtmlParseData) page.getParseData()).getHtml());

        StringBuilder contentToFilter = new StringBuilder();

        // Append the text of every configured div class.
        // ("true".equals(...) tolerates a missing property, unlike the reverse.)
        if ("true".equals(logicProps.getProperty("filterDiv"))) {
            for (String divName : splitString(logicProps.getProperty("div1"))) {
                contentToFilter.append(doc.getElementsByClass(divName).text());
            }
        }

        // Append the text of every configured tag entry.
        // BUGFIX: the original queried the whole unsplit "tag1" value on every
        // iteration instead of each individual name from the split list.
        // NOTE(review): this selects by CSS class even though the property is
        // named "tag1" — confirm getElementsByTag was not intended.
        if ("true".equals(logicProps.getProperty("filterTag"))) {
            for (String tagName : splitString(logicProps.getProperty("tag1"))) {
                contentToFilter.append(doc.getElementsByClass(tagName).text());
            }
        }

        String content = contentToFilter.toString();
        boolean contentFlagged = passFilter(content);

        // Persist newly flagged pages; skip pages already recorded.
        if (contentFlagged) {
            if (!MySQLjdbc.checkSiteFlagged(url)) {
                MySQLjdbc.addRow(docid, url, parentUrl, countFilter(content));
                System.out.println("**FLAGGED: Added to list**");
                DownloadPage.writeURLtoFile(page);
            } else {
                // Plain else: a second checkSiteFlagged round-trip to the DB
                // is redundant — the first check already failed.
                System.out.println("**FLAGGED: but already on list**");
            }
        }

        return contentFlagged;
    }

    /*
     * Loads a properties file, guaranteeing the stream is closed even when
     * load() throws (the original leaked a FileInputStream per call).
     */
    private static Properties loadProperties(String path) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            props.load(in);
        }
        return props;
    }

    /*
     * Returns true if at least one configured keyword occurs in the content.
     * Delegates to countFilter instead of duplicating the scan logic.
     */
    private static boolean passFilter(String content) throws IOException {
        return countFilter(content) > 0;
    }

    /*
     * Counts how many distinct configured keywords occur in the content.
     * Matching lower-cases the content only; keywords are presumably stored
     * lower-case in FILTER_CONFIG — TODO confirm.
     */
    private static int countFilter(String content) throws IOException {
        Properties filterProps = loadProperties(Settings.FILTER_CONFIG);

        // Hoisted out of the loop: the original re-lowercased the whole
        // content for every keyword.
        String haystack = content.toLowerCase();

        int count = 0;
        for (String key : filterProps.stringPropertyNames()) {
            if (haystack.contains(filterProps.getProperty(key))) {
                count++;
            }
        }
        return count;
    }

    /*
     * Splits a whitespace-separated configuration value into a list of names.
     * Uses "\\s+" after trim so repeated/leading spaces cannot produce empty
     * tokens (jsoup's getElementsByClass rejects an empty class name).
     */
    private static List<String> splitString(String string) {
        return Arrays.asList(string.trim().split("\\s+"));
    }

    /*
     * Issues a request to the given URL and returns its Content-Length
     * header, or -1 when the header is absent. Always disconnects the
     * connection (the original leaked it).
     */
    public static int getContentLength(String urlString) throws IOException {
        URL url = new URL(urlString);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        try {
            connection.connect();
            return connection.getContentLength();
        } finally {
            connection.disconnect();
        }
    }
}