package arm_search_3.crawling;

import arm_search_3.data.DocumentCollection;
import arm_search_3.data.Seed;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.Scanner;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Crawler adalah kelas abstrak bagi crawler, tugas dari crawler adalah
 * manyuplai document collection dengan document dari seed yang diberikan
 *
 * @author Robert Gunawan
 */
public abstract class Crawler {

    /**
     * Name of the file that stores the document ID - email ID mapping.
     *
     * NOTE(review): conceptually a constant, but deliberately left non-final
     * so existing callers that reassign it keep compiling.
     */
    public static String DOC_MAP_ID_FILE = "DOC_MAP_ID_FILE.dim";
    /**
     * Mapping between document IDs and email IDs. Not initialized here — a
     * concrete subclass is expected to assign it before the mapping methods
     * are used (TODO confirm against subclasses).
     */
    protected HashMap<Integer, String> documentMapping;
    /**
     * Queue of document collections supplied by this crawler.
     */
    private Queue<DocumentCollection> documentCollections = new LinkedList<DocumentCollection>();
    /**
     * Number of documents crawled so far.
     */
    private Integer crawledCount = 0;

    /**
     * Returns the mapping between document IDs and email IDs.
     *
     * NOTE(review): exposes the internal map directly; callers can mutate it
     * outside the {@code synchronized} path of
     * {@link #addDocumentMapping(Integer, String)}.
     *
     * @return mapping of document ID to email ID
     */
    public HashMap<Integer, String> getDocumentMapping() {
        return documentMapping;
    }

    /**
     * Saves the document ID - email ID mapping to {@link #DOC_MAP_ID_FILE}
     * inside the given directory, one "&lt;id&gt; &lt;emailID&gt;" pair per
     * line, in ascending document ID order.
     *
     * @param directory path of the index directory
     * @param isCompressed when {@code true}, document IDs are delta-encoded:
     *        each line stores the gap from the previous ID, keeping the
     *        numbers (and the file) small
     */
    public void saveDocMappingFile(String directory, boolean isCompressed) {
        String filePath = directory + File.separator + DOC_MAP_ID_FILE;
        // Sort by document ID so the delta encoding yields small, non-negative
        // gaps; HashMap iteration order is unspecified, which previously made
        // the "compressed" output arbitrary (and possibly negative) deltas.
        TreeMap<Integer, String> sorted = new TreeMap<Integer, String>(documentMapping);
        // try-with-resources closes the writer even if a write fails mid-way
        // (the previous version leaked the streams on any IOException).
        try (BufferedWriter out = new BufferedWriter(new FileWriter(filePath))) {
            int previous = 0;
            for (Map.Entry<Integer, String> entry : sorted.entrySet()) {
                int key = entry.getKey();
                // Delta-encode when compressed; otherwise store the raw ID.
                int stored = isCompressed ? key - previous : key;
                out.write(stored + " " + entry.getValue() + "\n");
                previous = key;
            }
        } catch (IOException ex) {
            Logger.getLogger(Crawler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Loads the document ID - email ID mapping from the given directory into
     * memory, reversing the encoding produced by
     * {@link #saveDocMappingFile(String, boolean)}.
     *
     * @param directory path of the index directory
     * @param isCompressed must match the flag the file was written with; when
     *        {@code true}, stored values are treated as deltas and summed
     *        back into absolute document IDs
     */
    public void readDocMappingFile(String directory, boolean isCompressed) {
        String filePath = directory + File.separator + DOC_MAP_ID_FILE;
        // try-with-resources closes the Scanner even if a line fails to parse.
        try (Scanner in = new Scanner(new File(filePath))) {
            int previous = 0;
            while (in.hasNextLine()) {
                // Limit the split to 2 so an email ID that itself contains a
                // space is not silently truncated at the second token.
                String[] parts = in.nextLine().split(" ", 2);
                int stored = Integer.parseInt(parts[0]);
                // Prefix-sum the deltas back into absolute IDs when compressed.
                int key = isCompressed ? previous + stored : stored;
                this.addDocumentMapping(key, parts[1]);
                previous = key;
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(Crawler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Adds a document ID - email ID pair to the mapping.
     *
     * NOTE(review): this is the only synchronized access to
     * {@code documentMapping}; readers elsewhere are unsynchronized — confirm
     * the intended threading model.
     *
     * @param documentID document ID
     * @param emailID email ID
     */
    public synchronized void addDocumentMapping(Integer documentID, String emailID) {
        documentMapping.put(documentID, emailID);
    }

    /**
     * Sets the number of crawled documents.
     *
     * @param crawledCount number of crawled documents
     */
    public void setCrawledCount(Integer crawledCount) {
        this.crawledCount = crawledCount;
    }

    /**
     * Replaces the queue of document collections.
     *
     * @param documentCollections queue of document collections
     */
    public void setDocumentCollections(Queue<DocumentCollection> documentCollections) {
        this.documentCollections = documentCollections;
    }

    /**
     * Returns the number of documents crawled so far.
     *
     * @return number of crawled documents
     */
    public Integer getCrawledCount() {
        return crawledCount;
    }

    /**
     * Returns the queue of document collections.
     *
     * @return queue of document collections
     */
    public Queue<DocumentCollection> getDocumentCollections() {
        return documentCollections;
    }

    /**
     * Removes and returns the head of the document collection queue, or
     * {@code null} when the queue is empty.
     *
     * @return the next document collection, or {@code null} if none remain
     */
    public DocumentCollection getDocument() {
        return documentCollections.poll();
    }

    /**
     * Marks every queued document collection as finished collecting, i.e.
     * signals that the crawling process has completed.
     */
    public void allFinish() {
        for (DocumentCollection docCollection : documentCollections) {
            docCollection.setIsFinishedCollecting(true);
        }
    }

    /**
     * Enqueues a document collection to be supplied by this crawler.
     *
     * @param collection document collection
     */
    public void addDocument(DocumentCollection collection) {
        documentCollections.add(collection);
    }

    /**
     * Performs the crawl, supplying the queued document collections with
     * documents reachable from the given seed.
     *
     * @param seed root seed
     * @param seedNameFilter names of seeds that must be skipped
     */
    public abstract void crawl(Seed seed, ArrayList<String> seedNameFilter);
}
