#!/usr/local/bin/python2.7

import pickle, os

from docparser import *
from conversionlog import *
from constants import *

class StripCrawler(object):
    """Walk the strip database tree, parse every file not yet current in the
    persisted conversion log, and record the outcome of each attempt back
    into that log (pickled under Constants.logs_dir).
    """

    def __init__(self):
        self._db_root = Constants.db_root
        self._log_filepath = Constants.logs_dir
        self._log_filename = 'strip-log.pickle'
        self._parser = None
        # Components successfully parsed during this run.
        self._completed_components = []
        self._conversion_log = None
        self._curr_filepath = None
        self._curr_log_entry = LogEntry()

    def run(self):
        """Load (or create) the log, crawl the db tree, persist the log."""
        self._loadLog()
        self._prepareParser()
        self._beginCrawling()
        self.saveLog()

    def _loadLog(self):
        """Unpickle the existing conversion log, or start a fresh one."""
        full_path = os.path.join(self._log_filepath, self._log_filename)
        if not os.path.isfile(full_path):
            self._conversion_log = ConversionLog()
        else:
            # BUGFIX: pickles must be read in binary mode; text mode can
            # corrupt protocol-1+ payloads and breaks on Windows.
            with open(full_path, 'rb') as log:
                self._conversion_log = pickle.load(log)

    def _prepareParser(self):
        self._parser = Parser(self._db_root)

    def _beginCrawling(self):
        """Walk every file under the db root, converting stale entries."""
        os.chdir(self._db_root)

        # Count files up front so progress can be reported as a percentage.
        n_files = sum(len(files) for _, _, files in os.walk('.'))
        print('There are ' + str(n_files) + ' files to convert.')
        if n_files == 0:
            # BUGFIX: an empty tree previously raised ZeroDivisionError below.
            return

        count = 0
        last_perc = 0
        for root, dirs, files in os.walk('.'):
            for name in files:
                count += 1
                perc_done = round(float(count) / n_files * 100, 0)
                if perc_done != last_perc:
                    print(str(perc_done) + '% done...')
                    last_perc = perc_done

                self._curr_filepath = os.path.join(root, name)
                if not self._isIndexed():
                    self._convertFile()

    def _isIndexed(self):
        """Return True when the log says the current file is up to date."""
        return self._conversion_log.isCurrent(self._curr_filepath)

    def _convertFile(self):
        """Parse the current file and commit exactly one log entry for it."""
        self._parseFile()
        if self._curr_log_entry.parsed_successfully:
            pass
            # TODO: output writing is not yet enabled.
            #self._writeFile()
        self._writeLog()

    def _parseFile(self):
        """Parse the current file, recording errors/success on the entry.

        BUGFIX: the trailing self._writeLog() call was removed. It committed
        and reset the entry, so _convertFile's parsed_successfully check
        always inspected a fresh LogEntry, and a second, empty entry was
        appended for every file. _convertFile is now the single commit point.
        """
        self._curr_log_entry.src = self._curr_filepath
        self._parser.filepath = self._curr_filepath

        try:
            self._parser.processFile()
        except ParseError as err:
            print(err)
            self._curr_log_entry.addError(err)
        except Exception as err:
            # Unexpected failure: pause so the operator can inspect it.
            print(err)
            garbage = raw_input('Hit enter to resume...')
            self._curr_log_entry.addError(err)
        else:
            self._curr_log_entry.addErrors(self._parser.nonfatal_errors)
            self._curr_log_entry.parsed_successfully = True
            # Record the source mtime so isCurrent() can detect staleness.
            self._curr_log_entry.time = os.stat(self._curr_filepath).st_mtime
            self._completed_components.append(self._parser.component)

    def _writeLog(self):
        """Commit the current entry to the log and start a fresh one."""
        self._conversion_log.addEntry(self._curr_log_entry)
        self._curr_log_entry = LogEntry()

    def saveLog(self):
        """Pickle the conversion log to disk (no-op if it was never loaded)."""
        if self._conversion_log is None:
            return

        full_log_path = os.path.join(self._log_filepath, self._log_filename)
        # BUGFIX: write in binary mode to match the binary read in _loadLog.
        with open(full_log_path, 'wb') as myfile:
            pickle.dump(self._conversion_log, myfile)

    def getDbRoot(self):
        return self._db_root
    def setDbRoot(self, val):
        self._db_root = val
    db_root = property(getDbRoot, setDbRoot)

    def getCompletedComponents(self):
        return self._completed_components
    completed_components = property(getCompletedComponents)

    def getLogFilepath(self):
        return self._log_filepath
    def setLogFilepath(self, val):
        self._log_filepath = val
    log_filepath = property(getLogFilepath, setLogFilepath)

