#-*- coding: utf-8 -*-

#_______________________ Simple As FucK Image Downloader _______________________
#
#                                by newfagpower

"""
    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

    © 2011 newfagpower

"""

import threading
import multiprocessing
import time
import logging
log = logging.getLogger(__name__)

import downloader


# Multiprocessor version:
#   class Worker(multiprocessing.Process):
class Worker(threading.Thread):
    """Background thread that drives one downloader for a single URL.

    The worker communicates with the UI/scheduler exclusively through the
    shared ``info`` mapping (a ``multiprocessing.Manager().dict()``): it
    publishes progress ('name', 'nb_pending', 'nb_done', 'error') and polls
    control flags ('running', 'pause', 'remove') that the UI may flip.
    """

    # Error codes published in info['error']:
    PREPARE_FAILED = 1  # downloader.prepare() raised
    SAVING_FAILED = 2   # save()/remove() after the download loop raised

    # Poll interval (seconds) while the UI keeps the worker paused.
    _SLEEP_TIME_SEC = 3

    def __init__(self, info):
        """Create the downloader matching info['url'].

        :param info: shared dict holding this worker's state/control flags.
        :raises: whatever downloader.get_for_domain raises when no
                 downloader handles the URL's domain (Downloader.NotFound).
        """
        # Multiprocessor version:
        #   multiprocessing.Process.__init__(self)
        threading.Thread.__init__(self)
        self.info = info
        self.url = info['url']
        self.downloader = downloader.get_for_domain(self.url)
        # Lazy %-args: message is only rendered if DEBUG logging is enabled.
        log.debug('worker «%s» downloader created', self.url)

    def run(self):
        """Main loop: prepare, download until stopped, then save/remove."""
        log.debug('worker «%s» running', self.url)
        try:
            self.downloader.prepare()
            self.info['name'] = self.downloader.get_name()
            self.info['nb_pending'] = self.downloader.get_nb_pending()
            self.info['nb_done'] = self.downloader.get_nb_done()
        except Exception as e:
            log.debug('«%s» prepare failed: %s', self.url, e, exc_info=True)
            self.info['running'] = False
            self.info['error'] = Worker.PREPARE_FAILED
            return

        while self.info['running']:
            if self.info['pause']:
                time.sleep(Worker._SLEEP_TIME_SEC)
                continue
            ok = self.downloader.download_image()
            # use «and» so 'running' can still be turned to «False» by the ui
            self.info['running'] = self.info['running'] and ok
            self.info['nb_pending'] = self.downloader.get_nb_pending()
            self.info['nb_done'] = self.downloader.get_nb_done()

        # Loop ended: either the UI stopped us or download_image() failed.
        # Either way, persist (or discard) whatever was downloaded.
        try:
            log.debug('worker «%s» saving data', self.url)
            if self.info['remove']:
                self.downloader.remove()
            else:
                self.downloader.save(self.info['destination'], self.info['zip_it'])
        except Exception as e:
            log.debug('%s «%s» saving failed: %s',
                      self.info['name'], self.url, e, exc_info=True)
            self.info['error'] = Worker.SAVING_FAILED
            return
        log.debug('%s «%s» finished well', self.info['name'], self.url)

        
class WorkerAlreadyExists(Exception): pass


class Scheduler(object):
    """Keeps track of all running Worker threads, keyed by normalized URL."""

    def __init__(self):
        # Manager-backed dicts let the worker and the UI share mutable state.
        self.manager = multiprocessing.Manager()
        self.worker_dict = {}  # url -> shared info dict (worker state/flags)
        self.join_dict = {}    # url -> Worker instance (kept for join())

    def launch_worker(self, url, dest, zip_it):
        """Start a new Worker downloading *url* into directory *dest*.

        :param url: page URL; 'http://' is prepended when no scheme is given.
        :param dest: destination directory passed to the downloader's save().
        :param zip_it: whether the downloader should zip the result.
        :raises: Downloader.NotFound (from downloader.get_for_domain) when
                 no downloader handles the URL's domain.
        :raises WorkerAlreadyExists: a worker for this URL is registered.
        """
        # normalize url: default to http:// when no scheme is given
        if not url.startswith(('http://', 'https://')):
            url = 'http://' + url

        if url in self.worker_dict:
            # include the URL so the error is diagnosable from the traceback
            raise WorkerAlreadyExists(url)

        info = self.manager.dict()
        info['url'] = url
        info['zip_it'] = zip_it
        info['destination'] = dest
        info['running'] = True
        info['pause'] = False
        info['remove'] = False
        info['name'] = ''
        info['nb_done'] = 0
        info['nb_pending'] = 0
        info['error'] = ''

        worker = Worker(info)
        worker.start()
        self.join_dict[url] = worker
        self.worker_dict[url] = info
        log.debug('worker «%s» launched', url)

    def remove_worker(self, url):
        """Join the worker for *url* (blocks until it stops), drop its records.

        No-op when no worker is registered under *url*.
        """
        if url in self.worker_dict:
            # wait for worker to stop
            self.join_dict[url].join()
            # remove associated data
            del self.join_dict[url]
            del self.worker_dict[url]

