import subprocess
import sys
import threading
from io import StringIO

# Detect POSIX so close_fds can be enabled for Popen below; this stops child
# processes from inheriting open file descriptors. NOTE(review): original author
# was unsure whether this is strictly necessary here — confirm before removing.
ON_POSIX = 'posix' in sys.builtin_module_names


class Scrape:
    """Base class holding the state of one scrape subprocess."""

    def __init__(self, sourceId):
        """Remember the source id and initialise empty process state.

        `subprocess` and `type` are filled in later (by start() / a subclass);
        `buffer` accumulates the child's stdout via a reader thread.
        """
        self.sourceId = sourceId
        self.subprocess = None
        self.type = None
        self.buffer = StringIO()

    def start(self):
        """Launch the scrape; concrete subclasses provide the implementation."""
        pass

    def get_scrape_identifier(self):
        """Return the unique '<sourceId>|<type>' key identifying this scrape."""
        return '|'.join([self.sourceId, self.type])





class LiveScrape(Scrape):
    def __init__(self, sourceId):
        """Scrape subclass for 'live' scrapes run via run_live_scrape.py."""
        super().__init__(sourceId)
        self.type = 'live'

    def start(self):
        """Start live scrape subprocess. nohup is necessary to run several scrapes simultaneously."""
        # stderr is merged into stdout so a single reader thread captures everything.
        # FIX: the previous bufsize=1 (line buffering) is not supported for
        # binary-mode pipes — Python >= 3.8 emits a RuntimeWarning and falls back
        # to the default anyway — so it is omitted here.
        self.subprocess = subprocess.Popen(['nohup', 'python', 'run_live_scrape.py', self.sourceId],
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT,
                                           close_fds=ON_POSIX)
        # Daemon thread drains the child's stdout into self.buffer so the pipe
        # cannot fill up and block the child; daemon=True lets the app exit
        # without joining it.
        self.t = threading.Thread(target=buffer_output, args=(self.subprocess.stdout, self.buffer))
        self.t.daemon = True
        self.t.start()


def buffer_output(out, buffer):
    """Run in a thread: copy the subprocess's binary stdout into *buffer*.

    Reads `out` line by line until EOF (the b'' sentinel), decodes each line,
    and appends it to the text buffer, then closes the pipe.

    FIX: the previous version wrote `line.decode('utf-8').rstrip()`, which
    dropped the newline and ran every line together in the logged buffer
    contents; a '\n' separator is now kept per line. errors='replace' keeps
    the reader thread alive if the child emits non-UTF-8 bytes.
    """
    for line in iter(out.readline, b''):
        buffer.write(line.decode('utf-8', errors='replace').rstrip() + '\n')
    out.close()


class ScrapeManager:
    def __init__(self, logger):
        """Class for managing scrapes."""
        self.logger = logger

        # dict of currently running scrapes, keyed by scrape identifier
        self.scrapes = dict()
        # per-key count of how often a still-running scrape was requested again
        self.run_counter = dict()

    def poll_processes(self):
        """Polls all running scrapes. Periodically executed from Flask app."""
        ended_scrapes = []

        # Create a list from the dict to then iterate over. Prevents concurrent modification
        for key, scrape in list(self.scrapes.items()):

            # FIX (comment was inverted): poll() returns the exit code — i.e.
            # NOT None — once the subprocess has ended.
            if scrape.subprocess.poll() is not None:
                self.logger.info('%s has ended: %s', scrape.get_scrape_identifier(), scrape.buffer.getvalue())
                scrape.buffer.close()
                ended_scrapes.append(key)

        # update scrape dict; also drop the stale run counter so the
        # bookkeeping dict cannot grow without bound over time
        for key in ended_scrapes:
            self.scrapes.pop(key)
            self.run_counter.pop(key, None)

    def add_scrape(self, scrape):
        """Start new scrape"""

        key = scrape.get_scrape_identifier()

        if key in self.scrapes:

            # if scrape is already running increment counter. Restart scrape if counter exceeds limit
            if key in self.run_counter:
                self.run_counter[key] += 1
                if self.run_counter[key] > 2:
                    self.logger.info('%s will be restarted', key)
                    old = self.scrapes[key]
                    old.subprocess.kill()
                    # FIX: reap the killed child so it does not linger as a
                    # zombie process on POSIX systems
                    old.subprocess.wait()
                    self.logger.info('%s has been killed: %s', old.get_scrape_identifier(),
                                     old.buffer.getvalue())
                    # FIX: release the buffer, consistent with poll_processes()
                    old.buffer.close()
                    self.scrapes.pop(key)
                    # recursing hits the else-branch below (key was just popped)
                    # and starts the replacement with a fresh counter of 0
                    self.add_scrape(scrape)
            else:
                self.run_counter[key] = 1
                self.logger.info('%s is still running and will not be started again', key)

        else:
            scrape.start()
            self.logger.info('starting scrape. source: %s', key)
            self.scrapes[key] = scrape
            self.run_counter[key] = 0

