import time

import codecs
import datetime
import json
import os
import gzip

from framework.common.pt_logger import logger
from framework.core.crawler_result import Result


class FileOutput(object):
    """Write crawler ``Result`` objects to timestamped JSON files on disk.

    Every dump produces a file named ``<base_path>/<prefix>_<timestamp>.json``
    (with an extra ``.gz`` suffix when compression is enabled).

    Args:
        base_path: Directory under which output files are created.
        compress: When True, write gzip-compressed text files (``.json.gz``).
        loop_through: When True, a list of results is written as one file per
            element; otherwise the whole list goes into a single file.
    """

    def __init__(self, base_path, compress=True, loop_through=True):
        self.base_path = base_path
        self.compress = compress
        self.loop_through = loop_through

    @staticmethod
    def timestamp():
        """Return the current Unix time (float seconds) used in file names."""
        return time.time()

    def _write_json(self, payload, prefix, indent):
        """Serialize *payload* to JSON and write it under ``base_path``.

        Shared implementation behind ``_dump_list`` / ``_dump_single``:
        builds the timestamped path, creates the directory on demand, and
        honours the ``compress`` flag.

        Args:
            payload: Any JSON-serializable object; objects without native
                JSON support fall back to their ``__dict__``.
            prefix: File-name prefix (may be None, which renders as "None").
            indent: ``indent`` argument forwarded to ``json.dumps``.
        """
        output_path = "%s/%s_%s.json" % (self.base_path, prefix, self.timestamp())

        out_dir = os.path.dirname(output_path)
        if out_dir:
            # exist_ok avoids the check-then-create race when several
            # workers initialize the same output directory concurrently.
            os.makedirs(out_dir, exist_ok=True)

        line = json.dumps(payload, default=lambda o: o.__dict__, indent=indent)

        if self.compress:
            with gzip.open(output_path + ".gz", mode="wt") as f_gz:
                f_gz.write(line)
        else:
            # NOTE(review): append mode here vs. gzip's truncating "wt" above
            # looks inconsistent — timestamped names make collisions unlikely,
            # but confirm appending is intentional before relying on it.
            with codecs.open(output_path, mode='ab+', encoding='utf-8') as f:
                logger.info('output file to %s...' % output_path)
                f.write(line)

    def _dump_list(self, results, prefix=None):
        """Write a whole list of results into one compact JSON file."""
        logger.info("output List<Result>, length is %s" % len(results))
        self._write_json(results, prefix, indent=None)

    def _dump_single(self, result, prefix=None):
        """Write a single result into its own pretty-printed JSON file."""
        self._write_json(result, prefix, indent=2)

    def done(self):
        """Completion hook; only logs that output has finished."""
        logger.info("output done")

    def dump(self, results, prefix=None):
        """Dump a ``Result`` or a non-empty list of them to disk.

        A single ``Result`` always gets its own file.  A list is written one
        file per element when ``loop_through`` is set, otherwise as a single
        file.  Empty lists and unrecognized types are silently ignored.
        """
        if isinstance(results, Result):
            self._dump_single(results, prefix)
        elif isinstance(results, list) and results:
            if self.loop_through:
                for result in results:
                    self._dump_single(result, prefix)
            else:
                self._dump_list(results, prefix)