import contextlib
import json
import os
import sys
import threading
import time
import traceback
from typing import Callable, Any, Dict, List, Literal, NewType, TypedDict, Union
from unittest import TestProgram, TextTestRunner, installHandler, registerResult, TestSuite, TestCase, TextTestResult, TestLoader
from concurrent.futures import ThreadPoolExecutor
from .funcTracer import trace_call
from .sync import PropSemaphore, PropertyExecManager
import random
import warnings
from dataclasses import dataclass, asdict
import requests
from .absDriver import AbstractDriver
from functools import wraps
from .utils import TimeStamp, getLogger, singleton
from .hypiumDriver import HDriver
from pathlib import Path
from .traceRecorder import TraceRecorder
import types


# Attribute names the decorators below attach to wrapped property methods.
PRECONDITIONS_MARKER = "preconds"
PROP_MARKER = "prop"
MAX_TRIES_MARKER = "max_tries"

logger = getLogger(__name__)


# Class Typing: aliases for readability of the property registry.
PropName = NewType("PropName", str)
PropertyStore = NewType("PropertyStore", Dict[PropName, TestCase])

# Run timestamp naming the result file and the per-run output directory.
TIME_STAMP = TimeStamp().getTimeStamp()
RESFILE = f"result_{TIME_STAMP}.json"

# Declared here, assigned in Options.__post_init__; directory for failure screenshots.
screenshots_dir: Union[Path, str]

def precondition(precond: Callable[[Any], bool]) -> Callable:
    """the decorator @precondition

    @precondition specifies when the property could be executed.
    A property could have multiple preconditions, each of which is specified by @precondition.
    """
    def decorate(f):
        @wraps(f)
        def precondition_wrapper(*args, **kwargs):
            return f(*args, **kwargs)

        # accumulate this precond onto any preconds registered earlier
        already_registered = getattr(f, PRECONDITIONS_MARKER, tuple())
        setattr(
            precondition_wrapper,
            PRECONDITIONS_MARKER,
            already_registered + (precond,),
        )
        return precondition_wrapper

    return decorate

def prob(p: float):
    """the decorator @prob

    @prob specifies the probability of executing a property once its
    preconditions are satisfied.

    :param p: execution probability, in the half-open interval (0, 1].
    :raises ValueError: if p is not within (0, 1].
    """
    p = float(p)
    if not 0 < p <= 1.0:
        # Fix: typos in the user-facing message ("propbability", "should between")
        raise ValueError("The probability should be between 0 and 1")
    def accept(f):
        @wraps(f)
        def prob_wrapper(*args, **kwargs):
            return f(*args, **kwargs)

        # record the probability on the wrapper for the runner to read
        setattr(prob_wrapper, PROP_MARKER, p)

        return prob_wrapper

    return accept


def max_tries(n: int):
    """the decorator @max_tries

    @max_tries specifies the maximum number of times a property may be
    executed.

    :param n: positive maximum number of tries.
    :raises ValueError: if n is not a positive integer.
    """
    n = int(n)
    if not n > 0:
        # Fix: typo "maxium" in the user-facing message
        raise ValueError("The maximum tries should be a positive integer.")
    def accept(f):
        @wraps(f)
        def max_tries_wrapper(*args, **kwargs):
            return f(*args, **kwargs)

        # record the limit on the wrapper for the runner to read
        setattr(max_tries_wrapper, MAX_TRIES_MARKER, n)

        return max_tries_wrapper

    return accept


@dataclass
class Options:
    """
    Kea and Fastbot configurations
    """
    # the driver_name in script (if self.d, then d.)
    driverName: str
    # the driver (only U2Driver available now)
    Driver: AbstractDriver
    # list of package names. Specify the apps under test
    packageNames: List[str]
    # target device serial; None lets the driver pick its default device
    serial: Union[str, None] = None
    # test agent. "native" for stage 1 and "u2" for stage 1~3
    agent: Literal["u2", "native"] = "u2"
    # max step in exploration (available in stage 2~3)
    maxStep: Union[str, float] = float("inf")
    # time(mins) for exploration
    running_mins: int = 10
    # time(ms) to wait when exploring the app
    throttle: int = 200
    # the output_dir for saving logs and results
    output_dir: str = "output"

    def __post_init__(self):
        # Bind the device serial and prepare the timestamped output layout.
        if self.serial and self.Driver:
            self.Driver.setDeviceSerial(self.serial)
        self.output_dir: Path = Path(self.output_dir).absolute()
        self.output_dir.mkdir(parents=True, exist_ok=True)
        # Rebind the module-level result locations under output_dir.
        # NOTE: TIME_STAMP is only read here, never reassigned.
        global screenshots_dir, TIME_STAMP, RESFILE
        output_path = Path(self.output_dir) / TIME_STAMP
        output_path.mkdir(parents=True, exist_ok=True)
        # NOTE(review): output_dir ends up a str here despite the Path hint above
        self.output_dir = str(output_path)
        screenshots_dir = str(output_path / "screenshots")
        RESFILE = str(output_path / RESFILE)


@dataclass
class PropStatistic:
    """Per-property execution counters, serialized into the JSON result."""

    # times the property's preconditions were observed to hold
    precond_satisfied: int = 0
    # times the property was actually executed
    executed: int = 0
    # assertion failures raised while executing the property
    fail: int = 0
    # unexpected errors raised while executing the property
    error: int = 0

class PBTTestResult(dict):
    """dict mapping a full property name to its PropStatistic record."""

    def __getitem__(self, key) -> PropStatistic:
        # plain dict lookup; the override only narrows the return type
        return super().__getitem__(key)


def getFullPropName(testCase: TestCase):
    return ".".join([
        testCase.__module__,
        testCase.__class__.__name__,
        testCase._testMethodName
    ])

class JsonResult(TextTestResult):
    """TextTestResult that keeps per-property statistics and dumps them as JSON."""

    # class-level registry of statistics, keyed by full property name
    res: PBTTestResult
    # info about the most recently started property execution
    lastest_exec_info = {
        "prop": None,
        "state": None
    }

    @classmethod
    def setProperties(cls, allProperties: Dict):
        """Initialize one fresh PropStatistic per collected property."""
        cls.res = {
            getFullPropName(testCase): PropStatistic()
            for testCase in allProperties.values()
        }

    def flushResult(self, outfile):
        """Write the current statistics to ``outfile`` as indented JSON."""
        serializable = {name: asdict(stat) for name, stat in self.res.items()}
        with open(outfile, "w", encoding="utf-8") as fp:
            json.dump(serializable, fp, indent=4)

    def addExcuted(self, test: TestCase):
        """Record that ``test`` started executing."""
        full_name = getFullPropName(test)
        self.lastest_exec_info["prop"] = full_name
        self.lastest_exec_info["state"] = "start"
        self.res[full_name].executed += 1

    def addPrecondSatisfied(self, test: TestCase):
        """Record that the preconditions of ``test`` held."""
        self.res[getFullPropName(test)].precond_satisfied += 1

    def addFailure(self, test, err):
        """Count the failure and capture a screenshot of the device."""
        super().addFailure(test, err)
        self.res[getFullPropName(test)].fail += 1
        HDriver.takeScreenshot(screenshots_dir, getFullPropName(test))

    def addError(self, test, err):
        """Count the error and capture a screenshot of the device."""
        super().addError(test, err)
        self.res[getFullPropName(test)].error += 1
        HDriver.takeScreenshot(screenshots_dir, getFullPropName(test))

    def getExcuted(self, test: TestCase):
        """Return how many times ``test`` has been executed."""
        return self.res[getFullPropName(test)].executed

class KeaPropRunner(TextTestRunner):
    """A TextTestRunner driving property-based testing (PBT).

    It polls a PropSemaphore for "execute property" signals, picks one
    property whose preconditions currently hold, runs it, and flushes
    per-property statistics to RESFILE after every execution.
    """
    # result class holding per-property statistics
    resultclass: JsonResult
    # all collected properties, keyed by test method name
    allProperties: PropertyStore
    options: Options = None

    @classmethod
    def setOptions(cls, options: Options):
        """Validate and store the global Options for this runner class.

        :raises ValueError: if packageNames is not a non-empty list.
        """
        # Fix: the original check (`not isinstance(...) and len(...) > 0`)
        # let an empty list through; the intent is a non-empty list.
        if not (isinstance(options.packageNames, list) and len(options.packageNames) > 0):
            raise ValueError("packageNames should be given in a list.")
        if options.Driver is not None and options.agent == "native":
            print("[Warning] Can not use any Driver when running native mode.", flush=True)
            options.Driver = None
        cls.options = options

    def run(self, test):
        """Run the PBT main loop over the properties collected from ``test``.

        :param test: a TestSuite from which properties are collected.
        :return: the populated JsonResult.
        """
        self.allProperties = dict()
        self.collectAllProperties(test)

        if len(self.allProperties) == 0:
            print("[Warning] No property has been found.", flush=True)

        JsonResult.setProperties(self.allProperties)
        self.resultclass = JsonResult

        result: JsonResult = self._makeResult()
        registerResult(result)
        result.failfast = self.failfast
        result.buffer = self.buffer
        result.tb_locals = self.tb_locals

        with warnings.catch_warnings():
            if self.warnings:
                # if self.warnings is set, use it to filter all the warnings
                warnings.simplefilter(self.warnings)
                # special-case the deprecated unittest method warnings,
                # exactly as the stock unittest runner does
                if self.warnings in ["default", "always"]:
                    warnings.filterwarnings(
                        "module",
                        category=DeprecationWarning,
                        message=r"Please use assert\w+ instead.",
                    )

            # Fix: the main loop used to be nested inside `if self.warnings:`,
            # so a runner constructed with warnings=None silently did nothing
            # (KeaHookRunner already keeps its loop outside that guard).
            # initialize the result.json file
            result.flushResult(outfile=RESFILE)
            # setUp for the u2 driver
            self.scriptDriver = self.options.Driver.getScriptDriver()

            self.semaphore = PropSemaphore()

            while not self.semaphore.exit.is_set():

                # Polling for the execProp signal
                if not self.semaphore.execProp.acquire(timeout=0.05):
                    # no signal yet; loop again so the exit flag is re-checked
                    continue

                try:
                    propsSatisfiedPrecond = self.getValidProperties()
                except requests.ConnectionError:
                    # the device/driver connection is gone; stop the loop
                    break

                print(f"{len(propsSatisfiedPrecond)} precond satisfied.", flush=True)

                # Go to the next round if no precond satisfied
                if len(propsSatisfiedPrecond) == 0:
                    self.semaphore.finishProp.release()
                    continue

                # draw one random threshold and keep the properties whose
                # @prob value is at least that threshold
                p = random.random()
                propsNameFilteredByP = []
                for propName, candidate in propsSatisfiedPrecond.items():
                    result.addPrecondSatisfied(candidate)
                    # Fix: the @prob value is stored under PROP_MARKER on the
                    # decorated method; the original read attribute "p" from
                    # the TestCase, which nothing sets, so @prob was ignored.
                    if getattr(getattr(candidate, propName), PROP_MARKER, 1) >= p:
                        propsNameFilteredByP.append(propName)

                if len(propsNameFilteredByP) == 0:
                    print("Not executed any property due to probability.", flush=True)
                    # NOTE(review): unlike the empty-precond branch, finishProp
                    # is not released here — confirm the peer does not block.
                    continue

                execPropName = random.choice(propsNameFilteredByP)
                prop = propsSatisfiedPrecond[execPropName]
                # Dependency Injection: give the property a fresh script driver
                self.scriptDriver = self.options.Driver.getScriptDriver()
                setattr(prop, self.options.driverName, self.scriptDriver)
                print("execute property %s." % execPropName, flush=True)

                result.addExcuted(prop)
                try:
                    prop(result)
                finally:
                    # Fix: removed a bare `self.scriptDriver` expression here;
                    # it had no effect (likely leftover cleanup code).
                    result.printErrors()

                result.flushResult(outfile=RESFILE)
                self.semaphore.finishProp.release()

            result.flushResult(outfile=RESFILE)

            self.tearDown()

        # Source code from the stock unittest runner: summarize the result
        expectedFails = unexpectedSuccesses = skipped = 0
        try:
            results = map(
                len,
                (result.expectedFailures, result.unexpectedSuccesses, result.skipped),
            )
        except AttributeError:
            pass
        else:
            expectedFails, unexpectedSuccesses, skipped = results

        infos = []
        if not result.wasSuccessful():
            self.stream.write("FAILED")
            failed, errored = len(result.failures), len(result.errors)
            if failed:
                infos.append("failures=%d" % failed)
            if errored:
                infos.append("errors=%d" % errored)
        else:
            self.stream.write("OK")
        if skipped:
            infos.append("skipped=%d" % skipped)
        if expectedFails:
            infos.append("expected failures=%d" % expectedFails)
        if unexpectedSuccesses:
            infos.append("unexpected successes=%d" % unexpectedSuccesses)
        if infos:
            self.stream.writeln(" (%s)" % (", ".join(infos),))
        else:
            self.stream.write("\n")
        self.stream.flush()
        return result

    def getValidProperties(self) -> PropertyStore:
        """Return the properties whose preconditions all pass right now."""
        scriptDriver = self.options.Driver.getScriptDriver()

        validProps: PropertyStore = dict()
        for propName, test in self.allProperties.items():
            valid = True
            prop = getattr(test, propName)
            # check if all preconds passed
            try:
                for precond in prop.preconds:
                    # Dependency injection: driver used by the precond checker
                    setattr(test, self.options.driverName, scriptDriver)
                    # execute the precond
                    if not precond(test):
                        valid = False
                        break
            except Exception as e:
                # a crashing precond disqualifies the property but must not
                # kill the whole polling loop
                valid = False
                logger.error(f"[ERROR] Failed to execute preconds of {getFullPropName(test)}")
                traceback.print_exception(e)
            # if all the preconds passed, keep it as a candidate
            if valid:
                validProps[propName] = test
        return validProps

    def collectAllProperties(self, test: TestSuite):
        """Collect all @precondition-decorated properties into allProperties."""

        def remove_setUp(testCase: TestCase):
            """Neutralize setUp: PBT drives the app state itself."""
            def setUp(self): ...
            testCase.setUp = types.MethodType(setUp, testCase)

        def remove_tearDown(testCase: TestCase):
            """Neutralize tearDown: PBT drives the app state itself."""
            def tearDown(self): ...
            # Fix: the original rebound the local variable instead of
            # assigning the attribute, so tearDown was never removed.
            testCase.tearDown = types.MethodType(tearDown, testCase)

        def iter_tests(suite):
            # depth-first traversal flattening nested TestSuites
            for item in suite:
                if isinstance(item, TestSuite):
                    yield from iter_tests(item)
                else:
                    yield item

        # Traverse the suite to find methods marked with @precondition
        for t in iter_tests(test):
            testMethodName = t._testMethodName
            # get the test method and check if it's a property
            testMethod = getattr(t, testMethodName)
            if hasattr(testMethod, PRECONDITIONS_MARKER):
                # remove the hook funcs in its TestCase
                remove_setUp(t)
                remove_tearDown(t)
                # save it into allProperties for PBT
                self.allProperties[testMethodName] = t
                print(f"[INFO] Load property: {getFullPropName(t)}", flush=True)

    def tearDown(self):
        # TODO Add other tearDown methods (remove local port, etc.)
        pass


class KeaRPCRunner(TextTestRunner):
    """A TextTestRunner driving PBT coordinated through a PropertyExecManager.

    Unlike KeaPropRunner, execution turns are handed between this runner and
    the monkey via PropertyExecManager, and each property run is traced with
    sys.settrace for step reporting.
    """
    # result class holding per-property statistics
    resultclass: JsonResult
    # all collected properties, keyed by test method name
    allProperties: PropertyStore
    options: Options = None

    @classmethod
    def setOptions(cls, options: Options):
        """Validate and store the global Options for this runner class.

        :raises ValueError: if packageNames is not a non-empty list.
        """
        # Fix: the original check (`not isinstance(...) and len(...) > 0`)
        # let an empty list through; the intent is a non-empty list.
        if not (isinstance(options.packageNames, list) and len(options.packageNames) > 0):
            raise ValueError("packageNames should be given in a list.")
        if options.Driver is not None and options.agent == "native":
            print("[Warning] Can not use any Driver when running native mode.", flush=True)
            options.Driver = None
        cls.options = options

    def run(self, test):
        """Run the RPC-coordinated PBT main loop over ``test``.

        :param test: a TestSuite from which properties are collected.
        :return: the populated JsonResult.
        """
        self.allProperties = dict()
        self.collectAllProperties(test)

        if len(self.allProperties) == 0:
            print("[Warning] No property has been found.", flush=True)

        JsonResult.setProperties(self.allProperties)
        self.resultclass = JsonResult

        result: JsonResult = self._makeResult()
        registerResult(result)
        result.failfast = self.failfast
        result.buffer = self.buffer
        result.tb_locals = self.tb_locals

        with warnings.catch_warnings():
            if self.warnings:
                # if self.warnings is set, use it to filter all the warnings
                warnings.simplefilter(self.warnings)
                # special-case the deprecated unittest method warnings,
                # exactly as the stock unittest runner does
                if self.warnings in ["default", "always"]:
                    warnings.filterwarnings(
                        "module",
                        category=DeprecationWarning,
                        message=r"Please use assert\w+ instead.",
                    )

            # Fix: the main loop used to be nested inside `if self.warnings:`,
            # so a runner constructed with warnings=None silently did nothing
            # (KeaHookRunner already keeps its loop outside that guard).
            # initialize the result.json file
            result.flushResult(outfile=RESFILE)
            # setUp for the u2 driver
            self.scriptDriver = self.options.Driver.getScriptDriver()

            self.propExecManager = PropertyExecManager()

            while not self.propExecManager.exit_event.is_set():

                # Poll until it is kea2's turn to run
                if not self.propExecManager.kea2_runing.is_set():
                    # no signal yet; sleep briefly and re-check the exit flag
                    time.sleep(0.05)
                    continue

                try:
                    propsSatisfiedPrecond = self.getValidProperties()
                except requests.ConnectionError:
                    # the device/driver connection is gone; stop the loop
                    break

                print(f"{len(propsSatisfiedPrecond)} precond satisfied.", flush=True)

                # hand control back to the monkey if nothing is runnable
                if len(propsSatisfiedPrecond) == 0:
                    self.propExecManager.propExecInfo.clear()
                    self.propExecManager.switch_controller("monkey")
                    continue

                # draw one random threshold and keep the properties whose
                # @prob value is at least that threshold
                p = random.random()
                propsNameFilteredByP = []
                for propName, candidate in propsSatisfiedPrecond.items():
                    result.addPrecondSatisfied(candidate)
                    # Fix: the @prob value is stored under PROP_MARKER on the
                    # decorated method; the original read attribute "p" from
                    # the TestCase, which nothing sets, so @prob was ignored.
                    if getattr(getattr(candidate, propName), PROP_MARKER, 1) >= p:
                        propsNameFilteredByP.append(propName)

                if len(propsNameFilteredByP) == 0:
                    print("Not executed any property due to probability.", flush=True)
                    # NOTE(review): unlike the empty-precond branch, control is
                    # not switched back to the monkey here — confirm intended.
                    continue

                execPropName = random.choice(propsNameFilteredByP)
                prop = propsSatisfiedPrecond[execPropName]
                # Dependency Injection: give the property a fresh script driver
                self.scriptDriver = self.options.Driver.getScriptDriver()
                setattr(prop, self.options.driverName, self.scriptDriver)
                print("execute property %s." % execPropName, flush=True)

                result.addExcuted(prop)

                self.propExecManager.propExecInfo.setPropStepInfo(
                    execPropName, "OnGoing"
                )

                try:
                    sys.settrace(trace_call)
                    prop(result)
                finally:
                    # Fix: always uninstall the tracer, even if the property
                    # raised (the original left it installed on exceptions).
                    # Also removed a no-op bare `self.scriptDriver` expression.
                    sys.settrace(None)
                    result.printErrors()

                result.flushResult(outfile=RESFILE)
                self.propExecManager.propExecInfo.state = "Completed"
                self.propExecManager.switch_controller("monkey")

            result.flushResult(outfile=RESFILE)

            self.tearDown()

        # Source code from the stock unittest runner: summarize the result
        expectedFails = unexpectedSuccesses = skipped = 0
        try:
            results = map(
                len,
                (result.expectedFailures, result.unexpectedSuccesses, result.skipped),
            )
        except AttributeError:
            pass
        else:
            expectedFails, unexpectedSuccesses, skipped = results

        infos = []
        if not result.wasSuccessful():
            self.stream.write("FAILED")
            failed, errored = len(result.failures), len(result.errors)
            if failed:
                infos.append("failures=%d" % failed)
            if errored:
                infos.append("errors=%d" % errored)
        else:
            self.stream.write("OK")
        if skipped:
            infos.append("skipped=%d" % skipped)
        if expectedFails:
            infos.append("expected failures=%d" % expectedFails)
        if unexpectedSuccesses:
            infos.append("unexpected successes=%d" % unexpectedSuccesses)
        if infos:
            self.stream.writeln(" (%s)" % (", ".join(infos),))
        else:
            self.stream.write("\n")
        self.stream.flush()
        return result

    def getValidProperties(self) -> PropertyStore:
        """Return the properties whose preconditions all pass right now."""
        scriptDriver = self.options.Driver.getScriptDriver()

        validProps: PropertyStore = dict()
        for propName, test in self.allProperties.items():
            valid = True
            prop = getattr(test, propName)
            # check if all preconds passed
            try:
                for precond in prop.preconds:
                    # Dependency injection: driver used by the precond checker
                    setattr(test, self.options.driverName, scriptDriver)
                    # execute the precond
                    if not precond(test):
                        valid = False
                        break
            except Exception as e:
                # a crashing precond disqualifies the property but must not
                # kill the whole polling loop
                valid = False
                logger.error(f"[ERROR] Failed to execute preconds of {getFullPropName(test)}")
                traceback.print_exception(e)
            # if all the preconds passed, keep it as a candidate
            if valid:
                validProps[propName] = test
        return validProps

    def collectAllProperties(self, test: TestSuite):
        """Collect all @precondition-decorated properties into allProperties."""

        def remove_setUp(testCase: TestCase):
            """Neutralize setUp: PBT drives the app state itself."""
            def setUp(self): ...
            testCase.setUp = types.MethodType(setUp, testCase)

        def remove_tearDown(testCase: TestCase):
            """Neutralize tearDown: PBT drives the app state itself."""
            def tearDown(self): ...
            # Fix: the original rebound the local variable instead of
            # assigning the attribute, so tearDown was never removed.
            testCase.tearDown = types.MethodType(tearDown, testCase)

        def iter_tests(suite):
            # depth-first traversal flattening nested TestSuites
            for item in suite:
                if isinstance(item, TestSuite):
                    yield from iter_tests(item)
                else:
                    yield item

        # Traverse the suite to find methods marked with @precondition
        for t in iter_tests(test):
            testMethodName = t._testMethodName
            # get the test method and check if it's a property
            testMethod = getattr(t, testMethodName)
            if hasattr(testMethod, PRECONDITIONS_MARKER):
                # remove the hook funcs in its TestCase
                remove_setUp(t)
                remove_tearDown(t)
                # save it into allProperties for PBT
                self.allProperties[testMethodName] = t
                print(f"[INFO] Load property: {getFullPropName(t)}", flush=True)

    def tearDown(self):
        # TODO Add other tearDown methods (remove local port, etc.)
        pass


class KeaPropLoader(TestLoader):
    """A TestLoader that also collects Hook subclasses from loaded modules."""

    def __init__(self):
        super().__init__()
        # Fix: hooks were stored in a class-level list, so every loader
        # instance shared (and accumulated into) the same collection.
        self._hooks: List["Hook"] = []

    def loadHooks(self, hookClass):
        """Return instantiated hooks for the given Hook subclass.

        (The original docstring was copied verbatim from unittest and wrong.)
        """
        hooks = []
        if issubclass(hookClass, Hook):
            hooks.append(hookClass())
        return hooks

    @property
    def hooks(self):
        # hooks gathered so far while loading test modules
        return self._hooks

    def loadTestsFromModule(self, module, *args, pattern=None, **kws):
        """Collect Hook subclasses from ``module``, then load tests as usual."""
        for name in dir(module):
            obj = getattr(module, name)
            # instantiate every concrete Hook subclass defined in the module
            if (
                isinstance(obj, type)
                and issubclass(obj, Hook)
                and obj is not Hook
            ):
                self._hooks.extend(self.loadHooks(obj))
        return super().loadTestsFromModule(module, *args, pattern=pattern, **kws)


class KeaTestProgram(TestProgram):
    """TestProgram variant that also discovers Hook instances via the loader
    and runs them on a daemon thread while the tests execute.
    """

    def __init__(self, module='__main__', defaultTest=None, argv=None,
                testRunner=None, testLoader=None,
                exit=True, verbosity=1, failfast=None, catchbreak=None,
                buffer=None, warnings=None, *, tb_locals=False):
        # Fix: the original default `testLoader=KeaPropLoader()` was a mutable
        # default argument — one loader instance (and its collected hooks) was
        # shared by every program created without an explicit loader.
        if testLoader is None:
            testLoader = KeaPropLoader()
        if isinstance(module, str):
            self.module = __import__(module)
            for part in module.split('.')[1:]:
                self.module = getattr(self.module, part)
        else:
            self.module = module
        if argv is None:
            argv = sys.argv

        self.exit = exit
        self.failfast = failfast
        self.catchbreak = catchbreak
        self.verbosity = verbosity
        self.buffer = buffer
        self.tb_locals = tb_locals
        if warnings is None and not sys.warnoptions:
            # default warning filter, mirroring unittest.TestProgram
            self.warnings = 'default'
        else:
            self.warnings = warnings
        self.defaultTest = defaultTest
        self.testRunner = testRunner
        self.testLoader: KeaPropLoader = testLoader
        self.progName = os.path.basename(argv[0])
        self.parseArgs(argv)
        # hooks were gathered by the loader while parseArgs loaded the modules
        self.hooks = self.testLoader.hooks
        hooks_thread = threading.Thread(target=self.runHooks, daemon=True)
        hooks_thread.start()
        self.runTests()

    def runHooks(self):
        """Instantiate a KeaHookRunner and run the collected hooks.

        Mirrors unittest.TestProgram.runTests' fallback construction logic.
        """
        if self.catchbreak:
            installHandler()
        self.hookRunner = KeaHookRunner
        if isinstance(self.hookRunner, type):
            try:
                try:
                    hookRunner = self.hookRunner(verbosity=self.verbosity,
                                                 failfast=self.failfast,
                                                 buffer=self.buffer,
                                                 warnings=self.warnings,
                                                 tb_locals=self.tb_locals)
                except TypeError:
                    # didn't accept the tb_locals argument
                    hookRunner = self.hookRunner(verbosity=self.verbosity,
                                                 failfast=self.failfast,
                                                 buffer=self.buffer,
                                                 warnings=self.warnings)
            except TypeError:
                # didn't accept the verbosity, buffer or failfast arguments
                hookRunner = self.hookRunner()
        else:
            # it is assumed to be a TestRunner instance
            hookRunner = self.hookRunner
        self.hooks_result = hookRunner.run(self.hooks)


class KeaHookRunner(TextTestRunner):
    """Runs user hooks before and after every monkey step.

    The loop is coordinated with the monkey driver through a PropSemaphore
    handshake: acquire before_monkey, run "before" hooks, release
    before_monkey_done; then acquire after_monkey, run "after" hooks,
    release after_monkey_done — repeated until the exit event is set.
    """

    def __init__(self, stream = None, descriptions = True, verbosity = 1, failfast = False, buffer = False, resultclass = None, warnings = None, *, tb_locals = False):
        super().__init__(stream, descriptions, verbosity, failfast, buffer, resultclass, warnings, tb_locals=tb_locals)
        # NOTE(review): this pool is never used nor shut down in this class —
        # confirm whether it is dead code or meant for parallel hook execution.
        self.threadPool = ThreadPoolExecutor(max_workers=8)

    def run(self, hooks: List["Hook"]):
        """Run the given hooks around monkey steps until exit is signalled."""
        result = self._makeResult()
        registerResult(result)
        result.failfast = self.failfast
        result.buffer = self.buffer
        result.tb_locals = self.tb_locals
        with warnings.catch_warnings():
            if self.warnings:
                # if self.warnings is set, use it to filter all the warnings
                warnings.simplefilter(self.warnings)
                # if the filter is 'default' or 'always', special-case the
                # warnings from the deprecated unittest methods to show them
                # no more than once per module, because they can be fairly
                # noisy.  The -Wd and -Wa flags can be used to bypass this
                # only when self.warnings is None.
                if self.warnings in ['default', 'always']:
                    warnings.filterwarnings('module',
                            category=DeprecationWarning,
                            message=r'Please use assert\w+ instead.')

            self.semaphore = PropSemaphore()

            while not self.semaphore.exit.is_set():

                # wait until the driver is about to run a monkey step
                self.semaphore.before_monkey.acquire()

                for hook in hooks:
                    hook(result, "before")

                # signal that all "before" hooks have finished
                self.semaphore.before_monkey_done.release()

                # wait until the monkey step has completed
                self.semaphore.after_monkey.acquire()

                for hook in hooks:
                    hook(result, "after")

                # signal that all "after" hooks have finished
                self.semaphore.after_monkey_done.release()


        result.printErrors()
        return result

def _addError(result, test, exc_info):
    """Record exc_info on result as a failure or an error, by exception type."""
    if result is None or exc_info is None:
        # nothing to record
        return
    exc_type = exc_info[0]
    if issubclass(exc_type, test.failureException):
        result.addFailure(test, exc_info)
    else:
        result.addError(test, exc_info)

class KeaHookOutcome(object):
    """Tracks success/failure state across the parts of a hook execution."""

    def __init__(self, result=None):
        # set to True when a failure is the expected behaviour
        self.expecting_failure = False
        # the TestResult collecting failures/errors (may be None)
        self.result = result
        # True once the whole before/after pair has completed
        self.done = False
        self.success = True
        self.expectedFailure = None

    @contextlib.contextmanager
    def testPartExecutor(self, test_case):
        """Context manager recording any exception raised by a hook part."""
        previous_success = self.success
        self.success = True
        try:
            yield
        except KeyboardInterrupt:
            # user interrupts always propagate
            raise
        except:
            captured = sys.exc_info()
            if self.expecting_failure:
                self.expectedFailure = captured
            else:
                self.success = False
                _addError(self.result, test_case, captured)
            # explicitly break a reference cycle:
            # exc_info -> frame -> exc_info
            captured = None
        finally:
            self.success = previous_success and self.success


class HookTestBase(TestCase):
    """TestCase variant whose run() executes a named hook phase.

    A fresh KeaHookOutcome is created for the "before" phase and reused by
    the matching "after" phase, so one before/after pair is reported as a
    single result.
    """

    def __call__(self, result, hookName: Literal["before", "after"]):
        return self.run(result, hookName)

    def run(self, result, hookName: Literal["before", "after"]):
        """Execute this hook's ``before`` or ``after`` method.

        :param result: the TestResult collecting failures/errors.
        :param hookName: which hook phase to execute.
        :return: the (possibly updated) result object.
        """
        try:
            testMethod = getattr(self, hookName)

            # start a fresh outcome at the "before" phase; the "after"
            # phase reuses it so both parts share one success flag
            if hookName == "before":
                self._outcome = KeaHookOutcome(result)
            try:
                with self._outcome.testPartExecutor(self):
                    testMethod()

                # only the "after" phase closes the outcome; success is
                # recorded once for the whole before/after pair
                if hookName == "after":
                    self._outcome.done = True
                    if self._outcome.success:
                        result.addSuccess(self)
                return result
            finally:
                # drop the outcome once the pair completed, breaking any
                # reference cycles it may hold
                if self._outcome.done:
                    self._outcome = None
        finally:
            result.stopTest(self)


class Hook(HookTestBase):
    """Base class for user hooks executed around each monkey step.

    Subclasses override ``before`` and/or ``after``; a script driver and a
    trace recorder are made available on the instance.
    """

    def __init__(self):
        super().__init__()
        # script driver plus a recorder that traces its interactions
        self.driver = HDriver.getScriptDriver()
        self.traceRecorder = TraceRecorder(self.driver)

    def before(self):
        """Hook before monkey step; override in subclasses."""
        pass

    def after(self):
        """Hook after monkey step; override in subclasses."""
        pass
