import subprocess
from .source_file_validator import SourceFileValidator
from .source_data_loader import SourceDataLoader
from .execution_recorder import ExecutionRecorder
from .sinker_executor import SinkerExecutor
from utils import LOG
from script_generator.generator_chain import GeneratorChain

class CliRunner:
    """Drives the end-to-end CLI fake-data pipeline.

    Steps: validate the input zip, load the source data, generate (or reuse)
    a fake-data script, run it in a subprocess, optionally hand the result to
    the sinker, and persist an execution record for future reuse.
    """

    def __init__(self, config: dict) -> None:
        """Wire up all pipeline collaborators from the parsed configuration.

        Args:
            config: Application configuration. Reads the 'cli-mode',
                'common-paths' and 'openai' sections, plus the optional
                'row_number' (default 10) and 'use_zip' flags.
        """
        self.config = config
        self.cli_mode_config = config.get('cli-mode')
        self.common_paths = config.get('common-paths')
        # NOTE(review): a missing 'openai' section yields None here and raises
        # AttributeError when GeneratorChain is built below — presumably the
        # config is pre-validated upstream; confirm.
        self.openai_config = config.get('openai')
        self.row_number = config.get('row_number', 10)
        self.use_zip = config.get('use_zip')
        self.file_validator = SourceFileValidator(self.cli_mode_config, self.common_paths, self.use_zip)
        self.data_loader = SourceDataLoader(self.cli_mode_config, self.common_paths)
        self.sinker_executor = SinkerExecutor(self.cli_mode_config, self.common_paths)
        self.generator_chain = GeneratorChain(model_name=self.openai_config.get("model"),
                                              api_key=self.openai_config.get("api-key"),
                                              base_url=self.openai_config.get("base-url"),
                                              common_paths=self.common_paths)
        self.execution_recorder = ExecutionRecorder(self.cli_mode_config, self.common_paths)

    def run(self) -> None:
        """Execute the pipeline once, returning early on any failed step."""
        output_dir, zip_filename, valid = self.file_validator.validate_zipfiles()
        if not valid:
            LOG.error('Validation failed')
            return
        LOG.info(f'1. validation successful, use_zip: {self.use_zip}')
        unchanged, example_string, rules_df = self.data_loader.load_data(output_dir)
        LOG.debug(f'example_string: {example_string}')
        LOG.info('2. data loaded successfully')
        module_name = None
        data_type = None
        if unchanged:
            # Data files are identical to a previous run: try to reuse the
            # recorded script instead of regenerating it via the LLM chain.
            module_name, data_type = self.execution_recorder.load_script_record(zip_filename)
            if module_name:
                LOG.info('3. no changes in the data files, use the existing script')
            else:
                LOG.warning('Script not found in the execution recorder')
        if not unchanged or not module_name:
            module_name, data_type, success = self.generator_chain.run(example_string, rules_df)
            if not success:
                LOG.error('Script generation failed')
                return
            LOG.info('3. script generated successfully')
        # Run the generated module in a fresh interpreter. Passing argv as a
        # list (shell=False) avoids shell interpretation of zip_filename.
        result = subprocess.run(['python', 'fake_data_script_executer/script_executor.py',
                                 '--name', f'{self.common_paths.get("fake-data-generator-path")}.{module_name}',
                                 '--zip_filename', zip_filename,
                                 '--row_number', f'{self.row_number}'])
        # BUGFIX: the exit status was previously ignored, so step 4 was
        # reported successful — and the sinker/record steps ran — even when
        # the script executor crashed or exited non-zero.
        if result.returncode != 0:
            LOG.error('Fake data generation failed')
            return
        LOG.info('4. fake data file generated successfully')
        continue_execution = input('Do you want to continue the execution? (y/n): ')
        # strip() tolerates accidental surrounding whitespace in the answer.
        if continue_execution.strip().lower() == 'y':
            self.sinker_executor.execute(data_type)
            LOG.info('5. data sent to sinker successfully')
        # Record which script/data type served this zip so an unchanged rerun
        # can skip regeneration (see the `unchanged` branch above).
        self.execution_recorder.save_script_record(
            zip_filename=zip_filename,
            values={'module_name': module_name, 'data_type': data_type},
        )
        LOG.info('6. save execution records successfully')

