import os
import time

from .leetcode_operation import LeetCodeOperation


class Submission(LeetCodeOperation):
    """Submit solution code to LeetCode via ``problems/{slug}/submit``.

    The payload template (``lang``, ``question_id``, ``typed_code``) comes
    from the ``params`` keyword; :meth:`fetch_all_data` fills the
    per-question fields in before each POST.
    """

    def __init__(self, requests_manager, base_url="https://leetcode.cn", **params):
        """
        Args:
            requests_manager: HTTP session/manager forwarded to the base class.
            base_url: LeetCode host to submit against.
            **params: expects an optional ``params`` dict used as the
                submission payload template; missing key yields ``None``.
        """
        super().__init__(requests_manager, base_url)
        # Payload template; mutated in place by fetch_all_data() per question.
        self.params = params.get('params')
        self.endpoint = 'problems/{}/submit'

    def generate_payload(self):
        """Return the submission payload dict unchanged."""
        return self.params

    def parse_graphql_data(self, data):
        """Pass the API response through, e.g. ``{"submission_id": 482821073}``."""
        return data

    def fetch_all_data(self, folder_path="Submission", request_interval=60):
        """Submit saved code for every question lacking a submission record.

        A question is pending when a file exists under ``DiscussTopic`` but
        not under *folder_path*. For each pending file, the editor data saved
        in ``QuestionEditorData`` supplies the question id and code snippets;
        every (lang, code) pair is POSTed to the submit endpoint, pausing
        *request_interval* seconds between attempts. Failures are recorded
        in ``<folder_path>/failed.json``.

        Args:
            folder_path: subdirectory (under ``self.directory``) where
                successful submission responses are saved.
            request_interval: seconds to sleep between submit attempts
                (retry after an empty response, and after each failure).
        """
        os.makedirs(os.path.join(self.directory, folder_path), exist_ok=True)

        discuss_topic_dir = os.path.join(self.directory, 'DiscussTopic')
        submission_list_dir = os.path.join(self.directory, folder_path)
        discuss_files = os.listdir(discuss_topic_dir)
        submission_files = os.listdir(submission_list_dir)
        # Only questions that have a discuss topic but no submission yet.
        todo_list = set(discuss_files) - set(submission_files)
        failed = []

        for filename in todo_list:
            slug = filename.split('.')[0]  # filename is "<slug>.<ext>"
            # BUGFIX: previous f-strings had no placeholders and always
            # logged literal placeholder text; interpolate the real values.
            self.logger.info(f"filename: {filename}")
            file_path = os.path.join("QuestionEditorData", filename)
            if not os.path.exists(os.path.join(self.directory, file_path)):
                self.logger.error(f"not found: {file_path}")
                continue

            editor_data = self.load_json(file_path)
            question_id = editor_data.get("questionId")
            code_snippets = editor_data.get("codeSnippets")
            if not question_id or not code_snippets:
                failed.append(f"{slug}: question_id")
                # Message covers both missing fields checked above.
                self.logger.debug(f"questionId or codeSnippets not found for slug: {slug}")
                continue

            code_list = self.get_slug_code_map(slug, code_snippets=code_snippets)
            if not code_list:
                failed.append(f"{slug}: typed_code")
                self.logger.debug(f"No code found for slug: {slug}")
                continue

            for lang, typed_code in code_list:
                # Fill the per-question fields into the payload template.
                self.params.update({
                    "question_id": question_id,
                    "typed_code": typed_code,
                    "lang": lang
                })
                endpoint = self.endpoint.format(slug)

                # One retry after a pause when the first POST returns nothing.
                data = self.fetch_data(endpoint, 'POST')
                if not data:
                    time.sleep(request_interval)
                    data = self.fetch_data(endpoint, 'POST')
                if isinstance(data, dict) and data.get("submission_id"):
                    # Success: record the response and move straight on
                    # (no rate-limit pause after a success; dead code that
                    # followed the old `continue` has been removed).
                    self.save_json(data, os.path.join(folder_path, filename))
                    continue
                failed.append(f"{slug}: submit")
                self.logger.error(f"{slug}: submit error {data}")
                # Pause before the next submission attempt.
                time.sleep(request_interval)
        self.save_json(failed, os.path.join(folder_path, 'failed.json'))

    def execute(self):
        """Submit ``self.params`` for its ``question_id``.

        Resolves the question slug from the id→slug map, POSTs the payload
        to the submit endpoint, and returns the parsed response
        (e.g. ``{"submission_id": ...}``).
        """
        id_slug_map = self.get_id_slug_map()
        question_id = self.params.get('question_id')
        slug = id_slug_map.get(question_id)
        endpoint = self.endpoint.format(slug)
        data = self.fetch_data(endpoint, method='POST')
        return self.parse_graphql_data(data)

