import json
import os
import time

from .leetcode_operation import LeetCodeOperation


class DistributedSubmission(LeetCodeOperation):
    """Fetch per-memory-bucket submission details from LeetCode's
    ``distributedSubmission`` GraphQL query (served by the ``noj-go`` endpoint).

    Reads previously-scraped memory-distribution files from
    ``<self.directory>/MemoryDistribution`` and writes one JSON file of
    submissions per problem slug into ``<self.directory>/<folder_path>``.
    """

    def __init__(self, requests_manager, base_url="https://leetcode.cn/graphql/", **params):
        """
        :param requests_manager: HTTP session manager, forwarded to
            :class:`LeetCodeOperation`.
        :param base_url: GraphQL endpoint base URL (defaults to leetcode.cn).
        :param params: expects an optional ``params`` keyword carrying the
            ``DistributedSubmissionInput`` dict used as the query's ``input``.
        """
        super().__init__(requests_manager, base_url)
        self.endpoint = 'noj-go'
        # NOTE(review): self.params is None when no 'params' kwarg is supplied;
        # fetch_all_data() calls self.params.update() and would raise then —
        # callers are expected to always provide it. TODO confirm with callers.
        self.params = params.get('params')

    def generate_payload(self):
        """Build the POST payload for the ``distributedSubmission`` query.

        :returns: dict with ``query``, ``variables`` and ``operationName``
            keys, ready to be JSON-encoded into a GraphQL request body.
        """
        query_string = "\n    query distributedSubmission($input: DistributedSubmissionInput!) {\n  distributedSubmission(in: $input) {\n    hasMore\n    nextId\n    submission {\n      id\n      code\n      runtime\n      memory\n      date\n      user {\n        realName\n        userSlug\n        userAvatar\n      }\n    }\n  }\n}\n    "
        return {
            "query": query_string,
            "variables": {"input": self.params},
            "operationName": "distributedSubmission"
        }

    def parse_graphql_data(self, data):
        """Extract the ``submission`` record from a GraphQL response.

        :param data: decoded GraphQL response dict.
        :returns: flat dict with the submission fields plus a nested ``user``
            dict, or ``{}`` when the response carries no submission.
        """
        submission_data = data.get('data', {}).get('distributedSubmission', {}).get('submission', {})
        if not submission_data:
            return {}
        # Hoist the nested user dict instead of re-fetching it per field.
        user_data = submission_data.get('user', {})
        return {
            'id': submission_data.get('id'),
            'code': submission_data.get('code'),
            'runtime': submission_data.get('runtime'),
            'memory': submission_data.get('memory'),
            'date': submission_data.get('date'),
            'user': {
                'realName': user_data.get('realName'),
                'userSlug': user_data.get('userSlug'),
                'userAvatar': user_data.get('userAvatar')
            }
        }

    def fetch_all_data(self, request_interval=1, folder_path="DistributedSubmission"):
        """Fetch submissions for every memory-distribution file.

        For each ``<slug>.json`` under ``<self.directory>/MemoryDistribution``,
        queries one submission per distribution bucket and saves the results
        to ``<folder_path>/<slug>.json``. Slugs whose output file already
        exists are skipped, so interrupted runs can be resumed.

        :param request_interval: seconds to sleep between GraphQL requests.
        :param folder_path: output sub-directory (relative to self.directory).
        """
        memory_dist_dir = os.path.join(self.directory, 'MemoryDistribution')
        os.makedirs(os.path.join(self.directory, folder_path), exist_ok=True)
        # Cumulative {slug: observerId} failure records across all files,
        # persisted to failed.json as the run progresses.
        failed = []

        for file_name in os.listdir(memory_dist_dir):
            save_path = os.path.join(folder_path, f"{file_name}")
            # Resume support: skip slugs already fetched on a previous run.
            if os.path.exists(os.path.join(self.directory, save_path)):
                continue

            self.logger.info(f"file_name: {file_name}")

            with open(os.path.join(memory_dist_dir, file_name), 'r') as file:
                slug_data = json.load(file)
            slug = os.path.splitext(file_name)[0]
            distribution_data = slug_data.get('merged_distribution', [])
            observerId = slug_data.get('observerId')

            if not distribution_data:
                # BUGFIX: the original gated this unlink on the *output*
                # accumulator (`distributed_submission_data`), which is empty
                # on the first iteration — so the first input file was always
                # deleted regardless of its content. Presumed intent (confirm):
                # delete only source files that carry no distribution buckets.
                os.unlink(os.path.join(memory_dist_dir, file_name))
                failed.append({slug: observerId})
                self.save_json(failed, os.path.join(folder_path, "failed.json"))
                self.logger.info(f"distribution_data not found: {distribution_data}")
                continue

            # BUGFIX: reset per file — originally this list accumulated across
            # files, so each slug's output also contained every earlier slug's
            # submissions.
            distributed_submission_data = []

            for distribution in distribution_data:
                # Point the shared query input at this slug/bucket.
                self.params.update({
                    'titleSlug': slug,
                    "observerId": observerId,
                    "value": distribution.get('displayed_value'),
                    "minValue": distribution.get('min_value'),
                    "maxValue": distribution.get('max_value'),
                })
                self.logger.info(f"observerId: {observerId}")
                data = self.fetch_data(self.endpoint, method='POST')
                parsed_data = self.parse_graphql_data(data)
                if parsed_data:
                    distributed_submission_data.append(parsed_data)
                else:
                    failed.append({slug: observerId})
                    self.logger.info(f"parsed_data not found: {distribution_data}")
                # Rate-limit between requests.
                time.sleep(request_interval)

            if distributed_submission_data:
                self.save_json(distributed_submission_data, save_path)
            self.save_json(failed, os.path.join(folder_path, "failed.json"))

    def execute(self):
        """Run a single query with the configured params and save the result.

        Uses ``self.filename`` (presumably set by the base class — verify)
        as the output target.
        """
        data = self.fetch_data(self.endpoint, method='POST')
        parsed_data = self.parse_graphql_data(data)
        self.save_json(parsed_data, self.filename)
