import json
import os
import time

from .leetcode_operation import LeetCodeOperation


class UserProfileQuestions(LeetCodeOperation):
    """Fetch the current user's question lists (by solve status) from the
    ``userProfileQuestions`` GraphQL operation and persist them as JSON.

    Also prunes cached per-question JSON files that are invalidated once the
    status lists are refreshed.
    """

    def __init__(self, requests_manager, base_url="https://leetcode.cn", **params):
        """
        Args:
            requests_manager: HTTP session/manager passed to the base class.
            base_url: Site root; defaults to the Chinese LeetCode domain.
            **params: Expects an optional ``params`` key holding the GraphQL
                variables dict (status, skip, first, sortField, sortOrder, ...).
        """
        super().__init__(requests_manager, base_url)
        # ``or {}`` guards against a missing/None 'params' key, which would
        # otherwise make the .get() on the next line raise AttributeError.
        self.params = params.get('params') or {}
        self.filename = f"{self.params.get('status', 'status')}.json"
        self.endpoint = 'graphql'

    def generate_payload(self):
        """Build the GraphQL POST body; ``self.params`` is sent verbatim as
        the operation's variables."""
        query_string = "query userProfileQuestions($status: StatusFilterEnum!, $skip: Int!, $first: Int!, $sortField: SortFieldEnum!, $sortOrder: SortingOrderEnum!, $keyword: String, $difficulty: [DifficultyEnum!]) {\n  userProfileQuestions(status: $status, skip: $skip, first: $first, sortField: $sortField, sortOrder: $sortOrder, keyword: $keyword, difficulty: $difficulty) {\n    totalNum\n    questions {\n      translatedTitle\n      frontendId\n      titleSlug\n      title\n      difficulty\n      lastSubmittedAt\n      numSubmitted\n      lastSubmissionSrc {\n        sourceType\n        ... on SubmissionSrcLeetbookNode {\n          slug\n          title\n          pageId\n          __typename\n        }\n        __typename\n      }\n      __typename\n    }\n    __typename\n  }\n}\n"

        return {
            "query": query_string,
            "variables": self.params,
            "operationName": "userProfileQuestions",
        }

    def parse_graphql_data(self, data):
        """Return the ``titleSlug`` of every question in a GraphQL response.

        Missing keys anywhere in the response (or a None response) yield an
        empty list instead of raising.
        """
        questions = (
            (data or {})
            .get('data', {})
            .get('userProfileQuestions', {})
            .get('questions', [])
        )
        # NOTE: the original comprehension shadowed the `data` parameter here.
        return [question.get('titleSlug') for question in questions]

    def fetch_all_data(self, request_interval=1):
        """Page through every question for each tracked status, save one JSON
        file per status, and delete stale cached files for those slugs.

        Args:
            request_interval: Seconds to sleep between paginated requests.

        Returns:
            Combined list of title slugs across all statuses.
        """
        # Cached per-question folders whose entries go stale once the status
        # lists are refreshed; matching files are removed at the end.
        stale_folders = [
            "Submission",
            "SubmissionList",
        ]
        limit = 20  # page size — drives the query's $first variable
        all_slugs = []

        for status in ("UNTOUCHED", "FAILED"):
            status_slugs = []
            skip = 0
            self.filename = f"userProfileQuestions_{status}.json"
            self.params['status'] = status

            while True:
                # BUG FIX: the query declares $first (not $limit); the original
                # code updated only 'limit', so the page size was never set.
                # 'limit' is still sent for backward compatibility — the server
                # previously tolerated the extra variable.
                self.params.update({'skip': skip, 'first': limit, 'limit': limit})
                data = self.fetch_data(self.endpoint, method='POST')

                status_slugs.extend(self.parse_graphql_data(data))
                total_num = (
                    (data or {})
                    .get('data', {})
                    .get('userProfileQuestions', {})
                    .get('totalNum', 0)
                )
                skip += limit
                self.logger.info(f"skip: {skip}")
                if skip >= total_num:
                    break
                # Throttle between pages; previously commented out, which left
                # the request_interval parameter dead. Remove if rate limiting
                # is intentionally disabled.
                time.sleep(request_interval)

            all_slugs.extend(status_slugs)
            self.save_json(status_slugs, self.filename)

        # Drop cached per-question files invalidated by the refreshed lists.
        for folder in stale_folders:
            for slug in all_slugs:
                file_path = os.path.join(self.directory, folder, f"{slug}.json")
                if os.path.exists(file_path):
                    os.unlink(file_path)
        return all_slugs

    def execute(self):
        """Fetch a single page with the current ``self.params``, parse the
        slugs, and save them to ``self.filename``."""
        data = self.fetch_data(self.endpoint, method='POST')
        slugs = self.parse_graphql_data(data)
        self.save_json(slugs, self.filename)