#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import os
import stat
import time
import datetime
import sys
import zipfile

from ndmp.service.index.parse_index_param import IndexParam
from common.common_models import SubJobDetails, LogDetail
from common.const import SubJobStatusEnum, RepositoryDataTypeEnum, SubJobPriorityEnum, ParamConstant, ExecuteResultEnum
from ndmp.comm.utils import report_job_details
from ndmp.comm.log import log
from common.exception.common_exception import ErrCodeException
from ndmp.service.ndmp_service_base import NDMPServiceBase
from ndmp.comm.const import SubJobType, NDMPCode, BackupType, Constant


class NDMPIndexService(NDMPServiceBase):
    """Builds RFI (Raw File-system Index) files for NDMP copy indexing.

    Workflow: write a header line into ``index_<sub_id>.txt`` under the index
    repository, fill it either with a full listing of one copy's metadata
    index or with an incremental diff between the previous and the current
    copy's index, then zip the result and report the job state to the
    framework via ``report_job_details``.
    """

    # Flush the in-memory line buffer to disk once it exceeds this many lines.
    _FLUSH_LINE_COUNT = 100000
    # Send a RUNNING progress report at most once per this many seconds.
    _REPORT_INTERVAL_SECONDS = 120

    def __init__(self, req_id: str, job_id: str, sub_id: str):
        super().__init__(req_id, job_id, sub_id)
        self.param = IndexParam(self.req_id)
        # Repository paths resolved from the job parameters.
        self.data_path = self.param.get_curr_repository(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        self.meta_path = self.param.get_curr_repository(RepositoryDataTypeEnum.META_REPOSITORY.value)
        self.index_path = self.param.get_index_repository_path(RepositoryDataTypeEnum.INDEX_REPOSITORY.value)
        self.copy_id = self.param.get_copy_id()
        self.last_copy_id = self.param.get_last_copy_id()
        # Baseline for the periodic RUNNING reports; refreshed in generate_index.
        self.start_time = int(time.time())

    def generate_index(self):
        """Generate the RFI file for the current copy and package it as a zip.

        Reports COMPLETED with the zip file name on success.  On any failure
        a FAILED detail has already been reported and the method returns
        early (the original flow fell through and reported COMPLETED even
        after an error).
        """
        self.start_time = int(time.time())
        log.info(f"Enter generate_index, jobId:{self.job_id}, subjobId:{self.sub_id}, copyId:{self.copy_id}")
        file_name = "index_" + self.sub_id + ".txt"
        rfi_path = os.path.join(self.index_path, file_name)
        if not self.generate_rfi_file():
            # generate_rfi_file already reported FAILED; do not continue.
            return
        zip_file_name = "index_" + self.sub_id + ".zip"
        zip_index_path = os.path.join(self.index_path, zip_file_name)
        try:
            # Context manager guarantees the archive handle is closed even
            # when write() raises.
            with zipfile.ZipFile(zip_index_path, 'w', zipfile.ZIP_DEFLATED) as zip_file:
                zip_file.write(rfi_path, arcname=file_name)
        except Exception as e:
            log.error(e, exc_info=True)
            self.report_build_index_faild()
            return
        self.report_build_index_complete(zip_file_name)

    def report_build_index_complete(self, index_file):
        """Report the sub job as COMPLETED, publishing the RFI archive name.

        :param index_file: file name (no directory) of the generated zip;
            reported to the framework with a leading "/".
        """
        index_files = ["/" + index_file]
        extend_info = {"copyId": self.copy_id, "rfiFiles": index_files}
        log.info(f"Report build index job complete, job:{self.job_id}, subJob:{self.sub_id}, extendInfo:{extend_info}")
        report_job_details(self.req_id, self.job_id,
                           SubJobDetails(taskId=self.job_id, subTaskId=self.sub_id, progress=100,
                                         logDetail=None, speed=None, extendInfo=extend_info,
                                         taskStatus=SubJobStatusEnum.COMPLETED.value).dict(by_alias=True))

    def report_build_index_faild(self):
        """Report the sub job as FAILED.

        NOTE(review): the misspelling ("faild") is kept deliberately —
        renaming the method would break any external caller.
        """
        log.info(f"Report build index job failed, job:{self.job_id}, subJob:{self.sub_id}")
        report_job_details(self.req_id, self.job_id,
                           SubJobDetails(taskId=self.job_id, subTaskId=self.sub_id, progress=100,
                                         logDetail=None, speed=None, extendInfo=None,
                                         taskStatus=SubJobStatusEnum.FAILED.value).dict(by_alias=True))

    def report_build_index_running(self):
        """Report the sub job as still RUNNING (periodic keep-alive).

        NOTE(review): progress is reported as 100 even though the status is
        RUNNING — kept as-is for compatibility, but it looks suspicious;
        confirm against the framework's expectations.
        """
        log.info(f"Report build index job process, job:{self.job_id}, subJob:{self.sub_id}")
        report_job_details(self.req_id, self.job_id,
                           SubJobDetails(taskId=self.job_id, subTaskId=self.sub_id, progress=100,
                                         logDetail=None, speed=None, extendInfo=None,
                                         taskStatus=SubJobStatusEnum.RUNNING.value).dict(by_alias=True))

    def generate_rfi_file(self):
        """Write the RFI text file for this sub job.

        Writes the header, then either an incremental diff (when a distinct
        previous copy exists) or a full "new" listing of the current copy.

        :return: True on success, False when a failure was already reported.
        """
        log.info(f"Entering rfi file, last_copy_id:{self.last_copy_id}, cur_copy_id:{self.copy_id}")
        rfi_file = "index_" + self.sub_id + ".txt"
        rfi_path = os.path.join(self.index_path, rfi_file)
        self.init_rfi_file_header(rfi_path)
        if self.last_copy_id != self.copy_id:
            try:
                self.generate_inc_index_file(rfi_path)
            except Exception as e:
                log.error(e, exc_info=True)
                self.report_build_index_faild()
                return False
            return True
        # No distinct previous copy: emit the whole current index as "new".
        return self.copy_index_info(self.copy_id, "new")

    def generate_inc_index_file(self, write_rfi_path):
        """Build an incremental RFI via a merge-style diff of two sorted indexes.

        Both index files are sorted by inode (ascending), so a single
        line-by-line merge pass finds all differences:
          1. Equal inode and equal hashCode -> unchanged, nothing written.
          2. Last inode < current inode -> entry no longer restorable from
             this copy, written with status "old".
          3. Last inode > current inode -> entry added in this copy, written
             with status "new".
          4. Equal inode, different hashCode -> written as "old" then "new".

        :param write_rfi_path: path of the RFI file to append to.
        """
        last_copy_path = os.path.join(self.meta_path, "index_" + self.last_copy_id + ".txt")
        cur_copy_path = os.path.join(self.meta_path, "index_" + self.copy_id + ".txt")
        buffered_count = 0
        last_report_time = self.start_time
        buffered = ""
        with open(last_copy_path, 'r', encoding=Constant.UTF_8) as last_index, open(cur_copy_path,
            'r', encoding=Constant.UTF_8) as cur_index:
            # The first line of each index file is a header; discard it.
            last_index.readline()
            cur_index.readline()
            last_line = last_index.readline()
            cur_line = cur_index.readline()
            # readline() returns "" at EOF, so a plain truthiness test covers
            # both the None and empty-string checks of the original loop.
            while last_line and cur_line:
                ret, line_str = self.compare_last_and_cur_index(last_line, cur_line)
                if ret > 0:
                    last_line = last_index.readline()
                elif ret < 0:
                    cur_line = cur_index.readline()
                else:
                    last_line = last_index.readline()
                    cur_line = cur_index.readline()
                buffered += line_str
                buffered_count += 1
                if buffered_count > self._FLUSH_LINE_COUNT:
                    self.write_rfi_file_info(buffered, write_rfi_path)
                    buffered = ""
                    buffered_count = 0
                current_time = int(time.time())
                if current_time - last_report_time > self._REPORT_INTERVAL_SECONDS:
                    last_report_time = current_time
                    self.report_build_index_running()
            # One file is exhausted: drain the remainder of the other one.
            while last_line:
                buffered_count, buffered = self.write_rest_index_file(
                    last_line, buffered, buffered_count, write_rfi_path, "old")
                last_line = last_index.readline()
            while cur_line:
                buffered_count, buffered = self.write_rest_index_file(
                    cur_line, buffered, buffered_count, write_rfi_path, "new")
                cur_line = cur_index.readline()
        self.write_rfi_file_info(buffered, write_rfi_path)

    def write_rest_index_file(self, line, in_str, num, write_rfi_path, status):
        """Append one leftover index line to the buffer, flushing when full.

        :param line: one JSON line from an index file.
        :param in_str: current accumulated buffer text.
        :param num: current buffered line count.
        :param write_rfi_path: RFI file to flush into.
        :param status: "old" or "new" status tag for the line.
        :return: (updated line count, updated buffer text).
        """
        node = json.loads(line.strip())
        in_str += self.build_rfi_line(node, status)
        num += 1
        if num > self._FLUSH_LINE_COUNT:
            self.write_rfi_file_info(in_str, write_rfi_path)
            in_str = ""
            num = 0
        return num, in_str

    def compare_last_and_cur_index(self, last_one, cur_one):
        """Compare one line from each index file by inode.

        :return: (cmp, text) where cmp is 1 when only the last-copy line was
            consumed, -1 when only the current-copy line was consumed, 0 when
            both were; text holds the RFI line(s) to emit (may be empty).
        """
        last = json.loads(last_one.strip())
        cur = json.loads(cur_one.strip())
        last_inode = int(last.get("inode"))
        cur_inode = int(cur.get("inode"))
        if last_inode < cur_inode:
            # Entry exists only in the previous copy -> cannot be restored here.
            return 1, self.build_rfi_line(last, "old")
        if last_inode > cur_inode:
            # Entry exists only in the current copy -> newly indexed.
            return -1, self.build_rfi_line(cur, "new")
        # Same inode: emit old+new only when the content hash changed.
        # (The original re-compared the raw inode values here, so a changed
        # file could be skipped when one index stored the inode as a string.)
        input_str = ""
        if last.get("hashCode") != cur.get("hashCode"):
            input_str = self.build_rfi_line(last, "old")
            input_str += self.build_rfi_line(cur, "new")
        return 0, input_str

    def build_rfi_line(self, node, status):
        """Serialize one index entry as a single RFI JSON line.

        :param node: dict parsed from an index file line.
        :param status: "old" or "new".
        :return: JSON string terminated with a newline.
        """
        one_line = {
            "path": node.get("path"),
            "mtime": node.get("mtime"),
            "size": node.get("size"),
            "inode": node.get("inode"),
            "id": node.get("id"),
            "type": node.get("type"),
            "status": status
        }
        return json.dumps(one_line, ensure_ascii=False) + "\n"

    def write_copy_index(self, write_rfi_path, copy, status):
        """Copy every entry of one copy's metadata index into the RFI file.

        :param write_rfi_path: RFI file to append to.
        :param copy: copy id whose ``index_<copy>.txt`` is read.
        :param status: status tag written on every line ("new"/"old").
        :raises OSError/ValueError: propagated to the caller on I/O or parse
            failure (copy_index_info handles reporting).
        """
        meta_path = os.path.join(self.meta_path, "index_" + copy + ".txt")
        buffered = ""
        buffered_count = 0
        last_report_time = self.start_time
        log.debug(f"Entering copy index file:{meta_path}")
        flags = os.O_RDONLY
        modes = stat.S_IRUSR
        with os.fdopen(os.open(meta_path, flags, modes), 'r', encoding=Constant.UTF_8) as rfi_file:
            # The first line is a header; discard it.
            rfi_file.readline()
            while True:
                line = rfi_file.readline()
                if not line:
                    break
                one_file = json.loads(line.strip())
                buffered += self.build_rfi_line(one_file, status)
                buffered_count += 1
                if buffered_count > self._FLUSH_LINE_COUNT:
                    self.write_rfi_file_info(buffered, write_rfi_path)
                    buffered = ""
                    buffered_count = 0
                current_time = int(time.time())
                if current_time - last_report_time > self._REPORT_INTERVAL_SECONDS:
                    last_report_time = current_time
                    self.report_build_index_running()
        self.write_rfi_file_info(buffered, write_rfi_path)

    def copy_index_info(self, copy, status):
        """Write a full-copy RFI file, reporting FAILED on any error.

        :return: True on success, False when a failure was reported.
        """
        write_rfi_file = "index_" + self.sub_id + ".txt"
        write_rfi_path = os.path.join(self.index_path, write_rfi_file)
        try:
            self.write_copy_index(write_rfi_path, copy, status)
        except Exception as e:
            log.error(e, exc_info=True)
            self.report_build_index_faild()
            return False
        return True

    def init_rfi_file_header(self, file):
        """Create (or truncate-write) the RFI file and write its header line.

        :param file: path of the RFI file to initialize.
        """
        first_line = "{\"title\": \"Raw File-system Index Database\",\"version\": \"2.0\",\"time\":"
        first_line = first_line + "\"" + self.get_timestamp() + "\"" + "}\n"
        log.debug(f"Entering init rfi file:{file}")
        flags = os.O_WRONLY | os.O_CREAT
        # NOTE(review): S_IXUSR marks a plain data file executable — kept for
        # compatibility, but it is probably unintentional; confirm before
        # tightening to rw only.
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR
        with os.fdopen(os.open(file, flags, modes), 'w', encoding=Constant.UTF_8) as rfi_file:
            rfi_file.write(first_line)

    def get_timestamp(self):
        """Return the current UTC time as '<epoch_seconds><microseconds:06d>'.

        Uses datetime.timestamp(), which converts an aware UTC datetime to
        true epoch seconds.  The original passed a UTC timetuple into
        time.mktime(), which interprets the fields as *local* time and so
        skewed the value by the host's UTC offset; it also read now() twice,
        mixing seconds and microseconds from different instants.
        """
        now = datetime.datetime.now(datetime.timezone.utc)
        return str(int(now.timestamp())) + "%06d" % now.microsecond

    def write_rfi_file_info(self, str_line, rfi_path):
        """Append buffered RFI lines to the RFI file.

        :param str_line: text to append (may be empty — the write is a no-op).
        :param rfi_path: path of the RFI file.
        """
        log.debug(f"Entering write rfi file:{rfi_path}")
        flags = os.O_WRONLY | os.O_CREAT
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR
        with os.fdopen(os.open(rfi_path, flags, modes), 'a', encoding=Constant.UTF_8) as rfi_file:
            rfi_file.write(str_line)