# encoding: utf-8
"""
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    created by lei.chang on '24/05/2023'
    comment: 服务流程逻辑
"""
from datetime import datetime
from datetime import timedelta
import io
import os
import uuid
import time
import ujson

from fastapi import UploadFile
from werkzeug.urls import url_parse

from config import Config
from project.lib.common import logger
from project.core.log import Colors
from project.model.basic import Operator, FeiShuDocsType, FlushFrequency
from project.core.exception import BizError, NoRecordError
from project.core.base import BodyParams
from project.model.database.knowledge import Knowledge, KnowledgeData, KnowledgeDetail, KnowledgeDataImportLog
from project.micro_service.apollo import Apollo
from project.micro_service.gaea import Gaea
from project.middleware.feishu import FeiShu, TaskResult
from project.micro_service.adonis import Adonis


class FeiShuUrl:
    """ Parsed FeiShu (Lark) cloud-document URL.

    Validates the link on construction and derives the document token, the
    document type ('bitable' / 'sheet' / 'docx') and the file extension used
    when exporting the document.
    """

    def __init__(self, url):
        """
        :param url: FeiShu cloud-document URL, e.g. https://xxx.feishu.cn/docx/<token>
        :raises BizError: if the URL is not a valid FeiShu document link
        """
        self.url = url
        self.token = ''
        self.type = ''
        self.export_file_extension = ''  # file format used when exporting the document
        # Validate eagerly so an invalid link fails at construction time.
        self._validate()

    def _validate(self):
        """ Validate the FeiShu cloud-document link and populate token / type /
        export_file_extension.
        :raises BizError: if the path shape, document type or token length is wrong
        """
        self.parsed_url = url_parse(self.url)
        # URL paths are always '/'-separated — never os.sep, which is '\\' on
        # Windows and would make every URL fail validation there.
        path = [v for v in self.parsed_url.path.split('/') if v]

        # Expect exactly "/<doc-type>/<token>".
        if len(path) != 2:
            raise BizError('非法飞书云文档链接')

        # Bitable (multi-dimensional table)
        if path[0] == str(FeiShuDocsType.base.value):
            self.type = 'bitable'
            self.export_file_extension = 'csv'
        # Spreadsheet
        elif path[0] == str(FeiShuDocsType.sheets.value):
            self.type = 'sheet'
            self.export_file_extension = 'csv'
        # Document
        elif path[0] == str(FeiShuDocsType.docx.value):
            self.type = 'docx'
            self.export_file_extension = 'pdf'
        else:
            raise BizError('非法飞书云文档链接')

        self.token = path[-1]
        # FeiShu document tokens are fixed-length (27 characters).
        if len(self.token) != 27:
            raise BizError('非法飞书云文档链接')


class KnowledgeService:
    """ Business logic for knowledge bases: CRUD, local-file and FeiShu-document
    imports, import callbacks, vector-store synchronization (Apollo) and
    scheduled FeiShu refreshes (Adonis cron).
    """

    @staticmethod
    async def do_knowledge_add(session, operator: Operator, body_params: BodyParams):
        """ Create a knowledge base.
        :param session: async DB session
        :param operator: current operator; supplies the organization code
        :param body_params: knowledge-base fields
        :return: the created Knowledge record
        :raises BizError: when the record could not be created
        """
        knowledge = await Knowledge.create_modify(session, organization_code=operator.organization_code, **body_params.dict())
        if not knowledge:
            raise BizError('知识库新增失败')
        # Microsecond timestamp doubles as the business code.
        knowledge.code = str(int(time.time() * 1000000))

        await session.commit()

        return knowledge

    @staticmethod
    async def delete_knowledge(session, knowledge_id):
        """ Soft-delete a knowledge base with all of its datasets and details,
        then remove the corresponding vectors.
        :param session: async DB session
        :param knowledge_id: knowledge base id
        :return:
        :raises BizError: when the knowledge base does not exist
        """
        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == knowledge_id))
        if not knowledge:
            raise BizError('知识库不存在')
        knowledge.is_deleted = True

        # Soft-delete the datasets of the knowledge base
        knowledge_datas = await KnowledgeData.fetch_all(session, wheres=(KnowledgeData.knowledge_id == knowledge_id, ~KnowledgeData.is_deleted))
        for knowledge_data in knowledge_datas:
            knowledge_data.is_deleted = True

        # Soft-delete the dataset details
        knowledge_data_ids = [v.id for v in knowledge_datas]
        knowledge_details = await KnowledgeDetail.fetch_all(session, wheres=(KnowledgeDetail.knowledge_data_id.in_(knowledge_data_ids), ~KnowledgeDetail.is_deleted))
        for knowledge_detail in knowledge_details:
            knowledge_detail.is_deleted = True

        # Persist the soft-deletes before touching the vector store — keeps this
        # consistent with delete_knowledge_data (the original never committed
        # here, so the deletion flags could be lost while vectors were gone).
        await session.commit()

        # Remove the vectorized data
        await Apollo.delete_knowledge_batch([v.code for v in knowledge_details])

    @staticmethod
    async def delete_knowledge_data(session, knowledge_data_id):
        """ Soft-delete one knowledge dataset and its details, then remove the
        corresponding vectors.
        :param session: async DB session
        :param knowledge_data_id: dataset id
        :return:
        :raises BizError: when the dataset does not exist
        """
        knowledge_data = await KnowledgeData.fetch_one(session, wheres=(KnowledgeData.id == knowledge_data_id))
        if not knowledge_data:
            raise BizError('知识库数据集不存在')
        knowledge_data.is_deleted = True

        knowledge_details = await KnowledgeDetail.fetch_all(session, wheres=(KnowledgeDetail.knowledge_data_id == knowledge_data.id, ~KnowledgeDetail.is_deleted))
        for knowledge_detail in knowledge_details:
            knowledge_detail.is_deleted = True

        await session.commit()

        # Remove the vectorized data
        await Apollo.delete_knowledge_batch([v.code for v in knowledge_details])

    @staticmethod
    async def do_knowledge_data_file_import(session, knowledge_id, files: list[UploadFile]):
        """ Import locally uploaded files into a knowledge base as datasets and
        forward them to the Gaea import service (results arrive via callback).
        :param session: async DB session
        :param knowledge_id: knowledge base id
        :param files: uploaded files
        :return:
        :raises BizError: when the knowledge base does not exist or a dataset
            record could not be created
        """
        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == knowledge_id, ~Knowledge.is_deleted))
        if not knowledge:
            raise BizError('知识库不存在')

        task_id = str(uuid.uuid4()).replace('-', '')
        file_names = set()
        new_files = []
        for file in files:
            file_name = file.filename
            # Skip duplicated file names. The original never recorded seen
            # names, so this dedupe check could never trigger.
            if file_name in file_names:
                continue
            file_names.add(file_name)
            new_files.append(file)
            title = file_name.split('.')[0]
            knowledge_data = await KnowledgeData.create_modify(session,
                                                               title=title,
                                                               file_name=file_name,
                                                               knowledge_id=knowledge_id,
                                                               task_id=task_id,
                                                               status='init',
                                                               import_type='local_file',
                                                               commit=False)

            if not knowledge_data:
                # Typo fixed: was '导致' ("caused"), should be '导入' ("import").
                raise BizError('知识库数据集导入失败')

            # Record an import-log entry
            await KnowledgeDataImportLog.create_modify(session, task_id=task_id, knowledge_data_id=knowledge_data.id, commit=False)

        await session.commit()

        # Hand the files to the import service; progress comes back via hook_url.
        await Gaea.do_knowledge_data_file_import(new_files,
                                                 organization_code=knowledge.organization_code,
                                                 task_id=task_id, hook_url=f'{Config.LETO_SERVER}/m/knowledge/data/file/import/callback')

    @staticmethod
    async def knowledge_data_file_import_callback(session, body_params):
        """ Callback endpoint logic for file imports: update dataset status,
        record a log entry and, when provided, create the dataset detail.
        :param session: async DB session
        :param body_params: callback payload (task_id, file_name, status, ...)
        :return:
        :raises BizError: when no dataset matches (task_id, file_name)
        """
        logger.info(f'回调地址接收: {body_params}', font_color=Colors.PURPLE.value)
        task_id = body_params['task_id']
        file_name = body_params['file_name']
        knowledge_data = await KnowledgeData.fetch_one(session, wheres=(KnowledgeData.task_id == task_id, KnowledgeData.file_name == file_name))
        if not knowledge_data:
            raise BizError(f'知识库数据集不存在 task_id: {task_id}, file_name: {file_name}')

        knowledge_data.status = status = body_params['status']
        # Only overwrite fields the callback actually carries.
        knowledge_data.file_url = body_params.get('origin_file_url', knowledge_data.file_url)
        knowledge_data.md_url = body_params.get('md_file_url', knowledge_data.md_url)
        knowledge_data.import_mode = body_params.get('import_mode', knowledge_data.import_mode)
        # Terminal states close the refresh window.
        if status in ('success', 'fail'):
            knowledge_data.flush_end_time = datetime.now()
            knowledge_data.fail_message = body_params.get('message', '')

        # Record the callback in the import log
        await KnowledgeDataImportLog.create_modify(session,
                                                   task_id=task_id,
                                                   knowledge_data_id=knowledge_data.id,
                                                   status=status,
                                                   callback_text=body_params,
                                                   commit=False)

        # Create the dataset detail when the callback carries one
        knowledge_detail_code = body_params.get('knowledge_detail_code')
        if knowledge_detail_code:
            await KnowledgeDetail.create_modify(session,
                                                code=knowledge_detail_code,
                                                question=body_params.get('question', ''),
                                                answer=body_params.get('answer', ''),
                                                text=body_params.get('text', ''),
                                                knowledge_id=knowledge_data.knowledge_id,
                                                knowledge_data_id=knowledge_data.id,
                                                commit=False)

        await session.commit()

    @staticmethod
    async def add_knowledge_detail(session, body_params: BodyParams):
        """ Create a knowledge detail and push its text to the vector store.
        :param session: async DB session
        :param body_params: detail fields (must include knowledge_id and text)
        :return: the created KnowledgeDetail record
        :raises BizError: when the knowledge base does not exist or creation fails
        """
        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == body_params.knowledge_id))
        if not knowledge:
            raise BizError('知识库不存在')

        knowledge_detail = await KnowledgeDetail.create_modify(session, **body_params.dict(), commit=False)
        if not knowledge_detail:
            raise BizError('知识库数据详情添加失败')
        # Microsecond timestamp doubles as the business code.
        knowledge_detail.code = str(int(time.time() * 1000000))

        # Vectorize the knowledge text
        await Apollo.do_knowledge_flush(knowledge_detail.code, knowledge.organization_code, knowledge_detail.text)

        await session.commit()

        return knowledge_detail

    @staticmethod
    async def update_knowledge_detail(session, body_params: BodyParams):
        """ Update a knowledge detail's text and re-sync the vector store.
        :param session: async DB session
        :param body_params: must include the detail id and the new text
        :return:
        :raises BizError: when the detail or its knowledge base does not exist
        """
        knowledge_detail = await KnowledgeDetail.fetch_one(session, wheres=(KnowledgeDetail.id == body_params.id))
        if not knowledge_detail:
            raise BizError('知识库数据详情不存在')

        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == knowledge_detail.knowledge_id))
        if not knowledge:
            raise BizError('知识库不存在')

        knowledge_detail.text = body_params.text
        await session.commit()

        # Re-sync the vector data
        await Apollo.do_knowledge_flush(knowledge_detail.code, knowledge.organization_code, knowledge_detail.text)

    @staticmethod
    async def delete_knowledge_detail(session, knowledge_detail_id):
        """ Soft-delete a knowledge detail and remove its vector.
        :param session: async DB session
        :param knowledge_detail_id: detail id
        :return:
        :raises BizError: when the detail does not exist
        """
        try:
            knowledge_detail = await KnowledgeDetail.create_modify(session, wheres=(KnowledgeDetail.id == knowledge_detail_id), no_record_error=True, is_deleted=1)

            # Remove the vector data
            await Apollo.delete_knowledge(knowledge_detail.code)

        except NoRecordError:
            raise BizError('知识库详情不存在')

    @staticmethod
    async def do_knowledge_test(session, knowledge_id, text):
        """ Search test against a knowledge base: match `text` against every
        detail's vector and return the matched detail texts.
        :param session: async DB session
        :param knowledge_id: knowledge base id
        :param text: query text
        :return: list of matched detail dicts (text field only)
        """
        knowledge_details = await KnowledgeDetail.fetch_all(session, wheres=(KnowledgeDetail.knowledge_id == knowledge_id, ~KnowledgeDetail.is_deleted))
        if not knowledge_details:
            return []
        knowledge_detail_map = {v.code: v for v in knowledge_details}

        knowledge_match_list = await Apollo.get_knowledge_match_list([v.code for v in knowledge_details], text)

        ret_data = []
        for knowledge_match in knowledge_match_list:
            knowledge_detail = knowledge_detail_map.get(knowledge_match['knowledge_detail_code'])
            # Matches whose detail was deleted in between are silently dropped.
            if not knowledge_detail:
                continue
            ret_data.append(knowledge_detail.dict('text'))

        return ret_data

    @staticmethod
    async def do_knowledge_data_feishu_import(session, knowledge_id, feishu_urls: list[str], flush_frequency: str, flush_frequency_value: int):
        """ Import FeiShu cloud documents into a knowledge base: export each
        document, create dataset records and forward the files to Gaea.
        :param session: async DB session
        :param knowledge_id: knowledge base id
        :param feishu_urls: FeiShu document URLs
        :param flush_frequency: refresh frequency unit (minute/hour/day)
        :param flush_frequency_value: refresh frequency amount
        :return: list of created KnowledgeData records
        :raises BizError: when the knowledge base does not exist, a URL is
            invalid, or a dataset record could not be created
        """
        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == knowledge_id, ~Knowledge.is_deleted))
        if not knowledge:
            raise BizError('知识库不存在')

        feishu_urls: list[str] = list(set(feishu_urls))  # deduplicate the links
        feishu_urls: list[FeiShuUrl] = [FeiShuUrl(v) for v in feishu_urls]

        # Export every FeiShu document
        task_results: list[TaskResult] = list()
        for feishu_url in feishu_urls:
            task_result = await KnowledgeService.download_feishu_doc_url(feishu_url)
            task_results.append(task_result)

        # Disambiguate duplicated file names by appending '(n)'
        file_names = {}
        for task_result in task_results:
            if task_result.file_name in file_names:
                file_names[task_result.file_name] += 1
            else:
                file_names[task_result.file_name] = 1

            if file_names[task_result.file_name] > 1:
                task_result.file_name = f'{task_result.file_name}({file_names[task_result.file_name]})'

        # Create the knowledge datasets
        knowledge_datas = []
        task_id = str(uuid.uuid4()).replace('-', '')
        new_files: list[UploadFile] = list()
        for feishu_url, task_result in zip(feishu_urls, task_results):
            file_name = f'{task_result.file_name}.{task_result.file_extension}'
            knowledge_data = await KnowledgeData.create_modify(
                session,
                title=task_result.file_name,
                file_name=file_name,
                knowledge_id=knowledge_id,
                import_mode='image_text',
                task_id=task_id,
                feishu_url=feishu_url.url,
                flush_frequency=flush_frequency,
                flush_frequency_value=flush_frequency_value,
                import_type='feishu_url',
                flush_start_time=datetime.now(),
                commit=False
            )
            if not knowledge_data:
                # Typo fixed: was '导致' ("caused"), should be '导入' ("import").
                raise BizError('知识库数据集导入失败')
            knowledge_datas.append(knowledge_data)

            # Record an import-log entry
            await KnowledgeDataImportLog.create_modify(session, task_id=knowledge_data.task_id, knowledge_data_id=knowledge_data.id, commit=False)

            # Wrap the exported bytes as an upload for the import service.
            # NOTE(review): positional filename matches older Starlette; newer
            # versions expect UploadFile(file, filename=...) — verify against
            # the pinned starlette/fastapi version.
            new_files.append(UploadFile(file_name, file=io.BytesIO(task_result.binary_stream)))

        await session.commit()

        # Hand the files to the import service; progress comes back via hook_url.
        await Gaea.do_knowledge_data_file_import(new_files,
                                                 organization_code=knowledge.organization_code,
                                                 task_id=task_id, hook_url=f'{Config.LETO_SERVER}/m/knowledge/data/file/import/callback')

        return knowledge_datas

    @staticmethod
    async def _export_and_merge_sub_docs(feishu_url: FeiShuUrl, tenant_access_token, sub_docs, sub_id_key, strip_title_suffix=False) -> TaskResult:
        """ Export every sub-document (sheet / table) of a FeiShu file and merge
        the downloaded binary streams into a single TaskResult.
        :param feishu_url: parsed FeiShu URL
        :param tenant_access_token: FeiShu tenant access token
        :param sub_docs: sheet/table descriptors returned by the FeiShu API
        :param sub_id_key: key of the sub-document id in each descriptor
            ('sheet_id' for spreadsheets, 'table_id' for bitables)
        :param strip_title_suffix: drop the trailing '-<title>' part FeiShu
            appends to exported sheet file names
        :return: TaskResult whose binary_stream is the concatenation of all parts
        :raises BizError: when the document has no exportable sub-documents
        """
        if not sub_docs:
            # The original fell through to an IndexError on task_results[0];
            # fail with an explicit business error instead.
            raise BizError('飞书云文档导出失败')

        task_results = list()
        for sub_doc in sub_docs:
            # Create the export task for this sub-document
            ticket = await FeiShu.create_export_task(feishu_url.export_file_extension, feishu_url.token, feishu_url.type, tenant_access_token, sub_id=sub_doc[sub_id_key])
            logger.info(f'ticket: {ticket}')

            # Poll the export result
            task_result: TaskResult = await FeiShu.get_export_task_result(ticket, feishu_url.token, tenant_access_token)
            logger.info(f'task_result: {task_result.dict()}')
            if strip_title_suffix and '-' in task_result.file_name and sub_doc['title'] in task_result.file_name:
                task_result.file_name = ''.join(task_result.file_name.split('-')[:-1])

            # Download the exported file (log only the size, not the raw bytes)
            task_result.binary_stream = await FeiShu.download_export_file(task_result.file_token, tenant_access_token)
            logger.info(f'binary_stream bytes: {len(task_result.binary_stream)}')

            task_results.append(task_result)

        # Merge the binary streams, stripping the UTF-8 BOM from each part
        binary_stream_merged = io.BytesIO()
        merged_result = task_results[-1]
        merged_result.sets(task_results[0])
        for v in task_results:
            binary_stream = v.binary_stream
            if binary_stream.startswith(b'\xef\xbb\xbf'):
                binary_stream = binary_stream[3:]
            binary_stream_merged.write(binary_stream)
        binary_stream_merged.seek(0)
        merged_result.binary_stream = binary_stream_merged.read()
        return merged_result

    @staticmethod
    async def download_feishu_doc_url(feishu_url: FeiShuUrl) -> TaskResult:
        """ Export and download a FeiShu cloud document.
        Spreadsheets and bitables are exported per sheet/table and merged into
        one stream; plain documents are exported in a single task.
        :param feishu_url: parsed FeiShu URL
        :return: TaskResult with file name, extension and binary content
        :raises BizError: for unsupported document types or empty documents
        """
        # Obtain the FeiShu tenant_access_token (deliberately not logged)
        tenant_access_token = await FeiShu.get_tenant_access_token()

        # Spreadsheet: export every sheet and merge.
        # (The original used `feishu_url.type in ('sheet')` — a substring test
        # on a plain string, not tuple membership.)
        if feishu_url.type == 'sheet':
            sheets = await FeiShu.get_sheets(feishu_url.token, tenant_access_token)
            task_result = await KnowledgeService._export_and_merge_sub_docs(
                feishu_url, tenant_access_token, sheets, 'sheet_id', strip_title_suffix=True)
        # Bitable: export every table and merge
        elif feishu_url.type == 'bitable':
            tables = await FeiShu.get_tables(feishu_url.token, tenant_access_token)
            task_result = await KnowledgeService._export_and_merge_sub_docs(
                feishu_url, tenant_access_token, tables, 'table_id')
        # Document: single export task
        elif feishu_url.type == 'docx':
            ticket = await FeiShu.create_export_task(feishu_url.export_file_extension, feishu_url.token, feishu_url.type, tenant_access_token)
            logger.info(f'ticket: {ticket}')

            # Poll the export result
            task_result: TaskResult = await FeiShu.get_export_task_result(ticket, feishu_url.token, tenant_access_token)
            logger.info(f'task_result: {task_result.dict()}')

            # Download the exported file
            task_result.binary_stream = await FeiShu.download_export_file(task_result.file_token, tenant_access_token)
        else:
            raise BizError('不支持此飞书云文档类型')

        return task_result

    @staticmethod
    async def do_knowledge_data_feishu_flush(session, knowledge_data_id):
        """ Refresh a FeiShu-linked dataset: re-export the document, reset the
        dataset, purge its details/vectors and re-run the import.
        :param session: async DB session
        :param knowledge_data_id: dataset id
        :return:
        :raises BizError: when the dataset/knowledge base is missing, a refresh
            is already running, or the dataset has no FeiShu link
        """
        knowledge_data = await KnowledgeData.fetch_one(session, wheres=(KnowledgeData.id == knowledge_data_id, ~KnowledgeData.is_deleted))
        # Existence check must come first — the original read
        # knowledge_data.status before the None check (AttributeError).
        if not knowledge_data:
            raise BizError('知识库数据集不存在')
        if knowledge_data.status not in ('success', 'fail'):
            raise BizError('飞书链接正在更新中')
        if not knowledge_data.feishu_url:
            raise BizError('无飞书链接')

        knowledge = await Knowledge.fetch_one(session, wheres=(Knowledge.id == knowledge_data.knowledge_id, ~Knowledge.is_deleted))
        if not knowledge:
            # Typo fixed: was '知识库存不在'.
            raise BizError('知识库不存在')

        feishu_url = FeiShuUrl(knowledge_data.feishu_url)
        task_result: TaskResult = await KnowledgeService.download_feishu_doc_url(feishu_url)

        # Reset the dataset to its initial importing state
        knowledge_data.file_url = ''
        knowledge_data.md_url = ''
        knowledge_data.status = 'init'
        knowledge_data.flush_start_time = datetime.now()
        knowledge_data.flush_end_time = None
        knowledge_data.fail_message = ''

        # Soft-delete the existing details, collecting their vector codes
        knowledge_details = await KnowledgeDetail.fetch_all(session, wheres=(KnowledgeDetail.knowledge_data_id == knowledge_data.id, ~KnowledgeDetail.is_deleted))
        knowledge_detail_codes = []
        for knowledge_detail in knowledge_details:
            knowledge_detail.is_deleted = True
            knowledge_detail_codes.append(knowledge_detail.code)

        # Remove the vector data
        await Apollo.delete_knowledge_batch(knowledge_detail_codes)

        # Record an import-log entry
        await KnowledgeDataImportLog.create_modify(session, task_id=knowledge_data.task_id, knowledge_data_id=knowledge_data.id, commit=False)

        await session.commit()

        # Wrap the exported bytes as an upload for the import service
        new_files: list[UploadFile] = list()
        new_files.append(UploadFile(knowledge_data.file_name, file=io.BytesIO(task_result.binary_stream)))

        # Hand the file to the import service; progress comes back via hook_url.
        await Gaea.do_knowledge_data_file_import(new_files,
                                                 organization_code=knowledge.organization_code,
                                                 task_id=knowledge_data.task_id, hook_url=f'{Config.LETO_SERVER}/m/knowledge/data/file/import/callback')

    @staticmethod
    async def set_feishu_flush_crontab(session, knowledge_data_id):
        """ Schedule the next FeiShu refresh for a dataset as a one-shot cron
        entry that POSTs back to the flush endpoint.
        :param session: async DB session
        :param knowledge_data_id: dataset id
        :return: silently does nothing when the dataset is missing or has no
            refresh frequency configured
        """
        knowledge_data = await KnowledgeData.fetch_one(session, wheres=(KnowledgeData.id == knowledge_data_id))
        if not knowledge_data:
            return
        if not knowledge_data.flush_frequency or not knowledge_data.flush_frequency_value:
            return

        api_url = f'{Config.LETO_SERVER}/m/knowledge/data/feishu/flush'
        api_param = {'id': knowledge_data_id, 'mode': 'auto'}
        header = '"Content-Type: application/json"'
        command = f"curl -X POST -H {header} -d '{ujson.dumps(api_param)}' {api_url}"
        comment = f'{Config.PROJECT_NAME}: knowledge_data_id: {knowledge_data_id}'

        # Compute the next run time from now + configured frequency
        now = datetime.now()
        # Minutes
        if knowledge_data.flush_frequency == str(FlushFrequency.minute.value):
            run_time = now + timedelta(minutes=knowledge_data.flush_frequency_value)
        # Hours
        elif knowledge_data.flush_frequency == str(FlushFrequency.hour.value):
            run_time = now + timedelta(hours=knowledge_data.flush_frequency_value)
        # Days
        elif knowledge_data.flush_frequency == str(FlushFrequency.day.value):
            run_time = now + timedelta(days=knowledge_data.flush_frequency_value)
        else:
            # Unknown frequency unit: do not schedule anything
            return

        # Register the cron entry (minute/hour/day/month pin a single run time)
        await Adonis.set_cron(command, comment,
                              minute=run_time.minute,
                              hour=run_time.hour,
                              day=run_time.day,
                              month=run_time.month)

    @staticmethod
    async def set_feishu_flush_crontab_batch(session, knowledge_data_ids):
        """ Schedule FeiShu refreshes for several datasets (batch variant).
        :param session: async DB session
        :param knowledge_data_ids: dataset ids
        :return:
        """
        for knowledge_data_id in knowledge_data_ids:
            await KnowledgeService.set_feishu_flush_crontab(session, knowledge_data_id)


if __name__ == "__main__":
    """
    """
    # Ad-hoc manual test snippets; intentionally disabled (would need a real
    # FeiShu tenant and `import asyncio`). Kept for reference only.
    # feishu_url = FeiShuUrl('https://cx390yjipvz.feishu.cn/sheets/Izqws5xb0hzKcpt3LfvckTNBnNg?sheet=7d7701')
    # task_result = asyncio.run(KnowledgeService.download_feishu_doc_url(feishu_url))
    # print(task_result.dict())
    # asyncio.run(KnowledgeService.set_feishu_flush_crontab(248))
    pass


