import json
import time
import logging
from typing import List
from tortoise.transactions import in_transaction
from common.core.cron import CronBase
from common.enums.agent import AgentEnum
from common.enums.models import ModelsEnum
from common.enums.wallet import WalletEnum
from common.models.users import UserModel, UserWalletModel
from common.models.dev import DevModelsModel
from common.models.agents import AgentKnowModel
from common.models.agents import AgentKnowArchiveModel
from common.postgres.public import PgKnowledgeModel, PgAttachmentModel
from common.cache.queue_cache import QueueCache
from common.chain.chain_server import ChatUtils, FlowsSchema
from common.chain.vector_server import VectorService

logger = logging.getLogger(__name__)


class Command(CronBase):
    """Cron worker that pops one archive id from the embedding queue and
    vectorizes its knowledge chunks (and their attachments), charging the
    owning user for token usage when the vector model is priced."""

    @classmethod
    async def run(cls, **kwargs):
        """Process a single embedding job popped from QueueCache.EM_JOB.

        Validates the archive, its knowledge base, the vector model and the
        owning user; embeds all pending chunks in batches of 10; then marks
        the archive ready and deducts points inside one transaction.
        Returns None when there is no work or validation short-circuits.
        """
        aid = 0
        start_time = time.time()
        try:
            # Pop one pending archive id from the embedding job queue.
            archive_id = await QueueCache.queue_pop(QueueCache.EM_JOB)
            if not archive_id:
                return None

            # Load the archive and ensure it is still waiting for embedding.
            archive = await AgentKnowArchiveModel.filter(id=int(archive_id)).first()
            if not archive:
                return None
            if archive.is_delete:
                return await cls.handle_abnormal(aid=int(archive_id), error="文档已被删除")
            if archive.status != AgentEnum.EMB_LINE:
                return None

            # Validate the owning knowledge base.
            aid = int(archive.id)
            know = await AgentKnowModel.filter(id=archive.know_id, is_delete=0).first()
            if not know:
                return await cls.handle_abnormal(aid=aid, error="知识库已被删除")
            if know.is_disable:
                return await cls.handle_abnormal(aid=aid, error="知识库已被禁用")

            # Resolve the configured vector model.
            try:
                models = await DevModelsModel.check_models(
                    models=know.vector_model,
                    scene=ModelsEnum.TYPE_VECTOR
                )
            except Exception as e:
                return await cls.handle_abnormal(aid=aid, error=str(e))

            # Validate the owning user; balance only matters when priced.
            user = await UserModel.filter(id=know.user_id, is_delete=0).first()
            if not user:
                return await cls.handle_abnormal(aid=aid, error="账号已注销")
            if user.is_disable:
                return await cls.handle_abnormal(aid=aid, error="账号已冻结")
            # Chained comparison: no points left AND the model has a price.
            if user.points <= 0 < models["price"]:
                return await cls.handle_abnormal(aid=aid, error="账号余额不足")

            # Mark the archive as in-progress before batching starts.
            archive.status = AgentEnum.EMB_ING
            archive.update_time = int(time.time())
            await archive.save()

            # Pending (status=0) chunks of this archive.
            pgsql = PgKnowledgeModel.filter(archive_id=aid, is_delete=0).filter(status=0)

            # Batch in pages of 10: processed rows flip to status=1, so the
            # same limit(10) query naturally advances to the next page.
            # "+5" is head-room in case rows appear while the loop runs.
            count = int((await pgsql.count()) / 10) + 5
            for _ in range(count):
                lists = await pgsql.limit(10).order_by("chunk_index").all().values(*["uuid", "question"])
                if not lists:
                    break
                await cls.handle_embedding(aid, models, lists)

            # Re-read accumulated token usage and build the billing flow log.
            archive = await AgentKnowArchiveModel.filter(id=aid).first()
            total_use_tokens: int = archive.use_tokens
            total_use_points: int = ChatUtils.compute_price(total_use_tokens, models["price"])
            additional = [FlowsSchema(
                name="索引生成",
                scene="vector",
                alias=models["alias"],
                model=models["model"],
                task_time=f"{(time.time() - start_time):.2f}",
                use_points=total_use_points,
                total_tokens=total_use_tokens,
                prompt_tokens=total_use_tokens,
                completion_tokens=0
            ).model_dump()]

            # Finalize: mark success and charge the user atomically.
            async with in_transaction("mysql"):
                # Mark the archive as successfully embedded.
                archive.status = AgentEnum.EMB_YES
                archive.update_time = int(time.time())
                archive.fail_reason = ""
                await archive.save()

                # Billing only applies to priced models.
                if models["price"] > 0:
                    # Deduct points from the user, clamped at zero.
                    if total_use_points > 0:
                        user = await UserModel.filter(id=user.id).first()
                        surplus_balance: int = max(user.points - total_use_points, 0)
                        await UserModel.filter(id=user.id).update(points=surplus_balance)
                    # Wallet ledger entry. (The original re-checked
                    # models["price"] here, but price > 0 is already
                    # guaranteed by the enclosing branch.)
                    await UserWalletModel.dec(
                        scene=WalletEnum.POINTS,
                        user_id=user.id,
                        change_type=WalletEnum.POINTS_DEC_KB_EMB,
                        change_amount=total_use_points,
                        remarks=WalletEnum.get_change_type_msg(WalletEnum.POINTS_DEC_KB_EMB),
                        project=archive.name,
                        source_sn=archive.code,
                        source_id=archive.id,
                        additional=json.dumps(additional, ensure_ascii=False)
                    )
        except Exception as e:
            # logger.exception keeps the traceback; lazy %-args avoid
            # building the message unless the record is actually emitted.
            logger.exception("Error queue_vectorize: %s", e)
            try:
                await cls.handle_abnormal(aid=aid, error=str(e))
            except Exception as inner:  # don't shadow the original error
                logger.error("Error queue_vectorize abnormal: %s", inner)

    @classmethod
    async def handle_embedding(cls, aid: int, models: dict, lists) -> None:
        """Embed one batch of knowledge chunks and persist the vectors.

        Also embeds any pending attachments of the batch and accumulates
        the combined token usage onto the archive row for later billing.

        :param aid: archive id the chunks belong to
        :param models: resolved vector-model config (price/model/alias keys)
        :param lists: rows with "uuid" and "question" keys to vectorize
        """
        # Texts to vectorize, in batch order.
        inputs: List[str] = [item["question"] for item in lists]

        # Vectorize the batch in one call.
        vector_service = VectorService()
        embedding_arr = await vector_service.to_embed(models, inputs)

        total_use_tokens: int = 0

        # Store each vector back on its chunk row and mark it processed.
        uuids: List[str] = []
        for item, embedding in zip(lists, embedding_arr):
            # Token accounting only matters for priced models.
            if models["price"] > 0:
                total_use_tokens += ChatUtils.compute_tokens(item["question"])

            uuids.append(str(item["uuid"]))
            # Vector column literal format: "[v1,v2,...]".
            emb_str = "[" + ",".join(str(v) for v in embedding) + "]"
            await PgKnowledgeModel.filter(uuid=item["uuid"]).update(
                embedding=emb_str,
                dimension=len(embedding),
                vector_model=models["model"],
                vector_alias=models["alias"],
                status=1,
                update_time=int(time.time())
            )

        # Attachments linked to these chunks share the same billing.
        total_use_tokens += await cls.handle_attachment(uuids, models)

        # Accumulate token usage on the archive for the final charge.
        if total_use_tokens > 0:
            archive = await AgentKnowArchiveModel.filter(id=aid).first()
            archive.use_tokens += total_use_tokens
            archive.update_time = int(time.time())
            await archive.save()

    @classmethod
    async def handle_attachment(cls, uuids: List[str], models: dict) -> int:
        """Embed pending attachments belonging to the given chunk uuids.

        :param uuids: chunk uuids whose attachments should be vectorized
        :param models: resolved vector-model config (price/model/alias keys)
        :return: tokens consumed (0 when the model is free or nothing pends)
        """
        attachments = await (PgAttachmentModel
                             .filter(dataset_id__in=uuids)
                             .filter(is_delete=0)
                             .filter(status=0)
                             .all().values("uuid", "question"))
        if not attachments:
            return 0

        # Texts to vectorize, in row order.
        inputs: List[str] = [item["question"] for item in attachments]

        # Vectorize the attachment texts in one call.
        vector_service = VectorService()
        embedding_arr = await vector_service.to_embed(models, inputs)

        attr_use_tokens: int = 0

        # Store each vector back on its attachment row and mark it processed.
        for item, embedding in zip(attachments, embedding_arr):
            # Token accounting only matters for priced models.
            if models["price"] > 0:
                attr_use_tokens += ChatUtils.compute_tokens(str(item["question"]))

            # Vector column literal format: "[v1,v2,...]".
            emb_str = "[" + ",".join(str(v) for v in embedding) + "]"
            await PgAttachmentModel.filter(uuid=item["uuid"]).update(
                embedding=emb_str,
                dimension=len(embedding),
                vector_alias=models["alias"],
                vector_model=models["model"],
                status=1,
                update_time=int(time.time())
            )

        return attr_use_tokens

    @classmethod
    async def handle_abnormal(cls, aid: int, error: str) -> None:
        """Mark the archive as failed.

        When the failure means the parent entity is gone (document, KB or
        account removed) the archive is also soft-deleted; otherwise it is
        left in place with the failure reason for retry/inspection.
        """
        # These exact strings are produced by run(); keep them in sync.
        fatal_errors = ("知识库已被删除", "账号已注销", "文档已被删除")
        if error in fatal_errors:
            await AgentKnowArchiveModel.filter(id=aid, is_delete=0).update(
                status=AgentEnum.EMB_FAIL,
                fail_reason=error,
                is_delete=1,
                delete_time=int(time.time())
            )
        else:
            await AgentKnowArchiveModel.filter(id=aid, is_delete=0).update(
                status=AgentEnum.EMB_FAIL,
                fail_reason=error,
                update_time=int(time.time())
            )