# -*- coding: utf-8 -*-
# @Author	: changfeng
# @Date		: 2025/9/13 17:06
# @Last Modified by:   changfeng
# @Last Modified time: 2025/9/13 17:06
# Thanks for your comments!
import time
import logging
from typing import Optional, List

from bson import ObjectId
from pymongo import UpdateOne

from src.basedata.domain.po import MedicineLibraryPO, MedicineLibraryListPO
from src.basedata.domain.repositories import MedicineLibraryListAbstractRepository
from src.basedata.infrastructure.client.mongodb.repository import MongoDBRepository
from src.basedata.infrastructure.client.redis.repository import RedisRepository
from src.basedata.infrastructure.utils import const

logger = logging.getLogger(__name__)


class MotorMedicineLibraryListRepository(MedicineLibraryListAbstractRepository, MongoDBRepository, RedisRepository):
    """Medicine-library list repository: MongoDB for persistence, Redis for a read cache."""

    # PO fields copied verbatim into each Redis hash, in addition to the
    # derived keys (id / library_id / code / medicine_code) set explicitly
    # in save_data_to_redis.
    _CACHE_FIELDS = (
        'med_nation_ins_code', 'bar_code', 'medicine_name', 'english_name',
        'trade_name', 'chemistry_name', 'acronymy', 'approval_no', 'spec',
        'uom', 'dosage_form', 'manufacturer', 'package_spec', 'packing_uom',
        'packing_min_uom', 'cn_west_flag', 'basic_drug', 'antibacterial',
        'antibac_type', 'ddd', 'total_qty', 'packing_min_qty', 'injection',
        'antitumor', 'toxic',
    )

    def __init__(self, session, redis_pool):
        MongoDBRepository.__init__(self, session, const.MedicineList)
        RedisRepository.__init__(self, redis_pool)

    async def batch_save(self, data: List[MedicineLibraryListPO]):
        """Bulk-upsert POs, matched on (library_id, code, obsoleted=False).

        Existing documents get their fields overwritten via ``$set``; newly
        inserted documents additionally receive created_at/created_by via
        ``$setOnInsert`` so creation metadata is never clobbered on update.
        """
        bulk_ops = []
        for po in data:
            # Match key for the upsert.
            # NOTE(review): this filters on the raw po.library_id while
            # get_by_codes wraps library_id in ObjectId(...) — confirm the PO
            # already carries an ObjectId, otherwise these queries disagree.
            filter_query = {"library_id": po.library_id, "code": po.code, "obsoleted": False}
            update_value = po.dict(exclude_none=True)
            # Pull creation metadata out of $set so a later update cannot
            # overwrite it; default created_at to "now" in epoch milliseconds.
            created_at = update_value.pop("created_at", None) or (time.time() * 1000)
            created_by = update_value.pop("created_by", None)

            update_query = {
                "$set": update_value,
                "$setOnInsert": {"created_at": created_at, "created_by": created_by},
            }
            bulk_ops.append(UpdateOne(filter_query, update_query, upsert=True))

        if bulk_ops:
            # unordered: upserts are independent, keep going past individual failures
            await self.bulk_write(bulk_ops, ordered=False)

    async def save_data_to_redis(self, library_id, data: List[MedicineLibraryListPO]):
        """Cache POs into Redis hashes keyed ``lib:med:{library_id}:{code}``.

        Only codes already persisted in MongoDB are cached, because the hash
        stores the document _id (looked up via get_by_codes). Cache failures
        are logged and swallowed — caching is best-effort.
        """
        codes = [item.code for item in data]
        all_med = await self.get_by_codes(library_id, codes)
        med_code_id_map = {m.get("code"): m.get("_id") for m in all_med}
        pipeline = self._create_pipeline()
        for item in data:
            if item.code not in med_code_id_map:
                # Not persisted yet -> no _id to cache; skip this item.
                continue
            mapping = {
                'id': str(med_code_id_map.get(item.code)),
                'library_id': str(library_id),
                'code': item.code,
                'medicine_code': item.code,
            }
            mapping.update({field: getattr(item, field) for field in self._CACHE_FIELDS})
            # NOTE(review): HMSET is deprecated (Redis 4.0 / redis-py 3.5) in
            # favour of HSET with mapping=...; keep hmset until the pipeline
            # wrapper from RedisRepository is confirmed to support hset.
            pipeline.hmset(f'lib:med:{str(library_id)}:{item.code}', mapping)
        try:
            await pipeline.execute(raise_on_error=True)
        except Exception as e:
            # logger.exception keeps the traceback that logger.error(f"...")
            # was discarding; message text preserved.
            logger.exception("medicine save to cache failed with：%s", str(e))

    async def get_by_codes(self, library_id: str, codes: List[str]):
        """Return (code, _id) projections of non-obsoleted documents in the
        given library whose code is in ``codes``."""
        query = {
            "obsoleted": False,
            "code": {"$in": codes},
            "library_id": ObjectId(library_id),
        }

        return await self.query_records(query, projection={"code": 1, "_id": 1})
