#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/4/1 16:52
# @Author  : zzp
# @File    : CacheEntry
# @Software: PyCharm
from collections import OrderedDict
from typing import List
import torch
import torch.nn.functional as F
from datetime import datetime


def cosine_similarity(tensor1, tensor2):
    """Return the cosine similarity between two tensors, flattened to 1-D.

    Both inputs are reshaped to (1, N) row vectors before comparison, so
    tensors of any (matching-numel) shape can be compared.
    """
    row_a = tensor1.reshape(1, -1)  # reshape handles non-contiguous inputs
    row_b = tensor2.reshape(1, -1)
    return F.cosine_similarity(row_a, row_b).item()


class CacheEntry:
    """A single cache record: content semantics plus routing metadata."""

    def __init__(self, record_id: int, content_semantics: torch.Tensor,
                 forwarding_interfaces: List[str], sim: torch.Tensor):
        self.record_id = record_id                        # record identifier
        self.content_semantics = content_semantics        # semantic feature tensor
        self.forwarding_interfaces = forwarding_interfaces  # forwarding interface list
        self.hit_count = 0                                # number of cache hits so far
        self.timestamp = datetime.now()                   # time the entry was cached
        self.sim = sim                                    # cosine similarity supplied by the caller

    def increment_hit_count(self):
        """Record one more cache hit on this entry."""
        self.hit_count += 1

    def update_forwarding_interfaces(self, new_interfaces: List[str]):
        """Replace the forwarding interface list."""
        self.forwarding_interfaces = new_interfaces

    def __repr__(self):
        return (f"CacheEntry(record_id={self.record_id}, hit_count={self.hit_count}, "
                f"timestamp={self.timestamp}, interfaces={self.forwarding_interfaces})")


class LRUCache:
    """An LRU cache of CacheEntry objects with similarity-based hit detection.

    A lookup "hits" when the query tensor's cosine similarity to a stored
    entry's content semantics exceeds ``theta``. Eviction follows
    least-recently-used order via the underlying OrderedDict.
    """

    def __init__(self, capacity: int):
        self.capacity = capacity   # maximum number of cached entries
        self.cache = OrderedDict()  # record_id -> CacheEntry, in LRU order
        self.theta = 0.6           # cosine-similarity hit threshold

    def check(self, new_content):
        """Test whether *new_content* hits an existing entry.

        On a hit, the query is blended into the matched entry's semantics
        (simple average) in place, so the entry's hit_count and timestamp are
        preserved. (The previous version rebuilt the entry, resetting both.)

        Returns:
            (True, record_id) on a hit; (False, -1) otherwise.
        """
        # self.cache is always an OrderedDict, so no None-check is needed;
        # iterating an empty dict simply falls through to the miss return.
        for record_id, entry in self.cache.items():
            if cosine_similarity(new_content, entry.content_semantics) > self.theta:
                # Hit: fold the query into the cached semantics in place.
                entry.content_semantics = (new_content + entry.content_semantics) / 2
                return True, record_id
        return False, -1

    def get(self, new_content: torch.Tensor):
        """Return the hit entry (refreshing LRU order and hit count), or None."""
        is_hit, record_id = self.check(new_content)
        if not is_hit:
            return None
        self.cache.move_to_end(record_id)  # mark as most recently used
        entry = self.cache[record_id]
        entry.increment_hit_count()  # hit_count was tracked but never updated before
        return entry

    def put(self, record_id: int, content_semantics: torch.Tensor, forwarding_interfaces: List[str], sim: torch.Tensor):
        """Insert a new entry, evicting the least-recently-used one if full.

        If *record_id* is already cached, only its LRU position is refreshed;
        the stored entry is left untouched.
        """
        if record_id in self.cache:
            self.cache.move_to_end(record_id)
        else:
            if len(self.cache) >= self.capacity:
                self.cache.popitem(last=False)  # evict the least recently used entry
            self.cache[record_id] = CacheEntry(record_id, content_semantics, forwarding_interfaces, sim)

    def __repr__(self):
        return f"LRUCache(capacity={self.capacity}, entries={list(self.cache.keys())})"
