#pragma once

#include <algorithm>
#include <cmath>
#include <cstring>
#include <list>
#include <memory>
#include <mutex>
#include <thread>
#include <unordered_map>
#include <vector>

#include "CachePolicy.h"


namespace Cache{

template<typename Key, typename Value> class LruCache;

// Doubly linked list node stored by LruCache.
// Holds the cached key/value pair plus a per-entry access counter.
template<typename Key, typename Value>
class LruNode {
private:
    Key key_;
    Value value_;
    size_t accessCount_;  // number of times this entry has been accessed
    std::shared_ptr<LruNode<Key, Value>> prev_;
    std::shared_ptr<LruNode<Key, Value>> next_;

public:
    LruNode(Key key, Value value)
        : key_(key)
        , value_(value)
        , accessCount_(1)
        , prev_(nullptr)
        , next_(nullptr) {}

    Key getKey() const { return key_; }
    Value getValue() const { return value_; }
    void setValue(const Value& value) { value_ = value; }
    size_t getAccessCount() const { return accessCount_; }
    void incrementAccessCount() { ++accessCount_; }

    // The cache needs to splice nodes, so it touches prev_/next_ directly.
    friend class LruCache<Key, Value>;
};

// Thread-safe LRU cache: hash map for O(1) lookup plus a doubly linked
// list ordered from least recently used (next to head_) to most recently
// used (next to tail_).
template<typename Key, typename Value>
class LruCache : public CachePolicy<Key, Value> {

public:
    using LruNodeType = LruNode<Key, Value>;
    using NodePtr = std::shared_ptr<LruNodeType>;      // list node handle
    using NodeMap = std::unordered_map<Key, NodePtr>;  // key -> node lookup

private:
    int          capacity_;  // maximum number of cached entries
    NodeMap      nodeMap_;   // key -> node
    std::mutex   mutex_;     // guards nodeMap_ and the list
    NodePtr      head_;      // sentinel at the least-recently-used end
    NodePtr      tail_;      // sentinel at the most-recently-used end

public:
    explicit LruCache(int capacity) : capacity_(capacity) {
        initializeList();
    }

    // BUGFIX: the list links are shared_ptr in BOTH directions, so every
    // pair of adjacent nodes forms a reference cycle and the defaulted
    // destructor leaked all nodes. Break the cycles explicitly.
    ~LruCache() override {
        std::lock_guard<std::mutex> lock(mutex_);
        nodeMap_.clear();
        for (NodePtr node = head_; node != nullptr;) {
            NodePtr next = node->next_;
            node->prev_ = nullptr;
            node->next_ = nullptr;
            node = next;
        }
        head_ = nullptr;
        tail_ = nullptr;
    }

    // Insert or refresh an entry; evicts the LRU entry when full.
    void put(Key key, Value value) override {
        if (capacity_ <= 0)
            return;
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = nodeMap_.find(key);
        if (it != nodeMap_.end()) {
            updateExistNode(it->second, value);
            return;
        }
        addNewNode(key, value);
    }

    // On a hit, copies the value out, marks the entry most recently used
    // and returns true; on a miss leaves value untouched and returns false.
    bool get(Key key, Value& value) override {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = nodeMap_.find(key);
        if (it != nodeMap_.end()) {
            moveToMostRecent(it->second);
            value = it->second->getValue();
            return true;
        }
        return false;
    }

    // Convenience overload: returns a value-initialized Value on a miss.
    Value get(Key key) override {
        Value value{};
        get(key, value);
        return value;
    }

    // Erase the entry if present; no-op otherwise.
    void remove(Key key) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = nodeMap_.find(key);
        if (it != nodeMap_.end()) {
            removeNode(it->second);
            nodeMap_.erase(it);
        }
    }

private:
    // Create the two sentinel nodes so splice operations never see null links.
    void initializeList() {
        head_ = std::make_shared<LruNodeType>(Key(), Value());
        tail_ = std::make_shared<LruNodeType>(Key(), Value());
        head_->next_ = tail_;
        tail_->prev_ = head_;
    }

    void updateExistNode(NodePtr node, const Value& value) {
        node->setValue(value);
        moveToMostRecent(node);
    }

    void addNewNode(const Key& key, const Value& value) {
        // capacity_ > 0 is guaranteed by put(); the cast avoids the
        // signed/unsigned comparison the original performed implicitly.
        if (nodeMap_.size() >= static_cast<size_t>(capacity_)) {
            evictLeastRecent();
        }
        NodePtr newNode = std::make_shared<LruNodeType>(key, value);
        insertNode(newNode);
        nodeMap_[key] = newNode;
    }

    // Move an existing node next to the tail sentinel (most-recent slot).
    void moveToMostRecent(NodePtr node) {
        removeNode(node);
        insertNode(node);
    }

    // Unlink the node from the list only; any map entry is untouched.
    void removeNode(NodePtr node) {
        node->prev_->next_ = node->next_;
        node->next_->prev_ = node->prev_;
    }

    // Link the node right before the tail sentinel.
    void insertNode(NodePtr node) {
        node->next_ = tail_;
        node->prev_ = tail_->prev_;
        tail_->prev_->next_ = node;
        tail_->prev_ = node;
    }

    // Drop the node adjacent to the head sentinel (the LRU entry).
    void evictLeastRecent() {
        NodePtr leastNode = head_->next_;
        if (leastNode == tail_)  // list empty — nothing to evict
            return;
        removeNode(leastNode);
        nodeMap_.erase(leastNode->getKey());
    }

};

template<typename Key,typename Value>
class LruKCache :public LruKCache<Key, Value> {
public:
    using LruCacheType = LruCache<Key, Value>;

    LruKCache(int capacity, int hisCapacity, int k) :
        LruCacheType(capacity),
        historyList_(std::make_unique<LruCacheType>(hisCapacity)),
        k_(k){}

    Value get(Key key) {
        int historyCount = historyList_->get(key);
        historyList_->put(key, ++historyCount);
        return LruCacheType::get(key);
    }

    void put(Key key, Value value) {
        if (LruCacheType::get(key) != "") {
            LruCacheType::put(key, value);
        }
        int histoyCount = historyList_->get(key);
        historyList_->put(key, ++historyCount);
        if (historyCount >= k_) {
            historyList_->remove(key);
            LruCacheType::put(key, value);
        }
    }

private:
    int k_;
    std::unique_ptr<LruCache<Key,size_t>> historyList_;

};

template<typename Key,typename Value>
class HashLruCaches {
public:
    using LruCacheType = LruCache<Key, Value>;

    HashLruCaches(size_t capacity, int sliceNum)
        :capacity_(capacity),
        sliceNum_(sliceNum>0?sliceNum:std::thread::hardware_concurrency())
    {
        size_t sliceSize = std::ceil(capacity / static_cast<double>(sliceNum_));//每个分区大小
        for (int i = 0; i < sliceNum_; i++) {
            lruSliceCache_.emplace_back(new LruCacheType(sliceNum_))；
        }
    }

    void put(Key key, Value value) {
        size_t sliceIndex = Hash(key) % sliceNum_;
        return lruSliceCache_[sliceIndex]->put(key, value);
    }

    bool get(Key key, Value value) {
        size_t sliceIndex = Hash(key) % sliceNum_;
        return lruSliceCache_[sliceIndex]->get(key, value);
    }

    Value get(Key key) {
        Value value;
        memset(&value, 0, sizeof(value));
        get(key, value);
        return value;
    }


private:
    size_t Hash(Key key) {
        std::hash<Key> hashFunc;
        return hashFunc(key);
    }


private:
    size_t capacity_;//总容量
    int sliceNum_;//切片数量
    std::vector<std::unique_ptr<LruCache<Key, Value>>>lruSliceCache_;//切片缓存

};

}