package com.zhsource.system.service.impl;

import cn.hutool.core.util.ObjUtil;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;

import com.zhsource.constants.ArticleConstant;
import com.zhsource.system.domain.Logs;
import com.zhsource.system.mapper.LogsMapper;
import com.zhsource.system.query.LogsQuery;

import com.zhsource.system.service.ILogsService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

/**
 * Service implementation for system operation logs: list, lookup by id,
 * paged query (via PageHelper), insert, update, single and batch delete.
 *
 * <p>@ClassName：LogServiceImpl
 * <p>@Date 2023/10/17  13:01
 *
 * @Author zzl
 **/
@Slf4j
@Service
public class LogServiceImpl implements ILogsService {

    /** MyBatis mapper for the logs table; sole data-access dependency of this service. */
    private final LogsMapper logsMapper;

    /**
     * Constructor injection (preferred over field injection: allows the
     * dependency to be {@code final} and simplifies unit testing).
     *
     * @param logsMapper MyBatis mapper for the logs table
     */
    @Autowired
    public LogServiceImpl(LogsMapper logsMapper) {
        this.logsMapper = logsMapper;
    }

    /**
     * Returns all log records without pagination.
     *
     * @return every row from the logs table
     */
    @Override
    public List<Logs> list() {
        return logsMapper.list();
    }

    /**
     * Looks up a single log record by primary key.
     *
     * @param id primary key of the log record
     * @return the matching record, or {@code null} if none exists
     */
    @Override
    public Logs selectById(Long id) {
        return logsMapper.selectById(id);
    }

    /**
     * Runs a paged, conditional query over the logs table.
     *
     * @param logsQuery filter conditions plus current page and page size
     * @return a {@link PageInfo} carrying the page rows and pagination metadata
     */
    @Override
    public PageInfo<Logs> page(LogsQuery logsQuery) {
        // PageHelper.startPage intercepts ONLY the very next SQL statement on this
        // thread, so it must immediately precede the mapper call being paginated.
        PageHelper.startPage(logsQuery.getCurrentPage(), logsQuery.getPageSize());

        // The mapper SQL itself carries no LIMIT/OFFSET; PageHelper rewrites it.
        List<Logs> page = logsMapper.page(logsQuery);

        // PageInfo exposes total count, page number, etc. for the front end.
        return new PageInfo<>(page);
    }

    /**
     * Updates a log record and evicts the related cache entry.
     *
     * @param logs record to update (must carry its primary key)
     */
    // NOTE(review): the ArticleConstant cache names/keys look copy-pasted from an
    // article-type service into this logs service — confirm the intended cache.
    @Override
    @CacheEvict(cacheNames = ArticleConstant.ARTICLE_TYPE_NAMES, key = ArticleConstant.ARTICLE_TYPE_CACHE_TREE)
    public void update(Logs logs) {
        logsMapper.update(logs);
    }

    /**
     * Inserts a new log record and evicts the related cache entry.
     *
     * @param logs record to insert
     */
    @Override
    @CacheEvict(cacheNames = ArticleConstant.ARTICLE_TYPE_NAMES, key = ArticleConstant.ARTICLE_TYPE_CACHE_TREE)
    public void insert(Logs logs) {
        logsMapper.insert(logs);
    }

    /**
     * Deletes a single log record by primary key and evicts the related cache entry.
     *
     * @param id primary key of the record to delete
     */
    @Override
    @CacheEvict(cacheNames = ArticleConstant.ARTICLE_TYPE_NAMES, key = ArticleConstant.ARTICLE_TYPE_CACHE_TREE)
    public void delete(Long id) {
        logsMapper.delete(id);
    }

    /**
     * Deletes multiple log records by primary key.
     *
     * <p>Evicts the same cache entry as {@link #delete(Long)} — previously this
     * method skipped eviction, leaving stale cached data after a batch delete.
     *
     * @param ids primary keys of the records to delete
     */
    @Override
    @CacheEvict(cacheNames = ArticleConstant.ARTICLE_TYPE_NAMES, key = ArticleConstant.ARTICLE_TYPE_CACHE_TREE)
    public void batchDel(List<Long> ids) {
        logsMapper.batchDel(ids);
    }
}



