package com.itheima.service.impl;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.itheima.entity.SensitiveVocabulary;
import com.itheima.enums.RedisEnum;
import com.itheima.mapper.SensitiveVocabularyMapper;
import com.itheima.service.SensitiveVocabularyService;
import com.itheima.utils.SensitiveWordFilterUtil;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * @author 51585
 * @description 针对表【sensitive_vocabulary】的数据库操作Service实现
 * @createDate 2022-12-08 13:50:34
 */
@Service
public class SensitiveVocabularyServiceImpl extends ServiceImpl<SensitiveVocabularyMapper, SensitiveVocabulary>
        implements SensitiveVocabularyService {

    @Resource
    private SensitiveVocabularyMapper sensitiveVocabularyMapper;
    @Resource
    private SensitiveWordFilterUtil sensitiveWordFilterUtil;
    @Resource
    private RedisTemplate redisTemplate;

    /**
     * Imports sensitive words from an uploaded plain-text file (one word per line),
     * batch-inserts them into the {@code sensitive_vocabulary} table, rebuilds the
     * in-memory trie, and evicts the cached trie from Redis.
     *
     * @param file uploaded text file; each non-blank line is treated as one word
     * @throws IOException if the uploaded file cannot be read
     */
    @Override
    public void txtImport(MultipartFile file) throws IOException {
        List<String> words = new ArrayList<>();
        // try-with-resources: the original code leaked all three streams, and would
        // never close them if the insert below threw. Decode explicitly as UTF-8
        // instead of the platform default charset so uploads are read consistently.
        try (InputStream inputStream = file.getInputStream();
             InputStreamReader inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
             BufferedReader reader = new BufferedReader(inputStreamReader)) {
            String line;
            while ((line = reader.readLine()) != null) {
                String word = line.trim();
                // Skip blank lines so empty words are never persisted.
                if (!word.isEmpty()) {
                    words.add(word);
                }
            }
        }
        // Guard the batch insert: an empty VALUES list would generate invalid SQL,
        // and there is nothing to refresh if no words were imported.
        if (words.isEmpty()) {
            return;
        }
        sensitiveVocabularyMapper.batchInsert(words);
        sensitiveWordFilterUtil.createTrie();
        // NOTE(review): the cache key is deleted AFTER the trie is rebuilt, preserving
        // the original ordering — confirm createTrie() does not repopulate this key,
        // otherwise the delete would immediately discard the fresh trie.
        redisTemplate.delete(RedisEnum.SENSITIVE_VOCABULARY_TRIE.getRedis());
    }
}




