package com.swy.service.impl;

import com.swy.model.ImportResult;
import com.swy.model.SensitiveWord;
import com.swy.service.SensitiveWordService;

import lombok.extern.slf4j.Slf4j;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;

import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Slf4j
@Service
public class SensitiveWordServiceImpl implements SensitiveWordService {

    /** Path of the persistent sensitive-word file (one word per line). */
    @Value("${app.sensitive-words.file}")
    private String sensitiveWordsFilePath;

    /** Directory holding uploaded files that {@link #cleanCache()} wipes. */
    @Value("${app.upload.dir}")
    private String uploadDir;

    // In-memory word set; backed by ConcurrentHashMap, so individual
    // add/remove/contains calls are thread-safe without external locking.
    private final Set<String> sensitiveWordSet = ConcurrentHashMap.newKeySet();

    @PostConstruct
    public void init() {
        // Load the word list from disk once at startup.
        reloadSensitiveWords();
    }

    /**
     * Returns a snapshot of all sensitive words wrapped as model objects.
     */
    @Override
    public List<SensitiveWord> getAllSensitiveWords() {
        return sensitiveWordSet.stream()
                .map(SensitiveWord::new)
                .collect(Collectors.toList());
    }

    /**
     * Adds a single word (trimmed) and persists the set to disk.
     *
     * @param word word to add; {@code null}/blank input is silently ignored
     * @throws IllegalArgumentException if the word already exists
     */
    @Override
    public void addSensitiveWord(String word) {
        if (word == null || word.trim().isEmpty()) {
            return;
        }
        String trimmed = word.trim();
        // Set.add is atomic on a concurrent set, so the duplicate check and
        // the insertion cannot race (the old contains()+add() pair could).
        if (!sensitiveWordSet.add(trimmed)) {
            throw new IllegalArgumentException("敏感词已存在");
        }
        saveSensitiveWordsToFile();
        log.info("添加敏感词: {}", word);
    }

    /**
     * Removes a single word (trimmed); persists only if something changed.
     */
    @Override
    public void deleteSensitiveWord(String word) {
        if (word == null || word.trim().isEmpty()) {
            return;
        }
        // Avoid rewriting the file when the word was not present.
        if (sensitiveWordSet.remove(word.trim())) {
            saveSensitiveWordsToFile();
        }
        log.info("删除敏感词: {}", word);
    }

    /**
     * Removes a batch of words; persists once at the end if anything changed.
     */
    @Override
    public void deleteSensitiveWords(List<String> words) {
        if (words == null || words.isEmpty()) {
            return;
        }

        log.info("批量删除敏感词: {} 个", words.size());
        boolean changed = false;
        for (String word : words) {
            if (word != null && !word.trim().isEmpty()) {
                changed |= sensitiveWordSet.remove(word.trim());
            }
        }

        // Single write for the whole batch, and only when needed.
        if (changed) {
            saveSensitiveWordsToFile();
        }
        log.info("批量删除完成");
    }

    /**
     * Returns a read-only live view of the word set.
     */
    @Override
    public Set<String> getSensitiveWordSet() {
        return Collections.unmodifiableSet(sensitiveWordSet);
    }

    /**
     * Reloads the word set from the configured file, creating the file (and
     * its parent directories) if it does not exist yet. Tries UTF-8 first and
     * falls back to GBK for legacy files. On total failure the previous
     * in-memory set is kept rather than wiped.
     */
    @Override
    public void reloadSensitiveWords() {
        synchronized (sensitiveWordSet) {
            try {
                Path path = Paths.get(sensitiveWordsFilePath);
                if (!Files.exists(path)) {
                    // createNewFile() alone fails when the parent directory is
                    // missing; create the directories first.
                    if (path.getParent() != null) {
                        Files.createDirectories(path.getParent());
                    }
                    Files.createFile(path);
                    log.info("敏感词文件不存在，已创建新文件: {}", sensitiveWordsFilePath);
                    sensitiveWordSet.clear();
                    return;
                }

                // Read into a temporary list first so a mid-read decoding
                // failure never leaves the live set half-populated.
                List<String> words;
                try {
                    // Files.newBufferedReader REPORTS malformed bytes
                    // (MalformedInputException), unlike InputStreamReader,
                    // which silently substitutes them — with the old code the
                    // GBK fallback below was unreachable dead code.
                    words = readWords(path, StandardCharsets.UTF_8);
                } catch (IOException utf8Failure) {
                    // Legacy word files may be GBK-encoded.
                    words = readWords(path, Charset.forName("GBK"));
                }

                sensitiveWordSet.clear();
                sensitiveWordSet.addAll(words);
                log.info("成功加载敏感词: {} 个", sensitiveWordSet.size());
            } catch (Exception e) {
                log.error("加载敏感词失败", e);
            }
        }
    }

    /**
     * Reads all non-blank, trimmed lines of {@code path} using {@code charset}.
     *
     * @throws IOException on I/O failure or malformed input for the charset
     */
    private List<String> readWords(Path path, Charset charset) throws IOException {
        List<String> words = new ArrayList<>();
        try (BufferedReader reader = Files.newBufferedReader(path, charset)) {
            String line;
            while ((line = reader.readLine()) != null) {
                String trimmed = line.trim();
                if (!trimmed.isEmpty()) {
                    words.add(trimmed);
                }
            }
        }
        return words;
    }

    /**
     * Persists the current word set to the configured file, one word per line,
     * UTF-8 encoded. Parent directories are created on demand.
     *
     * @throws RuntimeException wrapping the underlying {@link IOException}
     */
    private void saveSensitiveWordsToFile() {
        try {
            Path path = Paths.get(sensitiveWordsFilePath);
            if (path.getParent() != null) {
                Files.createDirectories(path.getParent());
            }
            // newBufferedWriter creates/truncates the file; no explicit
            // createNewFile() or flush() needed (close() flushes).
            try (BufferedWriter writer = Files.newBufferedWriter(path, StandardCharsets.UTF_8)) {
                for (String word : sensitiveWordSet) {
                    writer.write(word);
                    writer.newLine();
                }
            }
            log.info("敏感词已保存到文件: {}", sensitiveWordsFilePath);
        } catch (IOException e) {
            log.error("保存敏感词到文件失败", e);
            // Preserve the cause; the old code threw with only getMessage(),
            // discarding the underlying stack trace.
            throw new RuntimeException("保存敏感词失败: " + e.getMessage(), e);
        }
    }

    /**
     * Returns one page of words containing {@code keyword} (all words when the
     * keyword is null/empty), sorted lexicographically.
     *
     * @param page 1-based page index
     * @param size page size
     */
    @Override
    public List<SensitiveWord> searchSensitiveWords(String keyword, int page, int size) {
        List<String> filtered = sensitiveWordSet.stream()
                .filter(w -> keyword == null || keyword.isEmpty() || w.contains(keyword))
                .sorted()
                .collect(Collectors.toList());
        int from = Math.max(0, (page - 1) * size);
        int to = Math.min(filtered.size(), from + size);
        if (from > to) {
            from = to; // out-of-range page yields an empty list
        }
        return filtered.subList(from, to).stream()
                .map(SensitiveWord::new)
                .collect(Collectors.toList());
    }

    /**
     * Counts words matching {@code keyword} (all words when null/empty).
     */
    @Override
    public int countSensitiveWords(String keyword) {
        return (int) sensitiveWordSet.stream()
                .filter(w -> keyword == null || keyword.isEmpty() || w.contains(keyword))
                .count();
    }

    /**
     * Imports words from a UTF-8 text stream (one word per line). Duplicate
     * words already in the set are counted and reported, not re-added.
     *
     * @return summary of added/duplicate counts and the duplicate words
     * @throws IOException if reading the stream fails
     */
    @Override
    public com.swy.model.ImportResult importSensitiveWords(java.io.InputStream inputStream) throws java.io.IOException {
        int addedCount = 0;
        int duplicateCount = 0;
        List<String> duplicateWords = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            String line;
            // De-duplicate within the upload itself before touching the set.
            Set<String> wordsToProcess = new HashSet<>();
            while ((line = reader.readLine()) != null) {
                String trimmed = line.trim();
                if (!trimmed.isEmpty()) {
                    wordsToProcess.add(trimmed);
                }
            }
            // Lock so the classification (duplicate vs. new) and the file
            // write see a consistent view of the set.
            synchronized (sensitiveWordSet) {
                for (String word : wordsToProcess) {
                    if (sensitiveWordSet.add(word)) {
                        addedCount++;
                    } else {
                        duplicateCount++;
                        duplicateWords.add(word);
                    }
                }
                if (addedCount > 0) {
                    saveSensitiveWordsToFile();
                }
            }
        } catch (IOException e) {
            log.error("读取导入文件失败", e);
            throw e;
        }
        log.info("批量导入完成，成功添加 {} 个词，去除重复 {} 个词", addedCount, duplicateCount);
        return new com.swy.model.ImportResult(addedCount, duplicateCount, duplicateWords);
    }

    /**
     * Removes a word exactly as given (no trimming) and persists if removed.
     */
    @Override
    public void deleteWord(String word) {
        // Skip the file rewrite when the word was not present.
        if (sensitiveWordSet.remove(word)) {
            saveSensitiveWordsToFile();
        }
    }

    /**
     * Imports an in-memory list of words; null/blank entries are skipped and
     * duplicates are reported without being re-added.
     *
     * @return summary of added/duplicate counts and the duplicate words
     */
    @Override
    public ImportResult importSensitiveWords(List<String> words) {
        log.info("开始批量导入敏感词，接收到的词列表大小: {}", words.size());
        log.info("接收到的词列表: {}", words);
        int addedCount = 0;
        int duplicateCount = 0;
        List<String> duplicateWords = new ArrayList<>();
        for (String raw : words) {
            // Guard against null list elements (old code NPE'd on raw.trim()).
            String word = raw == null ? "" : raw.trim();
            if (word.isEmpty()) {
                log.debug("跳过空词");
                continue;
            }
            if (sensitiveWordSet.add(word)) {
                addedCount++;
            } else {
                duplicateCount++;
                duplicateWords.add(word);
            }
        }
        if (addedCount > 0) {
            log.info("有{}个新词被添加，准备保存到文件", addedCount);
            saveSensitiveWordsToFile();
        } else {
            log.info("没有新词被添加，跳过文件保存");
        }
        log.info("导入完成 - 新增: {}, 重复: {}", addedCount, duplicateCount);
        return new ImportResult(addedCount, duplicateCount, duplicateWords);
    }

    /**
     * Deletes every regular file directly under the upload directory.
     *
     * @throws RuntimeException wrapping the underlying {@link IOException}
     */
    @Override
    public void cleanCache() {
        log.info("开始清理上传文件目录: {}", uploadDir);
        try {
            Path uploadPath = Paths.get(uploadDir);
            if (!Files.exists(uploadPath)) {
                log.info("上传目录不存在，无需清理");
                return;
            }

            // Files.list returns a stream backed by an open directory handle;
            // it MUST be closed (the old code leaked it).
            try (Stream<Path> entries = Files.list(uploadPath)) {
                entries.filter(Files::isRegularFile)
                       .forEach(file -> {
                           try {
                               Files.delete(file);
                               log.info("已删除文件: {}", file.getFileName());
                           } catch (IOException e) {
                               // Best-effort: log and continue with the rest.
                               log.error("删除文件失败: {}", file.getFileName(), e);
                           }
                       });
            }

            log.info("缓存清理完成");
        } catch (IOException e) {
            log.error("清理缓存失败", e);
            throw new RuntimeException("清理缓存失败: " + e.getMessage(), e);
        }
    }

    /**
     * Returns every sensitive word contained verbatim in {@code text}.
     */
    @Override
    public List<String> detectSensitiveWords(String text) {
        if (text == null || text.isEmpty()) {
            return Collections.emptyList();
        }

        List<String> foundWords = new ArrayList<>();
        for (String word : sensitiveWordSet) {
            if (text.contains(word)) {
                foundWords.add(word);
            }
        }
        return foundWords;
    }

    /**
     * Result of a fuzzy match: the sensitive word plus the fragment of it that
     * was actually found in the input text.
     */
    public static class FuzzyMatchResult {
        private final String sensitiveWord;
        private final String matchedSub;

        public FuzzyMatchResult(String sensitiveWord, String matchedSub) {
            this.sensitiveWord = sensitiveWord;
            this.matchedSub = matchedSub;
        }

        public String getSensitiveWord() { return sensitiveWord; }

        public String getMatchedSub() { return matchedSub; }

        @Override
        public String toString() { return sensitiveWord + ", 命中片段: " + matchedSub; }
    }

    /**
     * Fuzzy detection: for words of length >= 3, any 2-character substring of
     * the word appearing in {@code text} counts as a hit (first such substring
     * wins); shorter words must appear verbatim.
     */
    public List<FuzzyMatchResult> detectSensitiveWordsFuzzyDetail(String text) {
        if (text == null || text.isEmpty()) {
            return Collections.emptyList();
        }
        List<FuzzyMatchResult> found = new ArrayList<>();
        for (String word : sensitiveWordSet) {
            if (word.length() >= 3) {
                for (int i = 0; i < word.length() - 1; i++) {
                    String sub = word.substring(i, i + 2);
                    if (text.contains(sub)) {
                        found.add(new FuzzyMatchResult(word, sub));
                        break; // one hit per word is enough
                    }
                }
            } else if (text.contains(word)) {
                found.add(new FuzzyMatchResult(word, word));
            }
        }
        return found;
    }

    /** Interface adapter for {@link #detectSensitiveWordsFuzzyDetail(String)}. */
    @Override
    public java.util.List<FuzzyMatchResult> detectSensitiveWordsFuzzy(String text) {
        return detectSensitiveWordsFuzzyDetail(text);
    }
}