const fs = require('fs');
const path = require('path');

/**
 * Word-level tokenizer that mirrors a Keras-style `word_index` mapping:
 * known words map to their integer index, out-of-vocabulary words (and
 * words whose index is >= numWords) map to the OOV token's index.
 *
 * The vocabulary is loaded from `../constant/tokenizer_config.json`
 * relative to this file; on any load/parse failure the tokenizer falls
 * back to an empty vocabulary so callers never see a throw at require time.
 */
class Tokenizer {
  constructor() {
    this.wordIndex = {};
    this.configPath = path.join(__dirname, '../constant/tokenizer_config.json');
    this.loadConfig();
  }

  /**
   * Reads and parses the tokenizer config, populating `wordIndex`,
   * `oovToken` and `numWords`. Any error (missing file, bad JSON)
   * is logged and safe defaults are installed instead of throwing.
   */
  loadConfig() {
    try {
      const config = JSON.parse(fs.readFileSync(this.configPath, 'utf8'));
      // Guard every field: a partially-written config must not leave
      // `wordIndex` undefined (textsToSequences would throw later).
      this.wordIndex = config.word_index ?? {};
      this.oovToken = config.oov_token ?? '<OOV>';
      // `??` (not `||`) so only a missing/null value falls back.
      this.numWords = config.num_words ?? 10000;
      console.log('Tokenizer配置加载成功');
    } catch (error) {
      console.error('加载tokenizer配置失败:', error);
      this.wordIndex = {};
      this.oovToken = '<OOV>';
      this.numWords = 10000;
    }
  }

  /**
   * Converts an array of texts into arrays of word indices.
   *
   * @param {string[]} texts - Input strings; each is lowercased and split on whitespace.
   * @returns {number[][]} One index sequence per input text. Unknown words and
   *   words with index >= numWords become the OOV index; if the OOV token itself
   *   is not in the vocabulary, such words are dropped entirely.
   */
  textsToSequences(texts) {
    return texts.map((text) => {
      // Split on runs of whitespace and drop empty fragments so that
      // consecutive/leading/trailing spaces don't inject phantom OOV tokens.
      const words = text
        .toLowerCase()
        .split(/\s+/)
        .filter((word) => word.length > 0);
      return words
        .map((word) => {
          const index = this.wordIndex[word];
          // Index 0 is treated as reserved (Keras-style padding), so it
          // falls through to OOV just like a missing entry.
          return index > 0 && index < this.numWords
            ? index
            : this.wordIndex[this.oovToken];
        })
        .filter((index) => index !== undefined);
    });
  }
}

module.exports = new Tokenizer();