import { readdirSync } from 'node:fs';
import { defaultFieldsParser } from './parser/default-fields-parser';
import { categoryListTemplate, relationKeyMapTemplate } from './template';
import { checkDuplicateKeys } from './utils';
import { allFieldsParser } from './parser/all-fields-parser';

// Read every entry in the doc directory.
const dirList = readdirSync('./doc');

// For each .txt document, generate the corresponding output files.
dirList.forEach((i) => {
  // Strip the .txt extension; non-matching entries yield undefined and are skipped.
  // (No `as string` — the capture really can be undefined; the guard below handles it.)
  const fileName = /^(.*?)\.txt$/.exec(i)?.[1];

  if (fileName) {
    // process() is async and we cannot await inside forEach; attach a catch so
    // a failed document surfaces instead of becoming an unhandled rejection.
    process(fileName).catch((err: unknown) => {
      console.error(`Failed to process ${fileName}:`, err);
    });
  }
});


/**
 * Persist the generated artifacts to disk.
 *
 * `Bun.write` returns a Promise; the original fired all three writes without
 * awaiting them, silently dropping any failure. Returning `Promise.all` lets
 * callers await completion and observe errors (backward compatible — previous
 * callers ignored the `void` return).
 *
 * @param output destination paths for the type/fields/default-fields files
 * @param data   serialized contents matching each destination
 * @returns bytes written for each file, in write order
 */
function saveFile({ output, data }: {
  output: { type: string; fields: string; defaultFields: string };
  data: { type: string; fields: string; defaultFields: string };
}): Promise<number[]> {
  return Promise.all([
    Bun.write(output.type, data.type),
    Bun.write(output.fields, data.fields),
    Bun.write(output.defaultFields, data.defaultFields),
  ]);
}

/**
 * Parse one document (`./doc/<fileName>.txt`) and write its derived
 * type / fields / default-fields files under `./dist`.
 *
 * NOTE(review): this declaration shadows the global `process` object within
 * this module — consider renaming (e.g. `processDoc`) in a follow-up.
 *
 * @param fileName document base name, without the `.txt` extension
 */
async function process(fileName: string): Promise<void> {
  const entry = `./doc/${fileName}.txt`;
  const output = {
    fields: `./dist/fields/${fileName}.json`,
    type: `./dist/type/${fileName}.js`,
    defaultFields: `./dist/fields/${fileName}_default.json`
  };

  const data = await Bun.file(entry).text();

  // Column groups are separated by blank lines; trim each group.
  // NOTE(review): assumes LF line endings — a CRLF file would defeat the '\n\n' split; confirm input encoding.
  const columns = data.split('\n\n').map(i => i.trim());

  // Parse all columns, plus category and relation-key data.
  const { fieldsList, categoryList, relationKeyMap } = allFieldsParser(columns);

  // Parse the default-column data.
  const defaultFields = defaultFieldsParser(columns, fieldsList);

  // Reject duplicate keys so custom column data stays unique.
  checkDuplicateKeys(fieldsList, fileName);
  checkDuplicateKeys(defaultFields, fileName);

  // Await the save so this function's promise reflects write completion/failure
  // (the original dropped the result, resolving before the writes settled).
  await saveFile({
    output,
    data: {
      type: categoryListTemplate(categoryList) + '\n\n' + relationKeyMapTemplate(relationKeyMap),
      fields: JSON.stringify(fieldsList),
      defaultFields: JSON.stringify(defaultFields)
    }
  });
}
