package com.huiquan.synonymy.job;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import com.huiquan.foundation.util.BusinessUtil;
import com.huiquan.management.constant.MigrateCode;
import com.huiquan.management.service.MigrateService;
import com.huiquan.synonymy.dao.SynonymyDao;
import com.huiquan.synonymy.domain.Synonymy;
import com.huiquan.vocab.constant.VocabConstant;
import com.huiquan.vocab.service.VocabElementRService;
import com.huiquan.vocab.service.VocabElementTService;

/**
 * Description:
 */
@Component
public class SynonymyJob {

	private static final Logger LOG = LoggerFactory.getLogger(SynonymyJob.class);

	@Autowired
	private SynonymyDao synonymyDao;
	@Autowired
	private MigrateService migrateService;
	@Autowired
	private VocabElementTService vocabElementTService;
	@Autowired
	private VocabElementRService vocabElementRService;
	// Path of the shell script that submits the generated files to SVN (runs on the 181 host).
	@Value("${synonymy.element.script}")
	private String elementScript;

	/**
	 * Regenerates the element (小词) synonym dictionary files from current database
	 * content, transfers them to the remote host, and triggers the SVN submission script.
	 */
	public void generateElement() {
		LOG.info("Synonymy job generate element begin!");

		// Re-initialize T synonyms
		vocabElementTService.initTSynonymy();
		// Re-initialize R synonyms
		vocabElementRService.initRSynonymy();

		// Fetch all element-type synonym rows
		Map<String, Object> param = new HashMap<>();
		param.put("type", Synonymy.TYPE_ELEMENT);
		List<Synonymy> dataList = synonymyDao.retrieveList(param);

		// Group rows by property, keeping only properties eligible for SVN submission
		Map<String, List<Synonymy>> categoryMap = new HashMap<>();
		for (Synonymy data : dataList) {
			String property = data.getProperty();
			if (VocabConstant.submitSVNElementProperty.contains(property)) {
				categoryMap.computeIfAbsent(property, k -> new ArrayList<>()).add(data);
			}
		}

		// Write one dictionary file per property, then ship them to the remote host
		try {
			String bakFilePath = migrateService.getBakFilePathByCode(MigrateCode.ELEMENT_SYNONYM, false);
			for (Map.Entry<String, List<Synonymy>> entry : categoryMap.entrySet()) {
				generateSubFile(entry.getKey(), entry.getValue(), bakFilePath);
			}

			// Shell script: move the files from the 211 host to the 181 host
			migrateService.migrateFileByCodeAndBak(MigrateCode.ELEMENT_SYNONYM, bakFilePath, false);

			// Script on the 181 host that commits the files to SVN
			BusinessUtil.excuteShell(new String[] { elementScript });
		} catch (Exception e) {
			// Log with stack trace instead of printStackTrace so failures reach the job logs
			LOG.error("Synonymy job generate element failed", e);
		}
		LOG.info("Synonymy job generate element end!");
	}

	/**
	 * Writes the synonym dictionary file for a single property.
	 *
	 * @param property    property name; becomes part of the file name {@code dic_ty_<property>.txt}
	 * @param list        synonym rows belonging to this property
	 * @param bakFilePath backup directory the file is written into
	 * @throws Exception if the file cannot be created or written
	 */
	private void generateSubFile(String property, List<Synonymy> list, String bakFilePath) throws Exception {

		String filePath = bakFilePath + File.separator + "dic_ty_" + property + ".txt";
		File synonymFile = createFile(filePath);

		StringBuilder sb = new StringBuilder();
		for (Synonymy synonymy : list) {
			String std = synonymy.getStd();
			String words = synonymy.getWords();
			// Move the standard word to the front of the line unless it already leads
			// (matched as a tab-delimited token inside the words string)
			if (std != null && !std.isEmpty() && !words.startsWith("\t" + std + "\t")) {
				words = words.replace("\t" + std + "\t", "\t");
				words = std + words;
			}
			sb.append(words.trim());
			sb.append("\n");
		}

		// try-with-resources guarantees the stream is closed even if write() throws
		try (BufferedOutputStream synonymBOS = new BufferedOutputStream(new FileOutputStream(synonymFile))) {
			synonymBOS.write(sb.toString().getBytes(StandardCharsets.UTF_8));
		}
		LOG.info("generate {} file success", property);
	}

	/**
	 * Creates an empty file at the given path, creating parent directories as
	 * needed and replacing any pre-existing file.
	 *
	 * @param filePath absolute path of the file to create
	 * @return the newly created, empty {@link File}
	 * @throws Exception if the file cannot be created
	 */
	private File createFile(String filePath) throws Exception {
		File file = new File(filePath);

		// Ensure the parent directory chain exists
		if (!file.getParentFile().exists()) {
			file.getParentFile().mkdirs();
		}

		// Replace any existing file so stale content never survives
		if (file.exists()) {
			file.delete();
		}
		file.createNewFile();
		return file;
	}
}
