package com.pro.exam.web.service;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.sample.IKAnalzyerUtil;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baidu.translate.demo.BaiduTranslateUtil;
import com.pro.exam.common.mapper.LexiconMapper;
import com.pro.exam.common.model.Lexicon;
import com.pro.exam.common.utils.DataUtils;

@Service("translateService")
public class TranslateService {

	@Autowired
	LexiconMapper lexiconMapper;

	/**
	 * Translates {@code content} (lower-cased Chinese trade text) into the
	 * target {@code language} via the Baidu translate API, substituting known
	 * trade terms from the in-memory lexicon ({@code DataUtils.ANALYZER_WORDS})
	 * either before ("en") or after ("jp") the remote translation.
	 *
	 * @param language target language code; only "en" and "jp" are handled
	 *                 (case-insensitive), anything else returns {@code null}
	 * @param content  source text to translate
	 * @return the Baidu response as a {@link JSONArray} with lexicon terms
	 *         substituted in, or {@code null} for an unsupported language
	 * @throws NullPointerException if {@code language} or {@code content} is null
	 */
	public JSONArray translate(String language, String content) {
		JSONArray retContent = null;
		// Normalize full-width parentheses and case so lexicon lookups match.
		content = content.replaceAll("（", "(");
		content = content.replaceAll("）", ")");
		content = content.toLowerCase();
		if ("en".equals(language.toLowerCase())) {
			// Pre-translate: replace each recognized trade term with its
			// English lexicon entry, then send the whole text to Baidu.
			List<String> tradeWords = IKAnalzyerUtil.getTradeWords(content);
			for (int i = 0; i < tradeWords.size(); i++) {
				Map<String, String> lexicon = DataUtils.ANALYZER_WORDS.get(tradeWords.get(i));
				if (lexicon == null) {
					// Term seen by the analyzer but missing from the lexicon;
					// logged loudly so it can be added.
					System.out.println("++++++++++++++++++++++++++" + tradeWords.get(i) + "++++++++++++++++++++++++++");
				} else if (lexicon.get("en") != null && !lexicon.get("en").isEmpty()) {
					content = content.replace(tradeWords.get(i), lexicon.get("en"));
				}
			}
			retContent = BaiduTranslateUtil.baiduTranslate(language, content);
			System.out.println(retContent);
		} else if ("jp".equals(language.toLowerCase())) {
			// Post-translate: wrap trade terms in marker tags so they survive
			// the remote translation, then swap the tagged spans for lexicon
			// entries afterwards.
			String tagContent = IKAnalzyerUtil.addTags(content);
			JSONArray transContent = BaiduTranslateUtil.baiduTranslate(language, tagContent);
			// Baidu may echo a full-width '＠'; restore ASCII '@' so the
			// marker regex below still matches.
			String trans = transContent.getJSONObject(0).getString("dst").replace("＠", "@");
			String rgex = IKAnalzyerUtil.START_STR + "(.*?)" + IKAnalzyerUtil.END_STR;
			List<String> subs = getSubUtil(tagContent, rgex);
			List<String> transSubs = getSubUtil(trans, rgex);
			// Bound by BOTH sizes: the translated text may have lost marker
			// pairs, in which case transSubs is shorter than subs (the
			// original loop could throw IndexOutOfBoundsException here).
			for (int i = 0; i < subs.size() && i < transSubs.size(); i++) {
				if (subs.get(i).contains("ARABIC")) {
					// Numeric placeholder: strip the ARABIC tag and put the
					// original literal back in place of the tagged span.
					String transSub = IKAnalzyerUtil.START_STR + transSubs.get(i) + IKAnalzyerUtil.END_STR;
					trans = trans.replace(transSub, subs.get(i).replace("ARABIC", ""));
				} else {
					// FIX: the original dereferenced the lexicon entry without
					// a null check (unlike the "en" branch); an unknown term
					// now falls back to the untagged translated text.
					Map<String, String> lexicon = DataUtils.ANALYZER_WORDS.get(subs.get(i));
					String repTransSub = (lexicon == null) ? null : lexicon.get("jp");
					String tagged = IKAnalzyerUtil.START_STR + transSubs.get(i) + IKAnalzyerUtil.END_STR;
					if (repTransSub == null || repTransSub.isEmpty()) {
						// No Japanese lexicon entry: just drop the markers.
						trans = trans.replace(tagged, transSubs.get(i));
					} else {
						trans = trans.replace(tagged, repTransSub);
					}
				}
			}
			JSONObject obj1 = transContent.getJSONObject(0);
			obj1.put("dst", trans);
			transContent.set(0, obj1);
			retContent = transContent;
		}
		return retContent;
	}

	/**
	 * Returns the first capture group of every match of {@code rgex} in
	 * {@code soap}, in order of occurrence.
	 *
	 * @param soap text to scan
	 * @param rgex regex containing at least one capture group
	 * @return list of group(1) values, empty if there are no matches
	 */
	public static List<String> getSubUtil(String soap, String rgex) {
		List<String> list = new ArrayList<String>();
		Matcher m = Pattern.compile(rgex).matcher(soap);
		while (m.find()) {
			// The original kept a loop-local counter that was re-initialized
			// to 1 on every iteration, so group(1) was always collected;
			// the dead counter is removed.
			list.add(m.group(1));
		}
		return list;
	}

	/**
	 * Dumps every lexicon entry's Chinese term to D:/ext.dic, UTF-8, one term
	 * per line — presumably the IK analyzer extension dictionary; TODO confirm.
	 * I/O errors are logged and swallowed (best-effort export).
	 */
	public void generate() {
		BufferedWriter writer = null;
		try {
			FileOutputStream writerStream = new FileOutputStream("D:/ext.dic");
			writer = new BufferedWriter(new OutputStreamWriter(writerStream, "UTF-8"));
			List<Lexicon> lexicons = lexiconMapper.selectAll();
			for (int i = 0; i < lexicons.size(); i++) {
				writer.write(lexicons.get(i).getChinese());
				writer.newLine(); // one term per line
			}
			writer.flush();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// FIX: the original called writer.close() unconditionally; if the
			// FileOutputStream constructor threw, writer was still null and
			// the finally block raised an NPE that masked the real error.
			if (writer != null) {
				try {
					writer.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}
}
