package com.gd.deron.smartrobot.baobao;


import com.gd.deron.smartrobot.baobao.app.MyApplication;
import com.gd.deron.smartrobot.baobao.db.DatabaseAsrDao;
import com.gd.deron.smartrobot.baobao.info.VoiceTitle;
import com.gd.deron.smartrobot.baobao.utils.BlueTools;
import com.gd.deron.smartrobot.baobao.utils.Config;
import com.gd.deron.smartrobot.baobao.utils.FileUtils;
import com.gd.deron.smartrobot.baobao.utils.FucUtil;
import com.gd.deron.smartrobot.baobao.utils.InstructionUtil;
import com.gd.deron.smartrobot.baobao.utils.JsonParser;
import com.gd.deron.smartrobot.baobao.utils.PlayMusicTool;
import com.gd.deron.smartrobot.baobao.utils.ToastUtils;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.GrammarListener;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.util.ResourceUtil;
import com.iflytek.cloud.util.ResourceUtil.RESOURCE_TYPE;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.LinearInterpolator;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
/**
 * 科大讯飞离线语音识别 — iFlytek offline (local) speech-recognition dialog.
 * Listens for a single utterance against a BNF grammar and dispatches the
 * recognized command (robot movement, story/song playback, etc.).
 */
public class AsrOfflineDialog extends Activity {

	// Log tag for the offline speech-recognition module.
	private static final String TAG = "AbnfDemo";

	// iFlytek offline recognizer; created in formatGrammar(), may be null if SDK init fails.
	private SpeechRecognizer mAsr;

	// Single reused Toast so repeated tips replace each other instead of queuing.
	private Toast mToast;

	// Raw BNF grammar text, loaded from SD card with an asset fallback.
	private String mLocalGrammar = null;

	// Directory on external storage where the compiled grammar is built/stored.
	private String grmPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/msc/test";

	// Result format requested from the engine ("json" is parsed below).
	private String mResultType = "json";

	private final String GRAMMAR_TYPE_BNF = "bnf";

	// Offline (local) engine only.
	private String mEngineType = SpeechConstant.TYPE_LOCAL;

	// Last SDK return code.
	private int ret = 0;

	private String mContent; // grammar/lexicon scratch variable
	private ImageView mImageView;
	private Animation mAnimation;
	private TextView mTextView;
	private Intent wake; // kept for the commented-out WakeService hand-off below

	private boolean isHciAfr = false;

	// Local question->answer lookup for free-form commands.
	private DatabaseAsrDao mAsrDao;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_asrdialog_off_line);
		getWindow().getDecorView().setSystemUiVisibility(10);
		mAsrDao = new DatabaseAsrDao(this);
		init();
	}

	/** Binds views, wires the tap-to-dismiss overlay, then builds the grammar. */
	private void init() {
		mImageView = (ImageView) findViewById(R.id.loading);
		//wake = new Intent(AsrOfflineDialog.this,WakeService.class);
		mTextView = (TextView) findViewById(R.id.loading_txt);
		mTextView.setText("识别中");

		RelativeLayout stop = (RelativeLayout) findViewById(R.id.rl_asr_offline);

		// Tapping anywhere on the dialog cancels recognition by finishing the Activity.
		stop.setOnClickListener(new OnClickListener() {

			@Override
			public void onClick(View v) {
				AsrOfflineDialog.this.finish();
			}
		});

		formatGrammar();
	}

	@Override
	protected void onResume() {
		super.onResume();
		// Configure the recognizer, then start a listening session.
		if (!setParam()) {
			showTip("参数有误");
			return;
		}
		try {
			ret = mAsr.startListening(mRecognizerListener);
			if (ret != ErrorCode.SUCCESS) {
				showTip("识别失败" + ret);
			}
		} catch (Exception e) {
			// FIX: Log.e(tag, e.getLocalizedMessage()) NPEs when the message is
			// null and drops the stack trace; pass the throwable instead.
			Log.e(TAG, "startListening failed", e);
		}
	}

	/**
	 * Creates the recognizer and compiles the BNF grammar.
	 * Grammar source: SD-card copy first, bundled asset "call.bnf" as fallback.
	 */
	@SuppressLint("ShowToast")
	private void formatGrammar() {

		mAsr = SpeechRecognizer.createRecognizer(this, mInitListener);

		//mLocalGrammar = FucUtil.readFile(this, "call.bnf", "utf-8");
		mLocalGrammar = FileUtils.readCallToSD(); // prefer the SD-card grammar
		if (mLocalGrammar == null)
			mLocalGrammar = FucUtil.readFile(this, "call.bnf", "utf-8");
		mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
		// FIX: guard against createRecognizer returning null (SDK init failure)
		// before calling setParameter on it.
		if (mAsr == null) {
			Log.e(TAG, "createRecognizer returned null; cannot build grammar");
			return;
		}
		// Build the grammar (was: new String(mLocalGrammar) — redundant copy).
		mContent = mLocalGrammar;

		mAsr.setParameter(SpeechConstant.PARAMS, null);
		// Text encoding of the grammar source.
		mAsr.setParameter(SpeechConstant.TEXT_ENCODING, "utf-8");
		// Engine type: local/offline.
		mAsr.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
		// Where the compiled grammar is written.
		mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
		// Uncomment for 8k audio:
		// mAsr.setParameter(SpeechConstant.SAMPLE_RATE, "8000");
		// Offline resource path (asset .jet files).
		mAsr.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
		ret = mAsr.buildGrammar(GRAMMAR_TYPE_BNF, mContent, grammarListener);
		if (ret != ErrorCode.SUCCESS) {
			showTip("参数结构有误" + ret);
		}

		Log.e(TAG, "语法=" + mContent);
	}

	/**
	 * SDK initialization listener. Note this may fire before formatGrammar()
	 * finishes, so showTip() must tolerate a not-yet-created Toast.
	 */
	private InitListener mInitListener = new InitListener() {

		@Override
		public void onInit(int code) {
			Log.d(TAG, "初始化监听器 code = " + code);
			if (code != ErrorCode.SUCCESS) {
				// FIX: this is the FAILURE branch — the original logged
				// "语音识别初始化成功" (init succeeded) here, which was misleading.
				showTip("初始化" + code);
				Log.e(TAG, "语音识别初始化失败 code=" + code);
			}
		}
	};

	/** Grammar-build listener; errors here usually mean a malformed .bnf file. */
	private GrammarListener grammarListener = new GrammarListener() {
		@Override
		public void onBuildFinish(String grammarId, SpeechError error) {
			if (error == null) {
				//showTip("开始指令！" );
			} else {
				// A 1202 error here means the grammar file itself is invalid.
				showTip("语法构建失败：" + error.getErrorCode() + error.toString());
				Log.i(TAG, "语法构建失败：" + error.toString());
			}
		}
	};

	/** Recognition listener: parses results, gates on confidence, dispatches. */
	private RecognizerListener mRecognizerListener = new RecognizerListener() {

		@Override
		public void onResult(final RecognizerResult result, boolean isLast) {
			if (null != result && !TextUtils.isEmpty(result.getResultString())) {

				String string = result.getResultString();
				Log.i(TAG, "recognizer result：" + string);
				String text = "";

				if (mResultType.equals("json")) {
					text = JsonParser.parseGrammarResult(string, mEngineType);
					String textString = JsonParser.parseGrammarResultToInt(string);

					try {
						// First two characters of the parsed value are treated as
						// a confidence score; below the threshold we ask the user
						// to repeat. NOTE(review): assumes the score is always at
						// least two digits — TODO confirm against JsonParser.
						int score = Integer.parseInt(textString.substring(0, 2));
						if (score > 10) {
							showTip(text);
							Log.i(TAG, "recognizer result text：" + text);
							asrControl(text);
						} else {
							PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.PLEASE_AGAIN, AsrOfflineDialog.this);
							finish();
						}
					} catch (Exception e) {
						// Score unparsable: fall through and dispatch anyway
						// (deliberate best-effort, preserved from original).
						asrControl(text);
					}
				}
			} else {
				Log.d(TAG, "recognizer result : null");
			}
		}

		@Override
		public void onEndOfSpeech() {
			PlayMusicTool.getInstance().playMusicToId(AsrOfflineDialog.this, R.raw.stop);
		}

		@Override
		public void onBeginOfSpeech() {
			PlayMusicTool.getInstance().playMusicToId(AsrOfflineDialog.this, R.raw.start);
			//showTip(getString(R.string.begin_speak));
		}

		@Override
		public void onError(SpeechError error) {
			showTip("error" + error.getErrorCode() + error.toString());
			PlayMusicTool.getInstance().playMusicToId(AsrOfflineDialog.this, R.raw.error);
			finish();
		}

		@Override
		public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {

		}

		// volume is documented by the SDK as [0-30]; map it to one of six
		// level images. First matching branch wins, so the overlapping
		// boundary values (5, 10, ...) resolve to the lower image.
		@Override
		public void onVolumeChanged(int volume, byte[] arg1) {
			if (volume >= 0 && volume <= 5)
				mImageView.setImageResource(R.drawable.voice_01);
			else if (volume >= 5 && volume <= 10)
				mImageView.setImageResource(R.drawable.voice_02);
			else if (volume >= 10 && volume <= 15)
				mImageView.setImageResource(R.drawable.voice_03);
			else if (volume >= 15 && volume <= 20)
				mImageView.setImageResource(R.drawable.voice_04);
			else if (volume >= 20 && volume <= 25)
				mImageView.setImageResource(R.drawable.voice_05);
			else
				mImageView.setImageResource(R.drawable.voice_06);

			// Pulse the robot's lights while the user is speaking.
			BlueTools.sendData(InstructionUtil.LIGTH_LEFT_RIGHT_ONLY_ONE);
		}
	};

	/** Shows a short toast, reusing one Toast instance. */
	private void showTip(final String str) {
		// FIX: SDK callbacks (e.g. onInit) can run before formatGrammar()
		// assigns mToast — lazily create it to avoid an NPE.
		if (mToast == null) {
			mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
		}
		mToast.setText(str);
		mToast.show();
	}

	/**
	 * Dispatches a recognized phrase: known phrases trigger playback or a
	 * Bluetooth movement command; anything else is looked up in the local DB.
	 */
	@SuppressLint("ShowToast")
	protected void asrControl(String text) {

		switch (text) {
		case "讲故事":
		case "讲个故事":
		case "我想听故事":
		case "我要听故事":
			PlayMusicTool.getInstance().playMusicToTales(AsrOfflineDialog.this, VoiceTitle.VOICE_TALES);
			finish();
			break;
		case "我想听儿歌":
		case "我要听儿歌":
			PlayMusicTool.getInstance().playMusicToTales(AsrOfflineDialog.this, VoiceTitle.VOICE_CHILDREN_SONG);
			finish();
			break;
		case "我想听英语启蒙":
		case "我要听英语启蒙":
			PlayMusicTool.getInstance().playMusicToTales(AsrOfflineDialog.this, VoiceTitle.VOICE_INITIATION);
			finish();
			break;
		case "背一首古诗":
		case "我想听古诗":
		case "我要听古诗":
			PlayMusicTool.getInstance().playMusicToTales(AsrOfflineDialog.this, VoiceTitle.VOICE_ANCIENT_LITERATURE);
			finish();
			break;
		case "左转":
		case "向左转":
			sendData(InstructionUtil.AIJIA_SITU_TURN_LEFT);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "右转":
		case "向右转":
			sendData(InstructionUtil.AIJIA_SITU_TURN_RIGHT);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "左走":
		case "向左走":
			sendData(InstructionUtil.AIJIA_TURN_LEFT_90_FORWARD);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "右走":
		case "向右走":
			sendData(InstructionUtil.AIJIA_TURN_RIGHT_90_FORWARD);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "向后走":
		case "向后转":
			sendData(InstructionUtil.AIJIA_GO_BACK_RORWARD);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "前进":
			sendData(InstructionUtil.AIJIA_FORWARD);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "后退":
			sendData(InstructionUtil.AIJIA_BACK);
			PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.YES_MASTER, AsrOfflineDialog.this);
			finish();
			break;
		case "跳只舞":
			// NOTE(review): "dance" sends AIJIA_BACK — looks like a copy-paste
			// error; TODO confirm the intended dance instruction constant.
			sendData(InstructionUtil.AIJIA_BACK);
			finish();
			break;
		default:
			addOrder(text);
			break;
		}
	}

	/**
	 * Fallback for unrecognized phrases: looks the phrase up in the local ASR
	 * DB, broadcasts the answer (or a "please repeat" message), then finishes.
	 */
	private void addOrder(String order) {
		String answer = mAsrDao.findQuestToAnswer(order);
		if (answer != null) {
			ToastUtils.show(AsrOfflineDialog.this, answer);
			MyApplication.ASR = answer;
			broadcastSend(Config.ASR_ANSWER);
		} else {
			Log.e(TAG, "q=" + order + ">>>a=" + answer);
			showTip("没有对应的指令，请再说一次！");
			MyApplication.ASR = "请再说一次！";
			broadcastSend(Config.ASR_ANSWER);
			//PlayMusicTool.getInstance().playMusicToTitle(VoiceTitle.PLEASE_AGAIN,AsrOfflineDialog.this);
		}
		finish();
	}

	/** Sends a command to the Bluetooth device; best-effort, never throws. */
	private void sendData(String data) {
		try {
			if (MyApplication.bleService != null)
				MyApplication.bleService.sendData(data);
		} catch (Exception e) {
			// FIX: was an empty catch — keep the best-effort contract but
			// leave a trace instead of swallowing silently.
			Log.e(TAG, "sendData failed: " + data, e);
		}
	}

	/** Sends a broadcast with the given action string. */
	private void broadcastSend(final String action) {
		final Intent inte = new Intent(action);
		sendBroadcast(inte);
	}

	/**
	 * Configures the recognizer for an offline session against the "call"
	 * grammar.
	 *
	 * @return false if the recognizer is unavailable, true otherwise
	 */
	public boolean setParam() {
		// FIX: createRecognizer can return null; report failure instead of NPE.
		if (mAsr == null) {
			return false;
		}
		// Clear previous parameters.
		mAsr.setParameter(SpeechConstant.PARAMS, null);
		// Engine type: local/offline.
		mAsr.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
		// Offline recognition resources.
		mAsr.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
		// Compiled-grammar location (must match formatGrammar()).
		mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
		// Result format.
		mAsr.setParameter(SpeechConstant.RESULT_TYPE, mResultType);
		// Grammar id to recognize against.
		mAsr.setParameter(SpeechConstant.LOCAL_GRAMMAR, "call");
		// Recognition confidence threshold.
		mAsr.setParameter(SpeechConstant.MIXED_THRESHOLD, "30");
		// Uncomment for 8k audio:
		// mAsr.setParameter(SpeechConstant.SAMPLE_RATE, "8000");
		return true;
	}

	/** Returns the path(s) to the offline recognition resources in assets. */
	private String getResourcePath() {
		StringBuffer tempBuffer = new StringBuffer();
		// Common recognition resource.
		tempBuffer.append(ResourceUtil.generateResourcePath(this, RESOURCE_TYPE.assets, "asr/common.jet"));
		// Uncomment for 8k audio:
		// tempBuffer.append(";");
		// tempBuffer.append(ResourceUtil.generateResourcePath(this,
		// RESOURCE_TYPE.assets, "asr/common_8k.jet"));
		return tempBuffer.toString();
	}

	@Override
	protected void onDestroy() {
		super.onDestroy();
		if (mAsr != null) {
			if (mAsr.isListening())
				mAsr.stopListening();
			mAsr.cancel();
			mAsr.destroy();
		}

		if (mAnimation != null)
			mAnimation.cancel();
		// FIX: null-check before restarting the wake service, matching the
		// bleService guard in sendData(); wakeService may not be bound yet.
		if (MyApplication.wakeService != null)
			MyApplication.wakeService.startWakeAiJia();
	}
}
