package com.mall.manager.controller;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Resource;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.babytop.BabyTopMap;
import com.mall.hadoop.job.babytop.BabyTopReduce;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.AbilityStatistiService;
import com.mall.manager.service.BabySingleService;
import com.mall.manager.service.BabyTopService;
import com.mall.untils.DateUtils;
import com.mall.untils.PropertiesUtil;
import com.mongodb.DBObject;

/**
 * 宝宝单题信息
 * @author tgy
 *
 */
@RequestMapping("/manager")
@Controller
public class BabyTopController {

	// Class-wide logger; static final per log4j convention (was a per-instance field).
	private static final Logger logger = Logger.getLogger(BabyTopController.class);

	@Resource
	BabySingleService babySingleService;

	@Resource
	BabyTopService babyTopService;

	@Resource
	AbilityStatistiService abilityStatistiService;

	/**
	 * Runs the "babyTop" MapReduce job over today's baby data file on HDFS
	 * and stores the resulting statistics (per the original comment, into RDS).
	 *
	 * @return the job result string from {@code CommonJob.commonJobStart}, or
	 *         the default "文件不存在" message when the job could not be started
	 */
	@RequestMapping("/babyTop")
	@ResponseBody
	public String babyClear(){
		String res = "文件不存在";
		try{
			Configuration conf = HadoopInit.getConfig();
			String jobName = "babyTop";
			// Input file name must match the file produced by babyUploadHdfs().
			String input = PropertiesUtil.getProperties("dataHdfsInputPath")+"baby_"+DateUtils.getDateFormat("yyyyMMdd")+".txt";
			String output = PropertiesUtil.getProperties("dataHdfsOutputPath");
			res = CommonJob.commonJobStart(conf, jobName, input, output, BabyTopMap.class, BabyTopReduce.class, AlphabetOutputFormat.class);
		}catch(Exception e){
			// Fix: log at ERROR with the throwable so the stack trace is kept
			// (original used logger.info(...+e.getMessage()), losing it).
			logger.error("宝宝单题信息统计出现异常", e);
		}
		return res;
	}

	/**
	 * Queries baby single-question records from MongoDB (HTML/SDK/PC sources),
	 * joins them with baby and class metadata, writes the merged records to a
	 * dated local text file, uploads that file to HDFS, and on success deletes
	 * the local copy and triggers the statistics job via {@link #babyClear()}.
	 *
	 * @return the HDFS upload result, or "当前数据不存在" when any of the
	 *         required data sets is empty
	 */
	@RequestMapping("/findBabyTop")
	@ResponseBody
	public String babyUploadHdfs(){
		String res = "当前数据不存在";
		List<DBObject> list = new ArrayList<DBObject>();
		List<DBObject> html = babySingleService.findMongodb("HTML","html_collection","HTML_PC_CEPING_QUESTION");
		List<DBObject> sdk = babySingleService.findMongodb("SDK", "sdk_collection","SDK_CEPING_QUESTION");
		List<DBObject> pc = babySingleService.findMongodb("PC", "pc_collection","PC_CEPING_QUESTION");
		// Baby records.
		List<DBObject> baby = abilityStatistiService.findMongodbBaby("php_baby");
		// Class records.
		List<DBObject> grade = abilityStatistiService.findMongodbBaby("php_class");
		// Merge the three question sources into one list.
		list.addAll(html);
		list.addAll(sdk);
		list.addAll(pc);
		if(!baby.isEmpty() && !grade.isEmpty() && !list.isEmpty()){
			// Index php_class records: key "class_"+id -> class DBObject.
			Map<String,DBObject> mapClass = new HashMap<String,DBObject>();
			for(DBObject dbo:grade){
				mapClass.put("class_"+dbo.get("id"), dbo);
			}
			Map<String,DBObject> mapGa = new HashMap<String,DBObject>();
			// Index php_baby records: key "baby_"+babyId -> baby DBObject;
			// mapGa maps the same key to the baby's class record (may be null).
			Map<String,DBObject> mapBaby = new HashMap<String,DBObject>();
			for(DBObject dbo:baby){
				String classId = dbo.get("class_id").toString();
				mapBaby.put("baby_"+dbo.get("baby_id"), dbo);
				mapGa.put("baby_"+dbo.get("baby_id"), mapClass.get("class_"+classId));
			}
			String path = PropertiesUtil.getProperties("filePath");
			File dir = new File(path);
			// Create the output directory if necessary.
			if(!dir.exists()){
				dir.mkdirs();
			}
			// Fix: resolve the dated file name ONCE so a request spanning
			// midnight writes, uploads and deletes the SAME file (original
			// re-evaluated the date at each use).
			String fileName = "baby_"+DateUtils.getDateFormat("yyyyMMdd");
			File dataFile = new File(path+fileName+".txt");
			// Pre-create the file (RandomAccessFile "rw" would also create it).
			if(!dataFile.exists()){
				try {
					dataFile.createNewFile();
				} catch (IOException e) {
					logger.error("创建文件出现异常", e);
				}
			}
			// Write the merged, metadata-enriched records.
			RandomAccessFile f = null;
			try {
				f = new RandomAccessFile(dataFile, "rw");
				for(DBObject d:list){
					DBObject sch = mapBaby.get("baby_"+d.get("baby_id"));
					DBObject cl = mapGa.get("baby_"+d.get("baby_id"));
					// Only records with both baby and class metadata are written.
					if(sch!=null&&cl!=null){
						d.put("garden_id",sch.get("school_id"));
						d.put("garden_name", sch.get("school_name"));
						d.put("teacher_id", cl.get("id"));
						d.put("teacher_name", cl.get("teacher_name"));
						String str = d.toString()+"\r\n";
						f.write(str.getBytes("UTF-8"));
					}
				}
			} catch (Exception e) {
				logger.error("写入文件流出现异常", e);
			}finally{
				// Fix: null-check before close — the original threw an NPE here
				// whenever the RandomAccessFile constructor itself failed.
				if(f != null){
					try {
						f.close();
					} catch (IOException e) {
						logger.error("关闭文件流出现异常", e);
					}
				}
			}
			// Upload the local file to HDFS.
			try{
				Configuration conf = HadoopInit.getConfig();
				String filePath = path+fileName+".txt";
				String hdfsPath = PropertiesUtil.getProperties("dataHdfsInputPath");
				res = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath, fileName);
				// On success: delete the local copy, then kick off the MR job.
				if(res!=null){
					dataFile.delete();
					babyClear();
				}
			}catch(Exception e){
				logger.error("上传宝宝单题数据出现异常", e);
			}
		}
		return res;
	}
}










