package com.mall.manager.controller;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.List;
import java.util.Map;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;

import net.sf.json.JSONObject;

import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.util.WebUtils;

import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.job.grouptopic.GrouptopicMap;
import com.mall.hadoop.job.grouptopic.GrouptopicReduce;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.GroupTopicService;
import com.mall.untils.DateUtils;
import com.mall.untils.Page;
import com.mall.untils.PropertiesUtil;


/**
 * Controller for the full-question-set evaluation statistics (整套题测评统计表控制层).
 * @ClassName: GroupTopicController
 * @Description: Exports evaluation data to a local text file, uploads it to HDFS,
 *               serves paged statistics queries, and launches the MapReduce job.
 * @author:wangwenyue wangwenyue@ide365.com
 * @date 2016年7月12日 下午2:28:45
 */
@RequestMapping("/manager")
@Controller
public class GroupTopicController {

	// Fixed copy-paste bug: the logger was bound to DiffPeopleController.class,
	// so this controller's log lines were attributed to the wrong class.
	private static final Logger logger = LoggerFactory.getLogger(GroupTopicController.class);

	@Resource
	private GroupTopicService groupTopicService;


	/**
	 * Exports the group-topic evaluation data to a local text file
	 * ({filePath}/groupTopic_yyyyMMdd.txt, one record per line, CRLF, UTF-8),
	 * uploads it to the HDFS input path, and deletes the local copy on success.
	 *
	 * @return the result string from {@code HDFSUtil.copyTxtToHdfs}, the literal
	 *         {@code "file not exists"} when the local file is missing, or
	 *         {@code null} when the upload step threw before assigning a result
	 */
	@RequestMapping("/groupTopicData")
	@ResponseBody
	public String addData(){
		List<String> data = groupTopicService.addData();

		String fileName = "groupTopic_"+DateUtils.getDateFormat("yyyyMMdd");
		String path = PropertiesUtil.getProperties("filePath");// local base directory
		File dir = new File(path);
		// Create the target directory if it does not exist yet.
		if(!dir.exists()){
			dir.mkdirs();
		}
		File dataFile = new File(path+fileName+".txt");
		// Create the data file if it does not exist yet.
		if(!dataFile.exists()){
			try {
				dataFile.createNewFile();
			} catch (IOException e) {
				// logger.error keeps the full stack trace; printStackTrace only hit stderr.
				logger.error("failed to create file {}", dataFile.getAbsolutePath(), e);
			}
		}
		// Write the records. try-with-resources closes the file even on failure;
		// the original finally block could NPE when the constructor itself threw.
		try (RandomAccessFile f = new RandomAccessFile(dataFile, "rw")) {
			// Truncate first: a leftover file from an earlier run on the same day
			// would otherwise keep stale trailing bytes beyond what we write now.
			f.setLength(0);
			for (String line : data) {
				f.write((line + "\r\n").getBytes("UTF-8"));
			}
		} catch (Exception e) {
			logger.error("failed to write data file {}", dataFile.getAbsolutePath(), e);
		}
		// Upload the file to HDFS, then remove the local copy.
		String res = null;
		try{
			Configuration conf = HadoopInit.getConfig();
			String filePath = path+fileName+".txt";
			String hdfsPath = PropertiesUtil.getProperties("groupTopicInputPath");
			if(new File(filePath).exists()){
				res = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath, fileName);
				dataFile.delete();
			}else{
				res = "file not exists";
			}
		}catch(Exception e){
			logger.error("failed to upload {} to HDFS", fileName, e);
		}
		return res;
	}


	/**
	 * Paged query of the full-question-set evaluation statistics
	 * (整套题测评统计信息: paging, source, kindergarten id, baby id).
	 *
	 * @param request request whose parameters are passed through as the query map
	 * @return the {@code Page} result serialized to a JSON string
	 */
	@RequestMapping("/topic_queryPage")
	@ResponseBody
	public String queryPage(HttpServletRequest request) {
		Map<String, Object> paraMap = WebUtils.getParametersStartingWith(request, "");
		Page page = groupTopicService.queryPage(paraMap);
		JSONObject fromObject = JSONObject.fromObject(page);
		return String.valueOf(fromObject);
	}


	/**
	 * Launches the group-topic MapReduce job over today's HDFS input file
	 * ({hdfsInputPath}yyyyMMdd.txt), writing to the configured output path.
	 *
	 * @return an empty string (the job result is not reported to the caller)
	 */
	@RequestMapping("/quu")
	@ResponseBody
	public String quu(){
		Configuration conf = HadoopInit.getConfig();
		String jobName = "dataClear22";
		String input = PropertiesUtil.getProperties("hdfsInputPath")+DateUtils.getDateFormat("yyyyMMdd")+".txt";
		String output = PropertiesUtil.getProperties("hdfsOutputPath");
		CommonJob.commonJobStart(conf, jobName, input, output, GrouptopicMap.class, GrouptopicReduce.class, AlphabetOutputFormat.class);
		return "";
	}

}
