package com.mall.manager.controller;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;

import javax.annotation.Resource;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.job.diffpeople.DiffPeopleMap;
import com.mall.hadoop.job.diffpeople.DiffPeopleReduce;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.DiffPeopleService;
import com.mall.untils.DateUtils;
import com.mall.untils.LoggerUtil;
import com.mall.untils.PropertiesUtil;


/**
 * Controller for nationwide people-classification statistics.
 * Exposes two endpoints: one that stages MongoDB query results as a text
 * file on HDFS, and one that runs the MapReduce aggregation job over it.
 *
 * @ClassName: DiffPeopleController
 * @author:wangwenyue wangwenyue@ide365.com
 * @date 2016年7月12日 下午2:17:59
 */
@RequestMapping("/manager")
@Controller
public class DiffPeopleController {
	
	Logger logger = LoggerUtil.getLogger(DiffPeopleController.class);
	@Resource
	private DiffPeopleService diffPeopleService;
	
	/**
	 * Runs the map statistics MapReduce job and saves the result to RDS.
	 * Reads today's staged input file (diffPeo_yyyyMMdd.txt) from HDFS.
	 *
	 * @return the job result string produced by {@code CommonJob.commonJobStart}
	 */
	@RequestMapping("/diffPeoClear")
	@ResponseBody
	public String diffPeoClear(){
		// Must match the file name produced by findDiffPeo() on the same day.
		String fileName = "diffPeo_" + DateUtils.getDateFormat("yyyyMMdd");
		Configuration conf = HadoopInit.getConfig();
		String jobName = "diffPeo";
		String input = PropertiesUtil.getProperties("diffPeoInputPath") + fileName + ".txt";
		String output = PropertiesUtil.getProperties("diffPeoOutputPath");
		return CommonJob.commonJobStart(conf, jobName, input, output,
				DiffPeopleMap.class, DiffPeopleReduce.class, AlphabetOutputFormat.class);
	}
	
	/**
	 * Queries MongoDB data and uploads it to HDFS as a UTF-8, CRLF-delimited
	 * text file named diffPeo_yyyyMMdd.txt. The local staging file is deleted
	 * after a successful upload.
	 *
	 * @return the upload result from {@code HDFSUtil.copyTxtToHdfs},
	 *         "file not exists" if the local file could not be written,
	 *         or null if the HDFS upload itself failed
	 */
	@RequestMapping("/findDiffPeo")
	@ResponseBody
	public String findDiffPeo(){
		List<String> stingList = new ArrayList<String>();
		List<String> findDiffPeo = diffPeopleService.findDiffPeo();   // kindergartens
		List<String> findAllBaby = diffPeopleService.findAllBaby();   // students
		List<String> findAllClass = diffPeopleService.findAllClass(); // classes
		stingList.addAll(findDiffPeo);
		stingList.addAll(findAllClass);
		stingList.addAll(findAllBaby);
		
		String fileName = "diffPeo_" + DateUtils.getDateFormat("yyyyMMdd");
		String path = PropertiesUtil.getProperties("filePath"); // local staging directory
		
		File localFile = writeLocalFile(stingList, path, fileName);
		if (localFile == null) {
			// Do not attempt the upload when the staging file could not be
			// written — the old code would upload an empty/partial file.
			return "file not exists";
		}
		
		// Upload the staged file to HDFS, then remove the local copy.
		String res = null;
		try {
			Configuration conf = HadoopInit.getConfig();
			String hdfsPath = PropertiesUtil.getProperties("diffPeoInputPath");
			res = HDFSUtil.copyTxtToHdfs(conf, localFile.getPath(), hdfsPath, fileName);
			localFile.delete();
		} catch (Exception e) {
			// logger.error keeps the stack trace; logger.info(e.toString()) dropped it
			logger.error("upload to hdfs failed: " + localFile.getPath(), e);
		}
		return res;
	}
	
	/**
	 * Writes the given lines to {@code path + fileName + ".txt"} as UTF-8,
	 * each terminated with CRLF, creating the directory if necessary.
	 *
	 * @param lines    lines to write (one record per element)
	 * @param path     local directory (expected to end with a separator —
	 *                 concatenated as-is, matching the original behavior)
	 * @param fileName base file name without extension
	 * @return the written file, or null on I/O failure
	 */
	private File writeLocalFile(List<String> lines, String path, String fileName) {
		File dir = new File(path);
		if (!dir.exists()) {
			dir.mkdirs();
		}
		File file = new File(path + fileName + ".txt");
		// try-with-resources always closes the handle; the old finally block
		// could NPE when the RandomAccessFile constructor itself threw.
		// "rw" mode also creates the file, so createNewFile() is unnecessary.
		try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) {
			// Truncate first: "rw" writes from offset 0 but does NOT shrink an
			// existing file, so a rerun on the same day could leave stale bytes.
			raf.setLength(0);
			for (String line : lines) {
				raf.write((line + "\r\n").getBytes("UTF-8"));
			}
			return file;
		} catch (IOException e) {
			logger.error("failed to write local file: " + file.getPath(), e);
			return null;
		}
	}
	
}
