package com.mall.manager.controller;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.allpeoplestatistics.AllPeopleMap;
import com.mall.hadoop.job.allpeoplestatistics.AllPeopleReduce;
import com.mall.hadoop.job.allpeoplestatistics.AllpeopleStatisticsMap;
import com.mall.hadoop.job.allpeoplestatistics.AllpeopleStatisticsReduce;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.AllPeopleStatisticsService;
import com.mall.untils.DateUtils;
import com.mall.untils.PropertiesUtil;
/**
 * @全球数量统计
 * @author liugb
 * @date 2016 7 14
 */
@RequestMapping("/manager")
@Controller
public class AllPeopleStatisticsController {
	Logger logger = LoggerFactory.getLogger(AllPeopleStatisticsController.class);
	@Resource
	AllPeopleStatisticsService allPeopleStatisticsService;
	
	/**
	 * 单题按天统计
	 * @return
	 */
	@RequestMapping("/allPeopleClear")
	@ResponseBody
	public String allPeopleClear(){
		Configuration conf = HadoopInit.getConfig();
		String jobName = "allPeople";
		String input = PropertiesUtil.getProperties("allpeopleHdfsInputPath")+"allpeople_"+DateUtils.getDateFormat("yyyyMMdd")+".txt";
		String output = PropertiesUtil.getProperties("allpeopleHdfsOutputPath");
		String res = CommonJob.commonJobStart(conf, jobName, input, output,AllpeopleStatisticsMap.class, AllpeopleStatisticsReduce.class, AlphabetOutputFormat.class);
		if(res.equals("success")){
			res = CommonJob.commonJobStart(conf, jobName, output+"/school_"+DateUtils.getDateFormat("yyyyMMdd")+".txt", PropertiesUtil.getProperties("hdfsPath"), AllPeopleMap.class, AllPeopleReduce.class, AlphabetOutputFormat.class);
		}
		return res;
	}
	
	/**
	 * 查询monogdb数据并上传到hdfs
	 * @return
	 */
	@RequestMapping("/findallpeople")
	@ResponseBody
	public String findallpeople(){		
		List<String> stingList = new ArrayList<String>();
		List<String> findschool = allPeopleStatisticsService.findAllSchool(); 
		List<String> findbody = allPeopleStatisticsService.findAllBaby();
		List<String> findTeacher = allPeopleStatisticsService.findAllClass();
		stingList.addAll(findschool);
		stingList.addAll(findbody);
		stingList.addAll(findTeacher);
		String fileName = "allpeople_"+DateUtils.getDateFormat("yyyyMMdd");
		String path = PropertiesUtil.getProperties("filePath");//本地路径
		File file1 = new File(path);
		//创建目录
		if(!file1.exists()){
			file1.mkdirs();
		}
		File file2 = new File(path+fileName+".txt");
		//创建文件
		if(!file2.exists()){
			try {
				file2.createNewFile();
			} catch (IOException e) {
				e.printStackTrace();
				logger.info(e.toString());
			}
		}
		//写入数据
		RandomAccessFile f = null;
		try {
			f = new RandomAccessFile(file2, "rw");
			for (String res1 : stingList) {
				res1 +="\r\n";
				f.write(res1.getBytes("UTF-8"));
			}
		} catch (Exception e) {
			e.printStackTrace();
			logger.info(e.toString());
		}finally{
			try {
				f.close();
			} catch (IOException e) {
				e.printStackTrace();
				logger.info(e.toString());
			}
		}
		//开始上传数据到hdfs
		String res = null;
		try{
			Configuration conf = HadoopInit.getConfig();
			String filePath = path+fileName+".txt";
			String hdfsPath = PropertiesUtil.getProperties("allpeopleHdfsInputPath");
			if(new File(filePath).exists()){				
				res = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath,fileName);
				file2.delete();
			}else{
				res = "file not exists";
			}
		}catch(Exception e){
			e.printStackTrace();
			logger.info(e.toString());
		}
		return res;
	}
	
}
