package com.mall.manager.controller;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.List;

import javax.annotation.Resource;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.job.sellgarden.SellGardenMap;
import com.mall.hadoop.job.sellgarden.SellGardenReduce;
import com.mall.hadoop.job.sellstatic.SellStaticMap;
import com.mall.hadoop.job.sellstatic.SellStaticReduce;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.SellStatisticService;
import com.mall.untils.DateUtils;
import com.mall.untils.PropertiesUtil;
import com.mongodb.DBObject;

/**
 * Aggregates nationwide kindergarten ("garden") data by region: exports the
 * MongoDB school collection to a local text file, uploads it to HDFS, runs
 * MapReduce statistics jobs, and loads the results into the RDS database.
 *
 * @author tgy
 */
@RequestMapping("/manager")
@Controller
public class SellStatistiController {

	private static final Logger logger = Logger.getLogger(SellStatistiController.class);

	@Resource
	SellStatisticService sellStatisticService;

	/**
	 * Exports the "php_school" MongoDB collection to a local UTF-8 text file
	 * (one document per line) and uploads it to HDFS. On a successful upload
	 * the local file is deleted and the statistics job is started.
	 *
	 * @return "文件上传成功" when the upload succeeded, otherwise "文件不存在"
	 */
	@RequestMapping("/findGardenInfo")
	@ResponseBody
	public String sellUploadHdfs(){
		String res = "文件不存在";
		String fileName = "school_"+DateUtils.getDateFormat("yyyyMMdd");
		List<DBObject> school = sellStatisticService.findMongodb("php_school");
		if(school!=null&&school.size()>0){
			String path = PropertiesUtil.getProperties("filePath");
			File dir = new File(path);
			// Create the target directory if it does not exist yet.
			if(!dir.exists()){
				dir.mkdirs();
			}
			File dataFile = new File(path+fileName+".txt");
			// Create the data file; without it there is nothing to write or upload.
			if(!dataFile.exists()){
				try {
					dataFile.createNewFile();
				} catch (IOException e) {
					logger.error("创建文件："+fileName+".txt出现异常："+e.getMessage(), e);
					// BUGFIX: the old code fell through and tried to write to a
					// file that was never created.
					return res;
				}
			}
			// Write one MongoDB document per line, UTF-8 encoded, CRLF-terminated.
			RandomAccessFile f = null;
			try {
				f = new RandomAccessFile(dataFile, "rw");
				for(DBObject d:school){
					String str = d.toString()+"\r\n";
					f.write(str.getBytes("UTF-8"));
				}
			} catch (Exception e) {
				logger.error("写入数据到文件："+fileName+".txt出现异常："+e.getMessage(), e);
			}finally{
				// BUGFIX: f is null when the RandomAccessFile constructor threw;
				// the old code raised an NPE inside this finally block.
				if(f!=null){
					try {
						f.close();
					} catch (IOException e) {
						logger.error("关闭文件流出现异常："+e.getMessage(), e);
					}
				}
			}
			// Upload the generated file to HDFS.
			try{
				Configuration conf = HadoopInit.getConfig();
				String filePath = path+fileName+".txt";
				if(new File(filePath).exists()){
					String hdfsPath = PropertiesUtil.getProperties("dataHdfsInputPath");
					// BUGFIX: the old code tested the pre-initialized (never null)
					// `res` instead of the actual upload result.
					String copyRes = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath,fileName);
					if(copyRes!=null){
						// Upload succeeded: remove the local copy and start the job.
						dataFile.delete();
						sellStaticClear();
						// BUGFIX: success was previously reported unconditionally,
						// even when the upload failed or threw.
						res = "文件上传成功";
					}
				}else{
					logger.info("文件"+fileName+".txt"+"不存在");
				}
			}catch(Exception e){
				logger.error("上传文件："+fileName+".txt到hdfs出现异常："+e.getMessage(), e);
			}
		}
		return res;
	}

	/**
	 * Counts gardens per region: runs the sellStatic MapReduce job over
	 * today's school file on HDFS, then (on success) loads the result into RDS.
	 *
	 * @return the job result from {@code CommonJob.commonJobStart} /
	 *         {@link #insertRds}, or "文件不存在" when the job could not run
	 */
	@RequestMapping("/sellClear")
	@ResponseBody
	public String sellStaticClear(){
		String res = "文件不存在";
		try{
			String fileName = "school_"+DateUtils.getDateFormat("yyyyMMdd")+".txt";
			Configuration conf = HadoopInit.getConfig();
			String jobName = "sellStatic";
			String input = PropertiesUtil.getProperties("dataHdfsInputPath")+fileName;
			String output = PropertiesUtil.getProperties("sellHdfsOutputPath");
			res = CommonJob.commonJobStart(conf, jobName, input, output, SellStaticMap.class, SellStaticReduce.class, AlphabetOutputFormat.class);
			// When the analysis succeeded, load the output into the RDS database.
			if("success".equals(res)){
				res = insertRds(jobName,output+fileName);
			}
		}catch(Exception e){
			logger.error("按地区统计园所数量出现异常："+e.getMessage(), e);
		}
	    return res;
	}

	/**
	 * Loads a garden-statistics result file from HDFS into the RDS database
	 * via the sellGarden MapReduce job.
	 *
	 * @param jobName  name of the MapReduce job to run
	 * @param fileName HDFS path of the input (statistics output) file
	 * @return the job result from {@code CommonJob.commonJobStart},
	 *         or "文件不存在" when the job could not run
	 */
	@RequestMapping("/insertRdsSell")
	@ResponseBody
	public String insertRds(String jobName,String fileName){
		String res = "文件不存在";
		try{
			Configuration conf = HadoopInit.getConfig();
			res = CommonJob.commonJobStart(conf, jobName, fileName, PropertiesUtil.getProperties("hdfsPath"), SellGardenMap.class, SellGardenReduce.class, AlphabetOutputFormat.class);
		}catch(Exception e){
			logger.error("统计园所数量插入数据库出现异常："+e.getMessage(), e);
		}
		return res;
	}
}















