package com.mall.manager.controller;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.job.tfirstgardenstatistics.FirstGardenStatisticsMap;
import com.mall.hadoop.job.tfirstgardenstatistics.FirstGardenStatisticsReduce;
import com.mall.hadoop.job.tfirstgardenstatistics.FirstLoginTimeMap;
import com.mall.hadoop.job.tfirstgardenstatistics.FirstLoginTimeReduce;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.manager.service.FirstGardenStatisticsService;
import com.mall.untils.DateUtils;
import com.mall.untils.PropertiesUtil;
/**
 * Nationwide garden first-login statistics controller: runs the
 * first-login MapReduce jobs and uploads source data from MongoDB to HDFS.
 * @author liugb
 * @date 2016-07-13
 */
@Controller
@RequestMapping("/manager")
public class FirstGardenStatisticsController {

	// One shared logger per class (static final), not one per instance.
	private static final Logger logger = LoggerFactory.getLogger(FirstGardenStatisticsController.class);

	@Resource
	FirstGardenStatisticsService firstGardenStatisticsService;

	/**
	 * Runs the nationwide first-login statistics MapReduce chain: first the
	 * first-login-time job, then (only if it reports "success") the
	 * per-garden aggregation job writing to the final HDFS path.
	 *
	 * @return the status string of the last job that ran (e.g. "success"),
	 *         or the first job's status when the chain stopped early
	 */
	@RequestMapping("/firstGardenStatisticsClear")
	@ResponseBody
	public String firstGardenStatisticsClear(){
		Configuration conf = HadoopInit.getConfig();
		String jobName = "firstGardenStatistics";
		String input = PropertiesUtil.getProperties("firstGardenStatisticsHdfsInputPath")
				+ "firstGardenStatistics_" + DateUtils.getDateFormat("yyyyMMdd") + ".txt";
		String output = PropertiesUtil.getProperties("firstGardenStatisticsHdfsOutputPath");
		String res = CommonJob.commonJobStart(conf, jobName, input, output,
				FirstLoginTimeMap.class, FirstLoginTimeReduce.class, AlphabetOutputFormat.class);
		try {
			// "success".equals(res) is null-safe; res.equals("success") would
			// throw NPE if the first job returned null.
			if ("success".equals(res)) {
				res = CommonJob.commonJobStart(conf, jobName, output + "/info" + ".txt",
						PropertiesUtil.getProperties("hdfsPath"),
						FirstGardenStatisticsMap.class, FirstGardenStatisticsReduce.class,
						AlphabetOutputFormat.class);
			}
		} catch (Exception e) {
			// Keep the full stack trace in the log instead of printStackTrace().
			logger.error("second stage of firstGardenStatistics job failed", e);
		}
		return res;
	}

	/**
	 * Queries the first-login records from MongoDB, writes them to a local
	 * text file (one record per line, UTF-8, CRLF endings), uploads that file
	 * to HDFS, and deletes the local copy after a successful upload.
	 *
	 * @return the result of {@code HDFSUtil.copyTxtToHdfs}, the literal
	 *         "file not exists" when the local file is missing, or
	 *         {@code null} when the upload step threw an exception
	 */
	@RequestMapping("/findfirstGardenStatistics")
	@ResponseBody
	public String findfirstGardenStatistics(){
		List<String> records = new ArrayList<String>();
		// Fetch first-login records from MongoDB via the service layer.
		List<String> findFirstGarden = firstGardenStatisticsService.findFirstGardenStatistics();
		records.addAll(findFirstGarden);

		String fileName = "firstGardenStatistics_" + DateUtils.getDateFormat("yyyyMMdd");
		String path = PropertiesUtil.getProperties("filePath"); // local staging directory
		File dir = new File(path);
		// Create the staging directory; mkdirs() returning false is worth a warning.
		if (!dir.exists() && !dir.mkdirs()) {
			logger.warn("could not create local directory {}", path);
		}
		File dataFile = new File(path + fileName + ".txt");
		if (!dataFile.exists()) {
			try {
				dataFile.createNewFile();
			} catch (IOException e) {
				logger.error("failed to create local file " + dataFile, e);
			}
		}
		// try-with-resources guarantees the handle is closed and avoids the
		// NPE the old finally block risked when the open itself failed.
		try (RandomAccessFile raf = new RandomAccessFile(dataFile, "rw")) {
			for (String record : records) {
				raf.write((record + "\r\n").getBytes("UTF-8"));
			}
		} catch (Exception e) {
			logger.error("failed to write records to " + dataFile, e);
		}
		// Upload the staged file to HDFS.
		String res = null;
		try {
			Configuration conf = HadoopInit.getConfig();
			String filePath = path + fileName + ".txt";
			String hdfsPath = PropertiesUtil.getProperties("firstGardenStatisticsHdfsInputPath");
			if (new File(filePath).exists()) {
				res = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath, fileName);
				// Best-effort cleanup of the local staging file.
				if (!dataFile.delete()) {
					logger.warn("could not delete temporary file {}", dataFile);
				}
			} else {
				res = "file not exists";
			}
		} catch (Exception e) {
			logger.error("failed to upload " + fileName + " to HDFS", e);
		}
		return res;
	}

}
