package com.mall.manager.controller;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import com.mall.hadoop.format.AlphabetOutputFormat;
import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.job.comjob.CommonJob;
import com.mall.hadoop.job.dataclear.DataSourceMap;
import com.mall.hadoop.job.dataclear.DataSourceReduce;
import com.mall.untils.DateUtils;
import com.mall.untils.PropertiesUtil;

/**
 * 源数据清洗，保存mongodb
 * @author tgy
 *
 */
@RequestMapping("/manager")
@Controller
public class DataClearController {

	/** Class-scoped logger; static final per log4j convention (one instance per class, immutable). */
	private static final Logger logger = Logger.getLogger(DataClearController.class);

	/**
	 * Entry point for source-data cleansing: submits a MapReduce job that reads the
	 * current day's raw file from HDFS and writes the cleansed output
	 * (results are persisted to MongoDB downstream — see class header).
	 *
	 * <p>Input path is {@code hdfsInputPath + yyyyMMdd + ".txt"}; output path is
	 * {@code hdfsOutputPath}, both resolved from the properties file.
	 *
	 * @return the job result string from {@link CommonJob#commonJobStart}, or the
	 *         default "文件不存在" ("file does not exist") if the job could not be started
	 */
	@RequestMapping("/dataClear")
	@ResponseBody
	public String dataSourceClear() {
		String res = "文件不存在";
		try {
			Configuration conf = HadoopInit.getConfig();
			String jobName = "dataClear";
			// Input file is named by today's date, e.g. .../20240101.txt
			String input = PropertiesUtil.getProperties("hdfsInputPath") + DateUtils.getDateFormat("yyyyMMdd") + ".txt";
			String output = PropertiesUtil.getProperties("hdfsOutputPath");
			res = CommonJob.commonJobStart(conf, jobName, input, output,
					DataSourceMap.class, DataSourceReduce.class, AlphabetOutputFormat.class);
		} catch (Exception e) {
			// Log at ERROR with the full throwable: logging only e.getMessage() at INFO
			// discarded the stack trace and could even log "null" for message-less exceptions.
			logger.error("数据清洗出现异常", e);
		}
		return res;
	}
}




