package com.mall.manager.controller;

import java.io.File;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.sf.json.JSONObject;

import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import com.mall.hadoop.init.HadoopInit;
import com.mall.hadoop.utils.HDFSUtil;
import com.mall.untils.DateUtils;
import com.mall.untils.LoggerUtil;
import com.mall.untils.PropertiesUtil;

/**
 * Data-gathering controller: receives collected data over HTTP and
 * uploads the resulting local log file into HDFS.
 *
 * @author tgy
 */
@Controller
@RequestMapping("/manager")
public class GatherController {

	// Single class-level logger instead of constructing one per request.
	private static final Logger LOGGER = LoggerUtil.getLogger(GatherController.class);

	/**
	 * Receives gathered data as a raw JSON request body, parses it and logs it.
	 *
	 * @param request  current HTTP request (unused; kept for interface compatibility)
	 * @param response current HTTP response (unused; kept for interface compatibility)
	 * @param res      raw request body expected to be a JSON object
	 * @return {@code "success"} when the body parses as JSON, otherwise {@code "error"}
	 */
	@ResponseBody
	@RequestMapping("/getData")
	public String getDataForGather(@RequestBody String res, HttpServletRequest request, HttpServletResponse response) {
		try {
			JSONObject body = JSONObject.fromObject(res);
			LOGGER.info(body.toString());
			return "success";
		} catch (Exception e) {
			// Log the cause instead of silently swallowing the parse failure.
			LOGGER.error("Failed to parse gathered data as JSON", e);
			return "error";
		}
	}

	/**
	 * Uploads the local log file (named by yesterday's date, per
	 * {@code DateUtils.getNowBefor()}) to the configured HDFS input path.
	 *
	 * @return the HDFS copy result from {@link HDFSUtil#copyTxtToHdfs}, or the
	 *         default message {@code "文件不存在"} when the local file is missing
	 *         or an error occurred
	 */
	@RequestMapping("/uploadHdfs")
	@ResponseBody
	public String uploadHDFS() {
		String res = "文件不存在";
		try {
			Configuration conf = HadoopInit.getConfig();
			String filePath = PropertiesUtil.getProperties("fileLocalPath") + DateUtils.getNowBefor() + ".log";
			File file = new File(filePath);
			if (file.exists()) {
				String hdfsPath = PropertiesUtil.getProperties("hdfsInputPath");
				res = HDFSUtil.copyTxtToHdfs(conf, filePath, hdfsPath, DateUtils.getDateFormat("yyyyMMdd"));
				// TODO(review): original code had a commented-out file.delete() after a
				// successful upload — confirm whether the local log should be removed.
			}
		} catch (Exception e) {
			// Log with full stack trace through the logger instead of printStackTrace().
			LOGGER.error("Failed to upload log file to HDFS", e);
		}
		return res;
	}
}










