package com.lvmama.rhino.job.comment;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.mortbay.log.Log;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;

import com.lvmama.rhino.job.BaseQuartzJobBean;
import com.lvmama.rhino.pojo.comment.entity.CommentDetail;
import com.lvmama.rhino.service.comment.SaveDataService;

/**
 * Quartz job that imports the current day's comment-review partition from the
 * HDFS Hive warehouse and persists new/updated comment rows to the database.
 *
 * @author shenlinli
 * @ClassName: SaveHdfsByDayJob
 * @date 2017年9月4日 下午6:19:19
 */

public class SaveHdfsByDayJob extends BaseQuartzJobBean  {

	@Autowired
	private SaveDataService saveDataService;

/*	@Value("${beginDate}")
	private String beginDate="2017-09-01 00:00:00";
	
	@Value("${endDate}")
	private String endDate="2017-10-31 00:00:00";*/
	
	//private final static Executor executor = Executors.newCachedThreadPool();// 启用多线程
	//@Scheduled(cron = "#{commentJobProperty[scheduledjobtimeHdfstime]}")
	public void invoke(JobExecutionContext context) {
		/*Date yesterday = DateUtil.getDateBeforeToday(new Date());
		Date date1 = new SimpleDateFormat("yyyyMMdd").parse(beginDate);
		Date date2 = new SimpleDateFormat("yyyyMMdd").parse(endDate);
		int num = DateUtil.getTwoDayInterval(date2, date1, true);*/
		String today = new SimpleDateFormat("yyyyMMdd").format(new Date());
		Log.info("hdfs文件夹日期："+today);
		//for (int i = 0; i < num + 1; i++) {
			//final int j = i;
			//int date = Integer.parseInt(DateUtil.getStrDate(j, date1));
		Log.info("开始获取hdfs文件夹日期为："+today);
		/*/user/hive/warehouse/review.db/comment_review_detail/dt_id=*/
		String uri = "/user/hive/warehouse/dm.db/comment_review_detail/par_day="
				+ today;
		Configuration conf = new Configuration();
		try {
			FileSystem hdfs = FileSystem.get(URI.create(uri), conf);
			Path path = new Path(uri);
			if(hdfs.isDirectory(path)){
				FileStatus[] fs = hdfs.listStatus(path);
				if (fs.length > 0) { 
					final Path[] paths = FileUtil.stat2Paths(fs);
					/*executor.execute(new Runnable() {
						@Override
						public void run() {*/
					try {
						Log.info("开始处理文件："+today);
						saveData(paths[0].toString());
					} catch (Exception e) {
						e.printStackTrace();
					}
					/*	}
					});*/
				}else{
					Log.info(today + "中不存在子文件夹");
				}
			}else{
				Log.info("文件夹为不存在："+today);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * @Title: saveData
	 * @Description: TODO(这里用一句话描述这个方法的作用)
	 * @param @param path 设定文件
	 * @return void 返回类型
	 * @throws
	 */
	public void saveData(String path) {
		List<CommentDetail> commentList = new ArrayList<CommentDetail>();
		List<CommentDetail> updateList = new ArrayList<CommentDetail>();
		try {
			Log.info("io开始---》读取文件夹路径："+path);
			commentList = getDataFromHdfs(path).get("commentList");
			updateList = getDataFromHdfs(path).get("updateList");
		} catch (Exception e1) {
			e1.printStackTrace();
		}
		/*synchronized (SaveDataService.class) {*/
			if (commentList.size() > 0) {
				Log.info("insert---------------------->>>>>>>>>>>>>>");
				saveDataService.insertCommentList(commentList);
			}
			if (updateList.size() > 0) {
				Log.info("update---------------------->>>>>>>>>>>>>>");
				saveDataService.updateCommentList(updateList);
			}
			Log.info("io结束----》读取文件夹路径："+path);
		/*}*/
	}

	/**
	 * @Title: getFs
	 * @Description: TODO(这里用一句话描述这个方法的作用)
	 * @param @return
	 * @param @throws Exception 设定文件
	 * @return FileSystem 返回类型
	 * @throws
	 */
	public FileSystem getFs() throws Exception {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		return fs;
	}

	/**
	 * @Title: getDataFromHdfs
	 * @Description: 从hdfs上读取数据
	 * @param @param path
	 * @param @return
	 * @param @throws Exception 设定文件
	 * @return List<CommentDetail> 返回类型
	 * @throws
	 */
	public Map<String, List<CommentDetail>> getDataFromHdfs(String path)
			throws Exception {
		BufferedReader bufferedReader = null;
		String lineTxt = null;
		List<CommentDetail> commentList = new ArrayList<CommentDetail>();
		List<CommentDetail> updateList = new ArrayList<CommentDetail>();
		Map<String, List<CommentDetail>> map = new HashMap<String, List<CommentDetail>>();
		try {
			FileSystem fs = this.getFs();
			Path readPath = new Path(path);
			FSDataInputStream in = fs.open(readPath);
			bufferedReader = new BufferedReader(new InputStreamReader(in));
			while ((lineTxt = bufferedReader.readLine()) != null) {
				char separatorChar = (char) Integer.parseInt("001"); // 分隔符CHAR
				String separator = String.valueOf(separatorChar);
				String[] str = lineTxt.split(separator);
				CommentDetail comment = new CommentDetail();
				comment.setCommentId(Long.parseLong(("\\N".equals(str[5])) ? "0"
						: str[5]));
				//根据commentId判断是否为点赞更新数据
				if (saveDataService.queryByCommentId(comment).size() > 0) {
					Log.info("update-------------------->>>>>>>>>>>>>>>>>>>>>>>>>>点赞更新数据");
					comment.setZanNum(Math.round(Double.parseDouble(str[7])));
					updateList.add(comment);
				} else {
					Log.info("update-------------------->>>>>>>>>>>>>>>>>>>>>>>>>>新评论数据");
					comment = arrayToObject(str);
					commentList.add(comment);
				}
			}
			map.put("commentList", commentList);
			map.put("updateList", updateList);
			Log.info("getDataFromHdfs：获取评论数量" + commentList.size() +";更新条数"+updateList.size());
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			if (bufferedReader != null) {
				try {
					bufferedReader.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return map;
	}

	public static CommentDetail arrayToObject(String[] dataStr) {
		CommentDetail comment = new CommentDetail();
		long productId = Long.parseLong(("\\N".equals(dataStr[0].trim())) ? "0"
				: dataStr[0].trim());
		String productName = "";
		if (StringUtils.isNotEmpty(dataStr[1].trim())
				&& (!"null".equals(dataStr[1].trim()))) {
			productName = dataStr[1].trim();
		}
		long orderId = Long.parseLong(("\\N".equals(dataStr[2].trim())) ? "0"
				: dataStr[2].trim());

		if (!(productId == 0 && orderId == 0 && StringUtils
				.isEmpty(productName))) {
			comment.setProductId(productId);
			comment.setProductName(productName);
			comment.setOrderId(orderId);
			comment.setCategoryId(("\\N".equals(dataStr[3].trim())) ? "0"
					: dataStr[3].trim());
			comment.setCommentId(Long.parseLong(("\\N".equals(dataStr[5].trim())) ? "0"
					: dataStr[5].trim()));
			comment.setContent(("\\N".equals(dataStr[6].trim())) ? ""
					: dataStr[6].trim());
			comment.setZanNum(Math.round(Double.parseDouble(dataStr[7])));
			try {
				if (!"\\N".equals(dataStr[8].trim())) {
					comment.setOperTime(new SimpleDateFormat("yyyy-MM-dd")
							.parse(dataStr[8].trim()));
				}
				if (!"\\N".equals(dataStr[4].trim())) {
					comment.setCreateDate(new SimpleDateFormat(
							"yyyy-MM-dd HH:mm:ss").parse(dataStr[4].trim()));
				}
			} catch (ParseException e) {
				e.printStackTrace();
			}
			return comment;
		}
		return null;
	}
}
