package com.surfilter.massdata.spark.util;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.DataFrame;

import com.act.sparkanalyz.input.ISparkInput;
import com.act.sparkanalyz.service.impl.SparkService.ServiceContext;
import com.surfilter.massdata.spark.input.HiveSparkInput;

/**
 * Utility input class that extracts the partition timestamp from the
 * command-line argument map used by the Spark jobs in this package.
 *
 * <p>NOTE(review): consider migrating to {@code java.time.LocalDateTime} /
 * {@code DateTimeFormatter}; {@link SimpleDateFormat} is not thread-safe and
 * must not be shared across threads (each call here creates its own instance,
 * which is safe but wasteful).
 */
public class GetTime implements ISparkInput {

	private static final Log log = LogFactory.getLog(GetTime.class);

	/**
	 * Not implemented: this class is only used for its static
	 * {@link #getPartitionTime(Map)} helper. Always returns {@code null}.
	 */
	@Override
	public Map<String, DataFrame> read(ServiceContext serviceContext,
			Map<String, String> commandMap) throws Exception {
		// Intentionally unimplemented; callers use getPartitionTime(...) instead.
		return null;
	}

	/**
	 * Resolves the partition time from the {@code -d} command-line option.
	 *
	 * <p>The option value is expected in {@code yyyyMMddHH} format (e.g.
	 * {@code 2023100514}). If the option is absent, blank, or fails to parse,
	 * the current time is returned as a fallback and the parse failure is
	 * logged.
	 *
	 * @param commandMap parsed command-line options; the {@code -d} key holds
	 *                   the partition timestamp string (may be absent)
	 * @return the parsed partition time, or "now" when unavailable/invalid
	 */
	public static Date getPartitionTime(Map<String, String> commandMap) {
		Date date = new Date();
		String dayStr = commandMap.get("-d"); // value of the -d option
		if (StringUtils.isNotBlank(dayStr)) {
			SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHH");
			// Strict parsing: without this, lenient mode silently rolls
			// malformed values (e.g. month 13, hour 99) into a wrong-but-valid
			// date instead of surfacing the error via the catch below.
			sdf.setLenient(false);
			try {
				date = sdf.parse(dayStr);
			} catch (ParseException e) {
				if (log.isErrorEnabled()) {
					log.error(e.getMessage(), e);
				}
			}
		}
		return date;
	}

}
