package org.study.bjsxt.spark.util;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.study.bjsxt.spark.conf.ConfigurationManager;
import org.study.bjsxt.spark.constant.Constants;
import org.study.spark.spark.test.MockData;

import com.alibaba.fastjson.JSONObject;

/**
 * Spark helper utilities: choosing the master, creating the proper
 * SQLContext, generating mock data for local runs, and querying the
 * {@code monitor_flow_action} table for a date range (optionally filtered
 * by a list of car plates).
 *
 * @author Administrator
 */
public class SparkUtils {

	/** Utility class — not instantiable. */
	private SparkUtils() {
	}

	/**
	 * Decides how to set the SparkConf master based on the {@code spark.local}
	 * configuration flag: when true the job is a local test and runs with an
	 * in-process master; otherwise the master is left untouched so that
	 * spark-submit can supply it.
	 *
	 * @param conf the SparkConf to configure
	 */
	public static void setMaster(SparkConf conf) {
		boolean local = ConfigurationManager.getBoolean(Constants.SPARK_LOCAL);
		if (local) {
			conf.setMaster("local[3]");
		}
	}

	/**
	 * Creates the SQL context appropriate for the run mode: a plain
	 * {@link SQLContext} when {@code spark.local} is true (local testing
	 * against registered mock temp tables), otherwise a {@link HiveContext}
	 * so real Hive tables can be queried directly.
	 *
	 * @param sc the JavaSparkContext backing the context
	 * @return a SQLContext for local testing, a HiveContext otherwise
	 */
	public static SQLContext getSQLContext(JavaSparkContext sc) {
		boolean local = ConfigurationManager.getBoolean(Constants.SPARK_LOCAL);
		if (local) {
			// Local test mode only needs temp tables built from mock data.
			return new SQLContext(sc);
		} else {
			// Cluster mode reads real Hive tables.
			return new HiveContext(sc);
		}
	}

	/**
	 * Generates mock data (RDD -> DataFrame -> registered temp table) when
	 * {@code spark.local} is true; does nothing otherwise, since in cluster
	 * mode the HiveContext operates on real Hive tables.
	 *
	 * @param sc         the JavaSparkContext used to build the mock RDDs
	 * @param sqlContext the context the temp tables are registered on
	 */
	public static void mockData(JavaSparkContext sc, SQLContext sqlContext) {
		boolean local = ConfigurationManager.getBoolean(Constants.SPARK_LOCAL);
		if (local) {
			MockData.mock(sc, sqlContext);
		}
	}

	/**
	 * Returns the camera (monitor) rows whose date falls within the range
	 * given by {@code Constants.PARAM_START_DATE} / {@code PARAM_END_DATE}
	 * in the task parameters.
	 *
	 * @param sqlContext           context used to run the query
	 * @param taskParamsJsonObject task parameters carrying the date range
	 * @return the matching rows of {@code monitor_flow_action} as a JavaRDD
	 */
	public static JavaRDD<Row> getCameraRDDByDateRange(SQLContext sqlContext, JSONObject taskParamsJsonObject) {
		String startDate = ParamUtils.getParam(taskParamsJsonObject, Constants.PARAM_START_DATE);
		String endDate = ParamUtils.getParam(taskParamsJsonObject, Constants.PARAM_END_DATE);

		// sqlContext.sql() offers no bind parameters, so every value spliced
		// into the statement is quote-escaped to avoid breaking/injecting SQL.
		String sql = "SELECT * FROM monitor_flow_action "
				+ "WHERE date>='" + escapeSqlLiteral(startDate) + "' "
				+ "AND date<='" + escapeSqlLiteral(endDate) + "'";

		Dataset<Row> monitorDF = sqlContext.sql(sql);

		// A repartition here (e.g. .repartition(1000)) would raise the
		// parallelism of downstream stages if needed.
		return monitorDF.javaRDD();
	}

	/**
	 * Returns the camera (monitor) rows within the date range that also match
	 * one of the comma-separated car plates in {@code Constants.FIELD_CARS}.
	 * When no cars are supplied, falls back to the pure date-range query.
	 *
	 * @param sqlContext           context used to run the query
	 * @param taskParamsJsonObject task parameters with the date range and cars
	 * @return the matching rows of {@code monitor_flow_action} as a JavaRDD
	 */
	public static JavaRDD<Row> getCameraRDDByDateRangeAndCars(SQLContext sqlContext, JSONObject taskParamsJsonObject) {
		String startDate = ParamUtils.getParam(taskParamsJsonObject, Constants.PARAM_START_DATE);
		String endDate = ParamUtils.getParam(taskParamsJsonObject, Constants.PARAM_END_DATE);
		String cars = ParamUtils.getParam(taskParamsJsonObject, Constants.FIELD_CARS);

		// No car filter supplied: avoid an NPE on split / an invalid empty
		// "IN ()" clause and just filter by date.
		if (cars == null || cars.isEmpty()) {
			return getCameraRDDByDateRange(sqlContext, taskParamsJsonObject);
		}

		String[] carArr = cars.split(",");
		StringBuilder sql = new StringBuilder()
				.append("SELECT * FROM monitor_flow_action ")
				.append("WHERE date>='").append(escapeSqlLiteral(startDate)).append("' ")
				.append("AND date<='").append(escapeSqlLiteral(endDate)).append("' ")
				.append("AND car IN (");
		for (int i = 0; i < carArr.length; i++) {
			if (i > 0) {
				sql.append(",");
			}
			sql.append("'").append(escapeSqlLiteral(carArr[i])).append("'");
		}
		sql.append(")");

		System.out.println("sql:" + sql);
		Dataset<Row> monitorDF = sqlContext.sql(sql.toString());

		// A repartition here (e.g. .repartition(1000)) would raise the
		// parallelism of downstream stages if needed.
		return monitorDF.javaRDD();
	}

	/**
	 * Escapes single quotes for safe splicing into a single-quoted SQL string
	 * literal; null becomes the empty string. (Needed because
	 * {@code sqlContext.sql()} has no bind-parameter support.)
	 */
	private static String escapeSqlLiteral(String value) {
		return value == null ? "" : value.replace("'", "''");
	}

}
