package cn.item.buckle;

import cn.item.buckle.confg.ConfigEnum;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads the traffic-monitoring data sets into Hive: creates the target
 * database and tables, then runs LOAD DATA for the flow-action and
 * camera-info files. Source locations and table names come from {@code ConfigEnum}.
 *
 * Submit with:
 * ./spark-submit --master spark://node00:7077 --class cn.item.buckle.Data2Hive ../jars/hadoop-test-1.0-SNAPSHOT.jar 10
 */
public class Data2Hive {

	private static final Logger log = LoggerFactory.getLogger(Data2Hive.class);

	/**
	 * Entry point. Builds a Hive-enabled SparkSession, recreates the two
	 * monitoring tables and loads their data files, then stops the session.
	 *
	 * @param args unused; all configuration is read from {@code ConfigEnum}
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf();
		// Master is taken from config (standalone cluster); switch the config
		// value for yarn-cluster or local runs.
		conf.setMaster(ConfigEnum.SPARK_STANDALONE_ADDR.getValue())
				.setAppName("save_data_to_hive");
		SparkSession sparkSession = SparkSession.builder()
				.config(conf)
				.config("hive.metastore.uris", ConfigEnum.HIVE_METASTORE_ADDR.getValue())
				// Disable Hive authorization checks for this batch load.
				.config(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.toString(), false)
				.config("spark.sql.warehouse.dir", ConfigEnum.HIVE_WAREHOUSE_DIR.getValue())
				.enableHiveSupport().getOrCreate();
		// Ensure the session is stopped even when one of the SQL statements
		// fails; otherwise the driver leaks its cluster connection.
		try {
			sparkSession.sql(" create database if not exists " + ConfigEnum.HIVE_DATABASE.getValue());
			sparkSession.sql("use " + ConfigEnum.HIVE_DATABASE.getValue());

			// "local" => LOAD DATA LOCAL INPATH (driver-local file system);
			// otherwise the path is resolved against HDFS.
			String local = "local".equalsIgnoreCase(ConfigEnum.LOAD_DATA_SOURCE.getValue()) ? "local" : "";
			String overwrite = "true".equalsIgnoreCase(ConfigEnum.LOAD_DATA_OVERWRITE.getValue()) ? "overwrite" : "";

			// Recreate and load the traffic flow-action table.
			sparkSession.sql("drop table if exists " + ConfigEnum.MONITOR_FLOW_ACTION.getValue());
			sparkSession.sql("create table if not exists " + ConfigEnum.MONITOR_FLOW_ACTION.getValue() +
					"(day date,\n" +
					"monitor_id string,\n" +
					"camera_id string,\n" +
					"car string,\n" +
					"action_time string,\n" +
					"speed int,\n" +
					"road_id string,\n" +
					"area_id string\n" +
					")\n" +
					"ROW FORMAT DELIMITED \n" +
					"FIELDS TERMINATED BY '\\t'\n" +
					"LINES TERMINATED BY '\\n'");
			loadTable(sparkSession, local, overwrite,
					ConfigEnum.MONITOR_FLOW_ACTION_FILE.getValue(),
					ConfigEnum.MONITOR_FLOW_ACTION.getValue());

			// Recreate and load the camera-info table.
			sparkSession.sql("drop table if exists " + ConfigEnum.MONITOR_CAMERA_INFO.getValue());
			sparkSession.sql("create table if not exists " + ConfigEnum.MONITOR_CAMERA_INFO.getValue() + "(\n" +
					"monitor_id string,\n" +
					"camera_id string\n" +
					")\n" +
					"ROW FORMAT DELIMITED \n" +
					"FIELDS TERMINATED BY '\\t'\n" +
					"LINES TERMINATED BY '\\n'");
			loadTable(sparkSession, local, overwrite,
					ConfigEnum.MONITOR_CAMERA_INFO_FILE.getValue(),
					ConfigEnum.MONITOR_CAMERA_INFO.getValue());

			log.info("=======data2Hive finish======");
		} finally {
			sparkSession.stop();
		}
	}

	/**
	 * Builds, logs and executes the Hive LOAD DATA statement for one table.
	 *
	 * @param session    active Hive-enabled SparkSession
	 * @param local      "local" to load from the driver-local file system, "" for HDFS
	 * @param overwrite  "overwrite" to replace existing table data, "" to append
	 * @param sourcePath path of the data file to load
	 * @param table      target Hive table name
	 */
	private static void loadTable(SparkSession session, String local, String overwrite,
			String sourcePath, String table) {
		String loadSql = "load data " + local + " inpath '" + sourcePath + "' " + overwrite + " into table " + table;
		// Parameterized logging avoids eager string concatenation.
		log.info("load data----{}", loadSql);
		session.sql(loadSql);
	}
}

