package com.linkstec.spark;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.spark.sql.SparkSession;

//import com.linkstec.spark.base.MotConst;

public class SparkSessionTest {
	private static final Logger logger = LogManager.getLogger();

	/**
	 * DDL for the demo table {@code default.src}; kept public so other test
	 * drivers can reuse it. Final: this is a constant, not meant to be reassigned.
	 */
	public static final String createSql = "CREATE TABLE IF NOT EXISTS default.src (key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ";

	/**
	 * Smoke-tests a Hive-enabled local SparkSession against the warehouse at
	 * {@code hdfs://localhost:9000/user/hive/warehouse}: lists databases and
	 * tables, describes and queries E_EVENT_FLOW and BANK_TRANS, and inserts
	 * one sample row. Requires a running HDFS/Hive metastore; exits by
	 * stopping the session.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().appName("Test").master("local").enableHiveSupport()
				.config("spark.sql.warehouse.dir", "hdfs://localhost:9000/user/hive/warehouse")
				.getOrCreate();
		// try/finally guarantees spark.stop() runs even when a query throws
		// (e.g. a missing table), so the local session is never leaked.
		try {
			spark.sql("use default");

			// Inspect the metastore: databases, tables, and the two table schemas.
			spark.sql("show databases").show();
			spark.sql("show tables").show();
			spark.sql("describe E_EVENT_FLOW").show();
			spark.sql("describe BANK_TRANS").show();

			// Read: first 10 rows of the event-flow table.
			spark.sql("select * from E_EVENT_FLOW limit 10").show();
			logger.info("+++++++++++++++++++++++++++++++++++++++++++++++++++++=");

			// Write: insert a single sample transaction row into BANK_TRANS.
			spark.sql("insert into BANK_TRANS values(123,'22904882','111','200元','1','20190421','1624')");

			// Read back: confirm the insert landed.
			spark.sql("select * from BANK_TRANS limit 10").show();
		} finally {
			spark.stop();
		}
	}
}