package com.inji.spark.biz.listener;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;

import com.inji.spark.biz.dto.SparkApiLogReq;
import com.inji.spark.biz.pojo.ApiCodeEnum;
import com.inji.spark.biz.pojo.CreditCardDetail;
import com.inji.spark.biz.pojo.CreditReportData;
import com.inji.spark.biz.pojo.TpApiLog;
import com.inji.spark.biz.service.FcBfangService;
import com.inji.spark.biz.util.JsonMapper;

/**
 * Initializes the Spark context and runs spark-sql jobs that read the
 * tp_api_log table, deserialize stored API responses and feed downstream
 * credit-report computations.
 *
 * @author liukz
 *
 */
public class SparkContextInit implements Serializable {

	private static final long serialVersionUID = -3595006255521716695L;

	private static final Logger logger = LoggerFactory.getLogger(SparkContextInit.class);

	// The select/toDF projections below always address exactly three columns
	// (order id, response content, api code).
	private static final int REQUIRED_COLUMN_COUNT = 3;

	// FIXME(security): JDBC endpoint and credentials are hard-coded; move them to
	// external configuration (e.g. Spring properties) before production use.
	private static final String JDBC_URL =
			"jdbc:mysql://172.16.201.22:3306/mathcn_lkz?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull";
	private static final String JDBC_USER = "root";
	private static final String JDBC_PASSWORD = "123456";
	private static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
	private static final String FROM_TABLE = "tp_api_log";

	private SparkSession sparkSession;

	// NOTE(review): these injected values are currently unused — initSparkSession()
	// hard-codes "local[2]". Confirm whether they should configure the master/host.
	@Value("${inji.spark.masters}")
	private String masters;
	@Value("${spark.driver.host}")
	private String sparkHost;

	/**
	 * Creates the object and eagerly builds the shared {@link SparkSession}.
	 */
	public SparkContextInit() {
		initSparkSession();
	}

	/**
	 * Builds the SparkSession in local mode with two worker threads.
	 * Suitable for development only; a cluster master should come from
	 * configuration (see the unused {@code masters} field).
	 */
	private void initSparkSession() {
		logger.info("spark初始化开始!");
		SparkConf sparkConf = new SparkConf().setAppName("initSparkSession");
		sparkConf.setMaster("local[2]");
		sparkSession = SparkSession.builder().config(sparkConf)
				.config("spark.some.config.option", "config-value").getOrCreate();
		logger.info("spark初始化完成!");
	}

	/** Assembles the JDBC connection properties for the source database. */
	private Properties buildJdbcProperties() {
		Properties props = new Properties();
		props.put("user", JDBC_USER);
		props.put("password", JDBC_PASSWORD);
		props.put("driver", JDBC_DRIVER);
		props.put("url", JDBC_URL);
		return props;
	}

	/**
	 * Reads {@code tp_api_log} through the Spark JDBC source, applying the given
	 * three-column projection, filter and target column names, and maps each row
	 * into a {@link TpApiLog} bean.
	 *
	 * @param selectList source column names (must contain at least three entries)
	 * @param whereStr   SQL filter expression passed to {@code where(...)}
	 * @param toDFList   renamed column names (must contain at least three entries)
	 * @return dataset of mapped {@link TpApiLog} beans
	 */
	private Dataset<TpApiLog> readApiLogs(List<String> selectList, String whereStr, List<String> toDFList) {
		Properties props = buildJdbcProperties();
		Encoder<TpApiLog> inoutEncoder = Encoders.bean(TpApiLog.class);
		Dataset<Row> rows = sparkSession.sqlContext().read().jdbc(JDBC_URL, FROM_TABLE, props)
				.select(selectList.get(0), selectList.get(1), selectList.get(2))
				.where(whereStr)
				.toDF(toDFList.get(0), toDFList.get(1), toDFList.get(2));
		return rows.map(new MapFunction<Row, TpApiLog>() {
			@Override
			public TpApiLog call(Row row) throws Exception {
				// Column order matches the projection above: orderId, respContent, apiCode.
				TpApiLog tpApiLog = new TpApiLog();
				tpApiLog.setOrderId(row.get(0).toString());
				tpApiLog.setRespContent(row.get(1).toString());
				tpApiLog.setApiCode(row.get(2).toString());
				return tpApiLog;
			}
		}, inoutEncoder);
	}

	/**
	 * Deserializes the stored JSON response into the concrete report class
	 * resolved from the api code via {@link ApiCodeEnum}.
	 *
	 * @throws ClassNotFoundException when the class named by the api code mapping
	 *                                is not on the classpath
	 */
	@SuppressWarnings("unchecked") // Class.forName is erased; the mapping guarantees a CreditReportData subtype
	private CreditReportData parseReport(TpApiLog tpApiLog) throws ClassNotFoundException {
		String className = ApiCodeEnum.getCodeName(tpApiLog.getApiCode());
		return JsonMapper.buildNonDefaultMapper().fromJson(tpApiLog.getRespContent(),
				(Class<CreditReportData>) Class.forName(className));
	}

	/**
	 * Reads rows described by the request from {@code tp_api_log}, deserializes
	 * the first matching row's JSON response into a {@link CreditReportData} and
	 * runs the FcBfang computations on it.
	 *
	 * @param req carries selectList (source columns), toDFList (target column
	 *            names), whereStr (row filter) and the userId used downstream;
	 *            both lists must contain at least three entries
	 * @return the parsed {@link CreditReportData}, or {@code null} when the
	 *         request lists are invalid or no row matches the filter
	 * @throws Exception on Spark/JDBC failure or when the report class cannot be
	 *                   loaded
	 */
	public Object getApiRespContent(SparkApiLogReq req) throws Exception {
		List<String> selectList = req.getSelectList();
		List<String> toDFList = req.getToDFList();
		// Fix: the projection indexes get(0..2), so require >= 3 entries — the
		// original only rejected empty lists and could throw IndexOutOfBounds.
		if (selectList == null || selectList.size() < REQUIRED_COLUMN_COUNT) {
			logger.error("selectList is null!");
			return null;
		}
		if (toDFList == null || toDFList.size() < REQUIRED_COLUMN_COUNT) {
			logger.error("toDFList is null!");
			return null;
		}

		// Removed a dead foreach() that mutated executor-side copies of already
		// collected beans — it had no observable effect on the driver's results.
		List<TpApiLog> collect = readApiLogs(selectList, req.getWhereStr(), toDFList).collectAsList();
		if (collect.isEmpty()) {
			return null;
		}

		TpApiLog tpApiLog = collect.get(0);
		CreditReportData creditReportData = parseReport(tpApiLog);
		List<CreditCardDetail> listCardDetail = creditReportData.getListCardDetail();
		logger.debug("orderId={}, cardDetails={}", tpApiLog.getOrderId(), listCardDetail);

		FcBfangService fs = new FcBfangService();
		fs.houseLoanCount(creditReportData, req.getUserId());
		fs.noPledgeLoanAmount(creditReportData, req.getUserId());

		return creditReportData;
	}

	/**
	 * Reads every {@code bfangCreditQuery} row from {@code tp_api_log} via
	 * spark-sql and logs the apply id parsed from each stored JSON response.
	 *
	 * @throws Exception on Spark/JDBC failure or when a report class cannot be
	 *                   loaded
	 */
	public void initDb() throws Exception {
		List<TpApiLog> collect = readApiLogs(
				Arrays.asList("order_id", "resp_content", "api_code"),
				"api_code ='bfangCreditQuery'",
				Arrays.asList("orderId", "respContent", "apiCode")).collectAsList();

		for (TpApiLog tpApiLog : collect) {
			CreditReportData creditReportData = parseReport(tpApiLog);
			// NOTE(review): getPbocInfoC() may be null for malformed responses — TODO confirm.
			logger.info("bsApplyId={}", creditReportData.getPbocInfoC().getBsApplyId());
		}
	}

	/**
	 * Ad-hoc driver entry point: builds a sample request for the
	 * bfangCreditQuery api code and runs it through {@link #getApiRespContent}.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println(" ---------------------- start db2db ------------------------");
		String[] selects = {"order_id", "resp_content", "api_code"};
		String[] toDFs = {"orderId", "respContent", "apiCode"};
		SparkApiLogReq req = new SparkApiLogReq();
		req.setApiCode("bfangCreditQuery");
		req.setFromTable("tp_api_log");
		req.setSelectList(Arrays.asList(selects));
		req.setWhereStr("api_code ='bfangCreditQuery'");
		req.setToDFList(Arrays.asList(toDFs));
		SparkContextInit sparkContextInit = new SparkContextInit();
		sparkContextInit.getApiRespContent(req);
		System.out.println(" ---------------------- finish db2db ------------------------");
	}
}