package com.hna.eking.SparkUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Helper that loads a delimited text file into a Spark temporary view and runs
 * SQL against it.
 *
 * <p>Typical usage: construct, configure the minimum partition count and (via
 * {@link #GetTableInfo(String)}) the input file location/columns, then call
 * {@link #RunSql(String, String)}.
 *
 * <p>NOTE(review): this class is not thread-safe — it holds mutable
 * configuration state ({@code filePath}, {@code schema}, {@code separator}).
 */
public class SparkRunFile {

	/** Creates a runner whose Spark application name defaults to "SparkSession". */
	public SparkRunFile() {
		this("SparkSession");
	}

	/**
	 * Creates a runner backed by a shared {@link SparkSession}.
	 *
	 * @param appName Spark application name (shown in the Spark UI)
	 */
	public SparkRunFile(String appName) {
		spark = SparkSession.builder().appName(appName).getOrCreate();
		minPartition = 1;
		separator = ",";
	}

	/**
	 * Sets the minimum number of partitions used when reading the input file.
	 *
	 * @param min minimum partition count; must be at least 1
	 * @throws IllegalArgumentException if {@code min} is less than 1
	 */
	public void setMinPartition(int min) {
		if (min < 1) {
			throw new IllegalArgumentException("minimum partition count must be >= 1, got " + min);
		}
		minPartition = min;
	}

	/**
	 * Reads the configured text file, registers its rows as a temporary view
	 * named {@code tableName}, and executes {@code sql} against it.
	 *
	 * <p>Each input line is split on the configured separator and mapped onto
	 * the configured {@link StructType} schema column-by-column.
	 *
	 * @param tableName name of the temporary view to create or replace
	 * @param sql       SQL statement to run against the view
	 * @return the query result (previously discarded; returning it is
	 *         source-compatible with existing callers that ignore it)
	 * @throws IllegalStateException if the file path or schema has not been set
	 */
	public Dataset<Row> RunSql(String tableName, String sql) {
		if (filePath == null || filePath.isEmpty()) {
			throw new IllegalStateException("input file path is not set; configure it before calling RunSql");
		}
		if (schema == null) {
			throw new IllegalStateException("table schema is not set; configure it before calling RunSql");
		}

		JavaRDD<String> orderRDD = spark.sparkContext().textFile(filePath, minPartition).toJavaRDD();

		// String.split takes a regex, so quote the configured separator to treat
		// characters such as '|' or '.' literally. The -1 limit preserves
		// trailing empty columns so every row stays aligned with the schema.
		// (Bug fix: the split was previously hard-coded to "," and ignored the
		// configurable separator field entirely.)
		final String separatorRegex = Pattern.quote(separator);
		JavaRDD<Row> rowRDD = orderRDD.map(record -> {
			String[] attr = record.split(separatorRegex, -1);
			RowUtils row = new RowUtils();
			for (int i = 0; i < attr.length; i++) {
				row.AppendColume(schema.apply(i).name(), attr[i]);
			}
			return row;
		});

		Dataset<Row> orderDataFrame = spark.createDataFrame(rowRDD, schema);
		orderDataFrame.createOrReplaceTempView(tableName);

		logger.debug("Executing SQL against temporary view {}: {}", tableName, sql);
		return spark.sql(sql);
	}

	/**
	 * Resolves the file path and column description for {@code tableName}.
	 *
	 * <p>TODO(review): this is clearly a stub — it blanks both fields instead of
	 * looking anything up, which makes a subsequent {@link #RunSql} fail its
	 * file-path guard. Implement the actual lookup or remove the method.
	 *
	 * @param tableName logical table name to resolve (currently unused)
	 */
	public void GetTableInfo(String tableName) {
		filePath = "";
		columnStr = "";
	}

	// TODO(review): strSql and columnInfo are never read or written anywhere in
	// this class — confirm they are not needed and delete them.
	private String strSql;
	private String columnInfo;
	private SparkSession spark;
	private int minPartition;
	private String filePath;
	private String columnStr;
	private String separator;
	private StructType schema;
	private static final Logger logger = LoggerFactory.getLogger(SparkRunFile.class);
}
