package org.eking.bigdata.spark;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.RowBuffer;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * Small collection of Spark demos: a plain RDD line count ({@link #runContext}),
 * a Spark SQL query over a CSV-style HDFS file ({@link #RunSql}), and a
 * {@code SparkLauncher}-based submission to YARN ({@link #lancher}).
 *
 * <p>Public method names ({@code RunSql}, {@code lancher}) are kept as-is for
 * backward compatibility with existing callers, despite the unconventional
 * casing/spelling.
 */
public class SampleSpark {

	/**
	 * Entry point: runs the Spark SQL demo.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		System.out.println("start....");
		// Alternative demos: lancher() submits via SparkLauncher,
		// runContext("yarn") runs a plain RDD line count.
		RunSql();
	}

	/**
	 * Counts the lines of an HDFS text file with a plain JavaSparkContext and
	 * prints the result with a greppable marker prefix.
	 *
	 * @param master the Spark master URL; currently only logged — the actual
	 *               master is expected to be supplied by spark-submit
	 */
	public static void runContext(String master) {
		System.out.println("run master is:" + master);

		SparkConf conf = new SparkConf().setAppName("SparkTest");
		JavaSparkContext sc = new JavaSparkContext(conf);
		try {
			JavaRDD<String> distFile = sc.textFile("hdfs://node1:8020/test/dataA/student.txt");
			long lineNum = distFile.count();
			// Marker prefix lets a wrapper process grep the result out of the driver log.
			System.out.print("__com_Eking-Result-Json-String-" + lineNum);
		} finally {
			// Always release the context, even if the RDD action fails.
			sc.stop();
		}
	}

	/**
	 * Loads a comma-separated text file from HDFS, maps each line onto a
	 * 4-column string schema (rowid, UserName, sex, age), registers it as a
	 * temp view and runs a SQL query over it.
	 */
	public static void RunSql() {
		SparkSession spark = SparkSession.builder().appName("TesstSparkSession").getOrCreate();
		JavaRDD<String> orderRDD = spark.sparkContext()
				.textFile("hdfs://node1:8020/test/lele/SuperMall.txt", 1).toJavaRDD();

		String schemaString = "rowid,UserName,sex,age";
		List<StructField> fields = new ArrayList<>();
		for (String fieldName : schemaString.split(",")) {
			// All columns are nullable strings; typed parsing is out of scope here.
			fields.add(DataTypes.createStructField(fieldName, DataTypes.StringType, true));
		}
		StructType schema = DataTypes.createStructType(fields);
		final int fieldCount = fields.size();

		// BUG FIX: the original built single-column Rows (RowFactory.create(attr[0]))
		// against a four-column schema, which makes Spark fail at runtime with a
		// row/schema length mismatch once the DataFrame is evaluated. Populate
		// every declared column; records that are too short are null-padded.
		JavaRDD<Row> rowRDD = orderRDD.map(new Function<String, Row>() {

			@Override
			public Row call(String record) throws Exception {
				// limit -1 keeps trailing empty fields instead of dropping them
				String[] attr = record.split(",", -1);
				Object[] values = new Object[fieldCount];
				for (int i = 0; i < fieldCount; i++) {
					values[i] = i < attr.length ? attr[i] : null;
				}
				return RowFactory.create(values);
			}

		});

		Dataset<Row> orderDataFrame = spark.createDataFrame(rowRDD, schema);
		orderDataFrame.createOrReplaceTempView("OrderList");

		Dataset<Row> results = spark.sql("SELECT UserName,sex,age FROM OrderList where rowid = '1'");

		results.show();

	}

	/**
	 * Submits a Spark application ("exm.jar") to YARN through SparkLauncher,
	 * streams the child process stdout/stderr to this JVM's stdout, and waits
	 * for the child to exit.
	 */
	public static void lancher() {
		// NOTE(review): clearing HADOOP_CONF_DIR looks deliberate but is
		// suspicious — confirm the cluster configuration is supplied elsewhere.
		System.setProperty("HADOOP_CONF_DIR", "");
		try {
			SparkLauncher launcher = new SparkLauncher();
			launcher.setAppResource("exm.jar").setMaster("yarn");

			launcher.addAppArgs("yarn");

			Process process = launcher.launch();

			InputStream stdInput = process.getInputStream();
			final InputStream errInput = process.getErrorStream();
			// Drain stderr on its own thread: reading the two pipes strictly in
			// sequence (as the original did) can deadlock once the un-read
			// pipe's OS buffer fills up while the child keeps writing to it.
			Thread errDrainer = new Thread(new Runnable() {
				@Override
				public void run() {
					try {
						dumpInput(errInput);
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
			});
			errDrainer.start();
			dumpInput(stdInput);
			errDrainer.join();
			process.waitFor();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (InterruptedException e) {
			// Restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
		System.out.println("end....");
	}

	/**
	 * Copies everything readable from {@code input} to stdout, prefixing each
	 * chunk with "run: ". Blocks until end-of-stream is reached.
	 *
	 * @param input the stream to drain; it is not closed by this method
	 * @throws IOException if reading from the stream fails
	 */
	private static void dumpInput(InputStream input) throws IOException {
		byte[] buff = new byte[1024];

		while (true) {
			int len = input.read(buff);

			if (len < 0) {
				break;
			}

			// Decode explicitly as UTF-8: the charset-less String constructor
			// uses the platform default and can garble non-ASCII output.
			System.out.println("run: " + new String(buff, 0, len, StandardCharsets.UTF_8));
		}
	}
}
