package com.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
import org.omg.CORBA.DATA_CONVERSION;

// Java version: read "elem1,elem2,elem3" records from a txt file
public class JavaReadTxt {
    /**
     * Reads comma-separated person records ("username,age,gender") from a local
     * text file, prints them, shows them as a DataFrame, and writes the result
     * out as Parquet. Runs Spark in local mode.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local").setAppName("readTxt");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = new SQLContext(sc);
            JavaRDD<String> rdd = sc.textFile("./sparksql/SQL_FOR_PERSON");
            // Parse each line into an IPerson bean; fail loudly (with the
            // offending line in the message) instead of an opaque
            // ArrayIndexOutOfBoundsException on malformed input.
            JavaRDD<IPerson> personRDDs = rdd.map(new Function<String, IPerson>() {
                @Override
                public IPerson call(String line) throws Exception {
                    String[] splits = line.split(",");
                    if (splits.length < 3) {
                        throw new IllegalArgumentException(
                                "Malformed record, expected 3 comma-separated fields: " + line);
                    }
                    IPerson iPerson = new IPerson();
                    iPerson.setUsername(splits[0]);
                    iPerson.setAge(Integer.valueOf(splits[1]));
                    iPerson.setGender(splits[2]);
                    return iPerson;
                }
            });
            // Debug output: prints each parsed record on the worker's stdout
            // (visible on the driver console in local mode).
            personRDDs.foreach(new VoidFunction<IPerson>() {
                @Override
                public void call(IPerson iPerson) throws Exception {
                    System.out.println(iPerson);
                }
            });

            DataFrame dataFrame = sqlContext.createDataFrame(personRDDs, IPerson.class);
            dataFrame.show();
            // Overwrite so re-running the job does not fail on existing output.
            dataFrame.write().mode(SaveMode.Overwrite).format("parquet").save("./sparksql/save_parquet");
        } finally {
            // Always release the SparkContext, even if a Spark action throws.
            sc.close();
        }
    }
}

