package com.spark.zhou.demo.sparksql.sqlcontext;

import com.spark.zhou.demo.bean.Person;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

/**
 * @Description: Reads txt-format data into a Spark DataFrame via SQLContext
 * @Author: ZhOu
 * @Date: 2018/5/29
 */
public class TxtData {
    // BUGFIX: was `JsonData.class.getClass().getResource("/")`, which resolves
    // against java.lang.Class (a bootstrap class) and returns null -> NPE at
    // class initialization. Resolve the classpath root from THIS class instead.
    private static final String RES_PATH = TxtData.class.getResource("/").getPath();
    private static final String TXT_PATH = RES_PATH + "person.txt";

    /**
     * Loads a comma-separated person.txt ("id,name,age" per line) from the
     * classpath, registers it as a temp view, and runs a filtering SQL query.
     *
     * @param args unused
     * @throws AnalysisException if the temp view "person" already exists
     */
    public static void main(String[] args) throws AnalysisException {
        SparkConf conf = new SparkConf().setAppName("TXT_PERSON").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = SQLContext.getOrCreate(sc.sc());

            JavaRDD<String> lines = sc.textFile(TXT_PATH);

            // Parse each line "id,name,age" into a Person bean.
            JavaRDD<Person> personJavaRDD = lines.map(new Function<String, Person>() {
                @Override
                public Person call(String v1) throws Exception {
                    String[] split = v1.split(",");
                    Person person = new Person();
                    person.setId(Integer.valueOf(split[0].trim()));
                    // Trim the name too, consistent with id/age (fields may be
                    // separated by ", " in the source file).
                    person.setName(split[1].trim());
                    person.setAge(Integer.valueOf(split[2].trim()));
                    return person;
                }
            });

            Dataset<Row> rowDataset = sqlContext.createDataFrame(personJavaRDD, Person.class);
            rowDataset.createTempView("person");
            rowDataset.show();

            Dataset<Row> dataset = sqlContext.sql("select id,name,age from person where age>14");
            dataset.show();
        } finally {
            // Always release the local Spark context, even if the job fails.
            sc.stop();
        }
    }
}
