package Demo2;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.List;

/**
 * Demo of the Spark 1.x SQL API (Java): reads a CSV-style text file of
 * {@code id,name,age} records, builds a {@code DataFrame} with an explicit
 * all-string schema, registers it as a temp table, and prints the result of
 * a simple {@code SELECT} query.
 *
 * <p>Usage: an optional first argument overrides the input file path;
 * otherwise the original demo path {@code F://sqltest.txt} is used.
 */
public class SparkSQL_JavaDemo {
    public static void main(String[] args) {
        // Backward-compatible generalization: allow the input path on the
        // command line, defaulting to the original hard-coded demo file.
        String inputPath = args.length > 0 ? args[0] : "F://sqltest.txt";

        SparkConf conf = new SparkConf()
                .setAppName("SparkSQL_JavaDemo")
                .setMaster("local[*]")
                .set("spark.testing.memory", "2147480000");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = new SQLContext(sc);

            // Parse each "id,name,age" line into a Row. All three fields stay
            // as strings to match the StringType schema built below.
            // NOTE(review): a line with fewer than 3 comma-separated fields
            // throws ArrayIndexOutOfBoundsException, same as the original.
            JavaRDD<String> fileRDD = sc.textFile(inputPath);
            JavaRDD<Row> mapRDD = fileRDD.map(new Function<String, Row>() {
                @Override
                public Row call(String line) throws Exception {
                    String[] parts = line.split(",");
                    return RowFactory.create(parts[0], parts[1], parts[2]);
                }
            });

            // Build the schema programmatically: three nullable string columns.
            String schemaString = "id,name,age";
            List<StructField> fields = new ArrayList<StructField>();
            for (String fieldName : schemaString.split(",")) {
                fields.add(DataTypes.createStructField(fieldName, DataTypes.StringType, true));
            }
            StructType schema = DataTypes.createStructType(fields);

            DataFrame df = sqlContext.createDataFrame(mapRDD, schema);
            df.registerTempTable("student");

            DataFrame results = sqlContext.sql("select name from student");

            // Collect just the "name" column back to the driver and print it.
            List<String> names = results.javaRDD().map(new Function<Row, String>() {
                @Override
                public String call(Row row) throws Exception {
                    return "Name:" + row.getString(0);
                }
            }).collect();
            for (String name : names) {
                System.out.println(name);
            }
        } finally {
            // Fix: the original never stopped the context; release Spark
            // resources even if the job throws.
            sc.stop();
        }
    }
}
