package StructuredStreaming;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.DataStreamReader;
import org.apache.spark.sql.types.StructType;

/**
 * @program: MySpark
 * @description
 * @author: tkk fendoukaoziji
 * @create: 2019-04-21 18:58
 **/
public class StructuredDataRow {
    /**
     * Demonstrates building two streaming {@code Dataset<Row>} sources:
     * a socket source (one string column per line from node3:9999) and a
     * file (CSV directory) source with an explicit schema.
     *
     * NOTE(review): neither stream is ever started via
     * {@code writeStream().start()}, so this program only constructs the
     * query plans, prints the schemas, and exits — confirm that is the
     * intent of the demo.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Silence Spark's verbose INFO logging; keep warnings and errors.
        Logger.getLogger("org.apache.spark").setLevel(Level.WARN);

        SparkSession spark = SparkSession
                .builder()
                .appName("JavaStructuredNetworkWordCount").master("local[2]")
                .getOrCreate();
        try {
            // Streaming source: each line read from the socket becomes a row
            // with a single "value" string column.
            Dataset<Row> socketDF = spark.readStream().format("socket")
                    .option("host", "node3")
                    .option("port", 9999)
                    .load();
            // Fix: the original discarded this boolean; report it instead.
            System.out.println("socketDF.isStreaming() = " + socketDF.isStreaming());
            socketDF.printSchema();

            // Streaming file source: semicolon-separated CSV files appearing in
            // the directory, parsed with an explicit schema (automatic schema
            // inference is disabled for streaming file sources by default).
            StructType userSchema = new StructType().add("name", "string").add("age", "integer");
            Dataset<Row> csvDF = spark.readStream().option("sep", ";")
                    .schema(userSchema).csv("D:\\project\\sparkTotal\\MySpark\\SparkOfficial\\csv");
            // Fix: csvDF was previously unused; print its schema so the
            // explicit user schema is visible in the demo output.
            csvDF.printSchema();
        } finally {
            // Fix: the original leaked the SparkSession; always release it.
            spark.stop();
        }
    }
}
