package demo;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SparkLog {

    /**
     * Entry point: reads a space-delimited access log, parses each line into a
     * {@link Log} bean, prints a 10-row sample, and writes the result as CSV.
     *
     * @param args args[0] = input log path, args[1] = CSV output directory
     */
    public static void main(String[] args) {
        // Fail fast with a usage hint instead of an opaque ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: SparkLog <input-log-path> <output-csv-path>");
            System.exit(1);
        }

        SparkSession spark = SparkSession.builder().getOrCreate();
        try {
            RDD<String> log = spark.sparkContext().textFile(args[0], 1);

            // Drop malformed records (too few fields, non-numeric traffic) so a
            // single bad line cannot abort the whole job.
            JavaRDD<Log> logRDD = log.toJavaRDD()
                    .map(SparkLog::parseLine)
                    .filter(parsed -> parsed != null);

            Dataset<Row> logDF = spark.createDataFrame(logRDD, Log.class);
            logDF.show(10, false);

            logDF.write().format("csv").save(args[1]); // save the result as CSV
        } finally {
            spark.stop(); // release cluster resources even when the job fails
        }
    }

    /**
     * Parses one raw, space-separated log line into a {@link Log}, or returns
     * {@code null} when the line is malformed (fewer than 12 fields, or a
     * non-numeric traffic field such as "-").
     *
     * <p>Field layout assumed by the original code: ip at [0], timestamp spans
     * [3] and [4], traffic at [9], quoted URL at [11] —
     * NOTE(review): confirm these indices against the actual log format.
     */
    private static Log parseLine(String line) {
        String[] split = line.split(" ");
        if (split.length < 12) {
            return null;
        }
        String ip = split[0];
        String time = split[3] + " " + split[4];
        String url = split[11].replace("\"", "");
        try {
            return new Log(time, ip, url, Integer.valueOf(split[9]));
        } catch (NumberFormatException e) {
            return null; // traffic field was not a number (e.g. "-")
        }
    }
}
