package com.freez.spark.tool;

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

/**
 * FREEDOM 2021 — life is short, why not give it a try.
 *
 * @Classname: GenerateData.java
 * @Author: zcs
 * @Date: 2021-12-08 (Wednesday) 13:43
 * @Description: Data generation — loads a CSV source and applies column types.
 */
public class GenerateData {

    /**
     * Loads a headerless CSV file and casts Spark's default columns
     * ({@code _c0} .. {@code _c10}) to their expected target types.
     *
     * <p>Column {@code _c7} holds a localized date string (pattern
     * {@code "MM月dd日yyyy年"}, e.g. {@code "12月08日2021年"}) and is parsed with
     * {@link functions#to_timestamp}; every other column uses a plain
     * {@code cast}. Values that fail to convert become {@code null}, which is
     * Spark's standard cast/parse behavior.
     *
     * @param spark the active SparkSession used to read the data source
     * @param path  location of the CSV file (local path or any Spark-supported URI)
     * @return the dataset with all columns converted to their target types
     */
    public static Dataset<Row> generateDataset(SparkSession spark, String path) {
        // Without an explicit schema, Spark reads every CSV column as
        // StringType and names them _c0, _c1, ... in file order.
        Dataset<Row> dataset = spark.read().format("csv").load(path);

        // Target type per column. LinkedHashMap keeps the original,
        // deterministic cast order (cosmetic, but stable for debugging).
        Map<String, DataType> targetTypes = new LinkedHashMap<>();
        targetTypes.put("_c0", DataTypes.IntegerType);
        targetTypes.put("_c1", DataTypes.StringType);
        targetTypes.put("_c2", DataTypes.StringType);
        targetTypes.put("_c3", DataTypes.IntegerType);
        targetTypes.put("_c4", DataTypes.StringType);
        targetTypes.put("_c5", DataTypes.LongType);
        targetTypes.put("_c6", DataTypes.TimestampType);
        targetTypes.put("_c8", DataTypes.StringType);
        targetTypes.put("_c9", DataTypes.BooleanType);
        targetTypes.put("_c10", DataTypes.DoubleType);

        for (Map.Entry<String, DataType> entry : targetTypes.entrySet()) {
            dataset = dataset.withColumn(entry.getKey(),
                    dataset.col(entry.getKey()).cast(entry.getValue()));
        }

        // _c7 needs an explicit parse pattern rather than a plain cast,
        // because its date text is not in a format Spark casts natively.
        dataset = dataset.withColumn("_c7",
                functions.to_timestamp(dataset.col("_c7"), "MM月dd日yyyy年"));
        return dataset;
    }
}