from pyspark.sql import SparkSession, DataFrame
from pyspark.sql.types import StructType


class CommonUtil:
    """Utility helpers for moving data between CSV files, Spark, and MySQL."""

    def readCsvIntoDataSet(self, spark: SparkSession, path: str,
                           schema: StructType, header: bool = True) -> DataFrame:
        """Load a CSV file into a DataFrame using an explicit schema.

        Args:
            spark: active SparkSession used to perform the read.
            path: file or directory path accepted by Spark's CSV reader.
            schema: StructType applied to the data (no schema inference).
            header: whether the first row is a header line. Defaults to
                True, matching the previously hard-coded behaviour.

        Returns:
            DataFrame containing the parsed CSV rows.
        """
        return (spark.read
                .format("csv")
                # Spark expects the option value as the string "true"/"false".
                .option("header", "true" if header else "false")
                .schema(schema)
                .load(path))

    def writeIntoMysql(self, resultDF: DataFrame, tbname: str,
                       url: str = "jdbc:mysql://master:3306/testdb?useSSL=false&useUnicode=true",
                       user: str = "root",
                       password: str = "Emoney_123",
                       mode: str = "overwrite") -> None:
        """Write a DataFrame to a MySQL table over JDBC.

        The table schema is created automatically by Spark's JDBC writer.
        Connection settings were previously hard-coded; they are now
        parameters whose defaults preserve the old behaviour, so existing
        callers are unaffected.

        SECURITY NOTE(review): the default password is a plaintext
        credential embedded in source control — callers should pass
        credentials loaded from a secrets store or environment variable.

        Args:
            resultDF: DataFrame to persist.
            tbname: target table name (value of the JDBC ``dbtable`` option).
            url: JDBC connection URL.
            user: database user name.
            password: database password.
            mode: Spark save mode; the default "overwrite" replaces any
                existing table contents.
        """
        (resultDF.write
         .mode(mode)
         .format("jdbc")
         .option("url", url)
         .option("dbtable", tbname)
         .option("user", user)
         .option("password", password)
         .save())
