package com.zz.util;

//import org.apache.spark.sql.SparkSession;
//import com.clickhouse.spark.ClickHouseSpark;

/**
 * Placeholder for a Spark job that copies data from a Hive table into ClickHouse.
 * The intended implementation is sketched in commented-out form inside {@code main}.
 *
 * @author li chao
 * @since 2024-05-27
 */
// NOTE(review): class name should be UpperCamelCase (HiveToClickHouse) per Java
// conventions; left unchanged here because renaming a public class also requires
// renaming the .java file and updating any external references.
public class hiveToClickHouse {

    // Entry point. Currently a no-op: the entire implementation below is
    // commented out. NOTE(review): the commented-out template is Scala syntax
    // (val, Map(...), "->"), not Java — it will NOT compile if simply
    // uncommented inside this Java class; it must be ported to the Java
    // SparkSession API (or moved to a .scala file) before use.
    public static void main(String[] args) {
//        // Initialize the Spark session
//        val spark = SparkSession.builder()
//                .appName("HiveToClickHouse")
//                .config("spark.sql.warehouse.dir", "<your_spark_warehouse_directory>")
//                .enableHiveSupport()
//                .getOrCreate()
//
//        // Read data from the Hive table
//        val hiveTableDF = spark.table("<your_hive_table>")
//
//        // Define the ClickHouse connection configuration
//        val clickhouseConf = Map(
//                "clickhouse.host" ->"<clickhouse_host>",
//                "clickhouse.port" ->"<clickhouse_port>",
//                "clickhouse.database" ->"<clickhouse_database>",
//                "clickhouse.user" ->"<clickhouse_user>",
//                "clickhouse.password" ->"<clickhouse_password>"
//        )
//
//        // Write the data to ClickHouse via the Spark-ClickHouse connector
//        ClickHouseSpark.write(hiveTableDF, clickhouseConf)
//
//        // Stop the Spark session
//        spark.stop()
    }
}
