package com.demo.userprofile.component.service.democode;

import java.io.Serializable;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import lombok.AllArgsConstructor;

/**
 * Demo: reads label data from a Hive table with Spark and writes it to Redis.
 *
 * <p>NOTE(review): the class name contains a typo ("Reids" instead of "Redis").
 * It is kept as-is because renaming a public class changes the required file
 * name and breaks any existing callers; rename in a coordinated change.
 *
 * @author userprofile_demo
 */
public class HiveToReidsBySpark {

    /**
     * One user-label row: the user id plus its label value (here, a province).
     *
     * <p>Must be a JavaBean — public no-arg constructor and public
     * getters/setters — so that {@code Encoders.bean(LabelInfo.class)} can
     * derive an encoder for it; lombok's {@code @AllArgsConstructor} alone is
     * not sufficient and causes a runtime failure when the encoder is built.
     */
    @AllArgsConstructor
    public static class LabelInfo implements Serializable {

        private static final long serialVersionUID = 1L;

        // User id, taken from the first column of the Hive query.
        private Long key;
        // Label value (province name), taken from the second column.
        private String value;

        /** No-arg constructor required by {@code Encoders.bean}. */
        public LabelInfo() {
        }

        public Long getKey() {
            return key;
        }

        public void setKey(Long key) {
            this.key = key;
        }

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }
    }

    /**
     * Entry point: queries the Hive label table, maps each row to a
     * {@link LabelInfo}, and writes every record to Redis keyed by user id.
     *
     * @param args command-line arguments (unused); the {@code String[]}
     *             parameter is required for the JVM to recognize this method
     *             as the application entry point
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession
                .builder()
                .appName("Spark Read Hive Data To Redis")
                .enableHiveSupport()
                .getOrCreate();

        try {
            // Query the Hive table for the per-user province labels.
            Dataset<Row> sqlDF = spark.sql("SELECT user_id, province FROM userprofile_demo.userprofile_label_province");

            // Convert each Row into a typed LabelInfo object.
            // NOTE(review): row.getLong(0) assumes user_id is a BIGINT column —
            // it throws at runtime otherwise; confirm against the Hive schema.
            Dataset<LabelInfo> labelDS = sqlDF.map(
                    (MapFunction<Row, LabelInfo>) row -> new LabelInfo(row.getLong(0), row.getString(1)),
                    Encoders.bean(LabelInfo.class));

            // Iterate over the dataset and write each record to Redis.
            labelDS.foreach(item -> {
                String key = String.format("province:uid:%s", item.getKey());
                // Write with a Redis client, e.g.:
                // redisClient.set(key, item.getValue());
            });
        } finally {
            // Release cluster resources so the application terminates cleanly.
            spark.stop();
        }
    }

}
