package com.demo.userprofile.component.service.democode;

import java.util.Map;

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.util.Collector;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.hadoop.ParquetInputFormat;

/**
 * Demo: read a Hive table (backed by Parquet files on HDFS) with a Flink
 * batch job and push each parsed row into Redis.
 *
 * @author userprofile_demo
 */
public class HiveToRedisByFlink {

    public static void main(String[] args) throws Exception {
        // HDFS path of the Hive table's backing Parquet files (placeholder).
        String inputFile = "Hive数据表HDFS路径";

        // Batch execution environment.
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Wrap Hadoop's ParquetInputFormat so Flink can read the Parquet files.
        // NOTE(review): ParquetInputFormat also needs a ReadSupport class
        // (e.g. GroupReadSupport) configured on the job before it can read
        // Group records — confirm the deployment sets it.
        Job job = Job.getInstance();
        HadoopInputFormat<Void, Group> hadoopInputFormat = new HadoopInputFormat<>(
                new ParquetInputFormat<Group>(), Void.class, Group.class, job);
        ParquetInputFormat.addInputPath(job, new Path(inputFile));
        DataSource<Tuple2<Void, Group>> hiveTableData = env.createInput(hadoopInputFormat);

        // Transform each record read from the Hive table and write it to Redis.
        hiveTableData.flatMap(new RichFlatMapFunction<Tuple2<Void, Group>, Tuple2<Void, Group>>() {

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                // Open the Redis connection here: once per task, not per record.
            }

            @Override
            public void flatMap(Tuple2<Void, Group> voidGroupTuple2,
                    Collector<Tuple2<Void, Group>> collector) throws InterruptedException {
                Group val = voidGroupTuple2.f1;
                try {
                    // Parse the record into a map, e.g. {"user_id": 100, "province": "山东"}.
                    // Map<String, String> tempMap = ToolsUtils.stringToMap(val.toString());
                    Map<String, String> tempMap = null; // TODO: real parsing; was a guaranteed NPE before the null guard below
                    if (tempMap != null) {
                        insertIntoCache(tempMap);
                    }
                } catch (Exception e) {
                    // Skip the bad record, but leave a trace instead of
                    // swallowing the failure silently.
                    // logger.error("flatMap catch exception", e);
                    System.err.println("flatMap failed for record: " + e);
                }
            }

            // Write one parsed row to Redis under key "province:uid:<user_id>".
            public void insertIntoCache(Map<String, String> rawData) {
                String key = String.format("province:uid:%s", rawData.get("user_id"));
                // redisClient.set(key, rawData.get("province"));
            }

            @Override
            public void close() throws Exception {
                super.close();
                // Close the Redis connection here.
            }
        }).writeAsText("输出文件地址", FileSystem.WriteMode.OVERWRITE);

        // Run the job. Let failures propagate (main declares `throws Exception`)
        // so the caller / scheduler can see that the job failed, instead of
        // swallowing every exception in an empty catch block.
        env.execute("DemoCode");
    }

}
