package com.demo.userprofile.component.service.democode;

import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroup;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.schema.GroupType;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Demo: reads a Hive table's Parquet files directly from HDFS, extracts
 * selected columns from each row, and writes them to Redis (the Redis call
 * itself is left as a commented stub).
 *
 * <p>The placeholder string literals (e.g. {@code "Hive数据表HDFS路径"}) are
 * meant to be replaced by the user with real paths before running.
 *
 * @author userprofile_demo
 */
public class HiveToRedisByHDFS {

    /**
     * Builds a Hadoop {@link Configuration} for HDFS access.
     *
     * <p>Forces the {@code DistributedFileSystem} implementation and layers in
     * the cluster's mount-table / core-site / hdfs-site resource files
     * (the argument strings are placeholders for the actual file paths).
     *
     * @return the populated Configuration; resource files that fail to load
     *         are reported but do not abort — the base config is still returned
     */
    public static Configuration getConfiguration() {
        Configuration conf = new Configuration();
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        try {
            conf.addResource("mountTable.xml配置文件路径");
            conf.addResource("core-site.xml配置文件路径");
            conf.addResource("hdfs-site.xml配置文件路径");
        } catch (Exception e) {
            // NOTE(review): was an empty catch that swallowed the error silently;
            // at minimum report it so a misconfigured cluster is diagnosable.
            System.err.println("getConfiguration: failed to add config resources: " + e);
        }
        return conf;
    }

    /**
     * Entry point: streams every row of the Parquet file at the (placeholder)
     * HDFS path, keeps only the {@code user_id} and {@code province} columns,
     * and builds the Redis key {@code province:uid:<user_id>} per row.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String filePath = "Hive数据表HDFS路径";
        List<String> properties = Lists.newArrayList("user_id", "province");
        Configuration conf = getConfiguration();
        Path file = new Path(filePath);
        // FIX: the original never closed the reader (resource leak) —
        // ParquetReader is Closeable, so use try-with-resources.
        try (ParquetReader<Group> reader =
                ParquetReader.builder(new GroupReadSupport(), file).withConf(conf).build()) {
            // Read the HDFS file and parse it row by row.
            // Note: Group itself provides getValueToString, so the original
            // downcast to SimpleGroup was unnecessary.
            Group group;
            while ((group = reader.read()) != null) {
                GroupType groupType = group.getType();
                Map<String, String> dataMap = Maps.newHashMapWithExpectedSize(properties.size());
                for (int i = 0; i < groupType.getFieldCount(); i++) {
                    String colName = groupType.getFieldName(i);
                    if (properties.contains(colName)) {
                        // (i, 0): field index i, first (and only) value of that field
                        dataMap.put(colName, group.getValueToString(i, 0));
                    }
                }
                // Write the row into Redis (stubbed out in this demo).
                if (!dataMap.isEmpty()) {
                    String key = String.format("province:uid:%s", dataMap.get("user_id"));
                    // redisClient.set(key, dataMap.get("province"));
                }
            }
        } catch (Exception e) {
            // NOTE(review): was an empty catch ("// print sth") — errors were
            // invisible. Report the failure instead of swallowing it.
            System.err.println("HiveToRedisByHDFS failed: " + e);
            e.printStackTrace();
        }
    }
}
