package cn.texous.demo.dsj.util;


import cn.texous.demo.dsj.model.EventsVo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Types;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.util.List;
import java.util.Random;

/**
 *
 */
/**
 * Utility methods for reading and writing Parquet files, both directly via
 * parquet-mr ({@link ParquetWriter} / {@link ParquetReader}) and via Spark SQL.
 *
 * <p>NOTE(review): the static write path reads {@code hadoopHomeDirPath}, which is
 * only populated by the instance-level {@link #init()} lifecycle callback — confirm
 * a managed instance is created before the static methods are used.
 */
public class ParquetUtils {

    /** Hadoop home copied into a static field so the static methods can read it. */
    private static String hadoopHomeDirPath;
    // presumably injected by the surrounding container (no setter visible in this file) — TODO confirm
    private String hadoopHomeDir;

    /** Copies the injected instance setting into the static field after construction. */
    @PostConstruct
    public void init() {
        hadoopHomeDirPath = this.hadoopHomeDir;
    }

    /**
     * Builds a sample Parquet schema named "trigger": three required scalar fields
     * (id, name as UTF-8 strings, age as int32) plus a required nested group
     * "group1" containing two UTF-8 string fields (test1, test2).
     *
     * @return the assembled {@link MessageType}
     * @throws Exception declared for API compatibility; schema building itself does not throw
     */
    public static MessageType getMessageTypeFromCode() throws Exception {
        return Types.buildMessage()
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("name")
                .required(PrimitiveType.PrimitiveTypeName.INT32).named("age")
                .requiredGroup()
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("test1")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("test2")
                .named("group1")
                .named("trigger");
    }

    /**
     * Appends JSON data to a Parquet dataset via Spark, then reads the merged result back.
     *
     * @param jsonPath    path of the JSON input
     * @param parquetPath path of the Parquet output (appended to if it exists)
     * @param sc          active Spark context
     * @return the merged dataset read back with {@code mergeSchema=true}
     * @throws Exception if the Spark read/write fails
     */
    public static Dataset<Row> appendJsonToParquet(String jsonPath,
                                                   String parquetPath,
                                                   JavaSparkContext sc) throws Exception {
        SQLContext sqlContext = new SQLContext(sc);
        Dataset<Row> jsonData = sqlContext.read().format("json").load(jsonPath);
        jsonData.write().mode(SaveMode.Append).save(parquetPath);
        // mergeSchema reconciles column sets across the appended Parquet part files.
        return sqlContext.read()
                .option("mergeSchema", "true").parquet(parquetPath);
    }

    /**
     * Writes the given groups to a new Parquet file at {@code pathName}.
     *
     * <p>No-ops when {@code groupList} is null or empty. Unlike the previous
     * implementation, I/O failures now propagate (the method already declared
     * {@code throws Exception}) instead of being swallowed by printStackTrace.
     *
     * @param pathName    destination file path (must not already exist — CREATE mode)
     * @param messageType Parquet schema the groups conform to
     * @param groupList   records to write; null/empty is a no-op
     * @throws Exception if writing or closing the file fails
     */
    public static void writeParquet(String pathName, MessageType messageType,
                                    List<Group> groupList) throws Exception {
        if (groupList == null || groupList.isEmpty()) {
            return;
        }
        // Guard: init() may not have run when this static method is called directly;
        // System.setProperty(key, null) would throw NullPointerException.
        if (hadoopHomeDirPath != null) {
            System.setProperty("hadoop.home.dir", hadoopHomeDirPath);
        }
        System.out.println(messageType.toString());

        Path path = new Path(pathName);
        Configuration configuration = new Configuration();
        GroupWriteSupport.setSchema(messageType, configuration);
        GroupWriteSupport writeSupport = new GroupWriteSupport();

        // try-with-resources guarantees the writer is closed (flushing the footer)
        // on both success and failure paths.
        try (ParquetWriter<Group> writer = new ParquetWriter<>(path,
                ParquetFileWriter.Mode.CREATE,
                writeSupport,
                CompressionCodecName.UNCOMPRESSED,
                128 * 1024 * 1024,  // row-group (block) size
                5 * 1024 * 1024,    // page size
                5 * 1024 * 1024,    // dictionary page size
                ParquetWriter.DEFAULT_IS_DICTIONARY_ENABLED,
                ParquetWriter.DEFAULT_IS_VALIDATING_ENABLED,
                ParquetWriter.DEFAULT_WRITER_VERSION,
                configuration)) {
            for (Group group : groupList) {
                writer.write(group);
            }
        }
    }

    /**
     * Reads a Parquet file and dumps every record (plus a running 1-based count)
     * to stdout. Errors are printed to stderr; the method itself never throws.
     *
     * @param pathName path of the Parquet file to read
     */
    public static void readParquet(String pathName) {
        GroupReadSupport groupReadSupport = new GroupReadSupport();
        Path path = new Path(pathName);
        long count = 0;
        // try-with-resources closes the reader even if read() fails mid-file.
        try (ParquetReader<Group> reader = ParquetReader.builder(groupReadSupport, path).build()) {
            Group group;
            while ((group = reader.read()) != null) {
                System.out.println(group);
                System.out.println(++count);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Converts an {@link EventsVo} into a Parquet {@link Group} conforming to the
     * schema from {@link #getMessageTypeFromCode_Events()}; null fields become "".
     *
     * @param messageType schema used to build the group
     * @param events      source event bean
     * @return the populated group
     */
    private Group getGroup_Events(MessageType messageType, EventsVo events) {
        Group group = new SimpleGroupFactory(messageType).newGroup();
        group.append("event_time", events.getEventTime() == null ? "" : events.getEventTime())
                .append("create_id", events.getCreateId() == null ? "" : events.getCreateId())
                .append("camp", events.getCamp() == null ? "" : events.getCamp())
                .append("app_id", events.getAppId() == null ? "" : events.getAppId())
                .append("ad_set_id", events.getAdSetId() == null ? "" : events.getAdSetId())
                .append("ch_id", events.getChId() == null ? "" : events.getChId())
                .append("user_id", events.getUserId() == null ? "" : events.getUserId())
                .append("offer_id", events.getOfferId() == null ? "" : events.getOfferId())
                .append("creative_id", events.getCreativeId() == null ? "" : events.getCreativeId())
                .append("event_type", events.getEventType() == null ? "" : events.getEventType())
                .append("geo", events.getGeo() == null ? "" : events.getGeo())
                .append("device", events.getDevice() == null ? "" : events.getDevice())
                .append("os_version", events.getOsVersion() == null ? "" : events.getOsVersion())
                .append("device_ip", events.getDeviceIp() == null ? "" : events.getDeviceIp())
                .append("ua", events.getUa() == null ? "" : events.getUa())
                // NOTE(review): field "sdkImpId" is populated from getSdkClickId() —
                // looks like an impression/click mix-up; confirm against the schema owner.
                .append("sdkImpId", events.getSdkClickId() == null ? "" : events.getSdkClickId());
        return group;
    }

    /**
     * Builds the Parquet schema ("trigger") for event records: sixteen required
     * UTF-8 string fields matching the keys appended in {@code getGroup_Events}.
     *
     * @return the assembled {@link MessageType}
     * @throws Exception declared for API compatibility; schema building itself does not throw
     */
    private MessageType getMessageTypeFromCode_Events() throws Exception {
        return Types.buildMessage()
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("event_time")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("create_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("camp")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("app_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("ad_set_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("ch_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("user_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("offer_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("creative_id")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("event_type")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("geo")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("device")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("os_version")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("device_ip")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("ua")
                .required(PrimitiveType.PrimitiveTypeName.BINARY)
                .as(OriginalType.UTF8).named("sdkImpId")
                .named("trigger");
    }

    /** Ad-hoc entry point: dumps a hard-coded local Parquet file to stdout. */
    public static void main(String[] args) {
        final String PATH_NAME = "/tmp/parquet/events-26146772.parquet";
        ParquetUtils.readParquet(PATH_NAME);
    }

}
