package cn.com.zetatech.loader.entity;

import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * One-shot utility: reads every record of a local Parquet file via the
 * Avro binding, pivots each record with {@code RowToColumnTransform},
 * and writes the result as a UTF-8 CSV file.
 */
public class FileParquetToCsv {

    public static void main(String[] args) {
        String parquetFilePath = "C:\\Users\\86130\\Downloads\\3b4f3f7233e68e9f-ems-ws-48853be000000000.parq";
        String csvFilePath = "C:\\Users\\86130\\Downloads\\output1.csv";

        File file = new File(parquetFilePath);
        Path path = new Path(file.getAbsolutePath());
        Configuration configuration = new Configuration();

        // Both resources are managed by try-with-resources, so the Parquet
        // reader is closed even when an exception escapes the read loop (the
        // previous version only closed it on the success path). The writer is
        // opened with an explicit UTF-8 charset instead of the platform
        // default, matching the UTF-8 decoding of ByteBuffer fields below.
        try (BufferedWriter writer = Files.newBufferedWriter(
                        Paths.get(csvFilePath), StandardCharsets.UTF_8);
             ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(path)
                     .withConf(configuration)
                     .build()) {

            GenericRecord record;
            boolean headerWritten = false;

            while ((record = reader.read()) != null) {
                // LinkedHashMap preserves the schema's field order, so header
                // and value columns stay aligned deterministically (HashMap
                // iteration order is unspecified).
                Map<String, Object> dataMap = new LinkedHashMap<>();

                // Copy every field of the record into the map.
                for (String fieldName : record.getSchema().getFields().stream()
                        .map(field -> field.name()).toArray(String[]::new)) {
                    Object value = record.get(fieldName);

                    if (value instanceof ByteBuffer) {
                        // Binary columns are decoded as UTF-8 text.
                        String stringValue = StandardCharsets.UTF_8.decode((ByteBuffer) value).toString();
                        dataMap.put(fieldName, stringValue);
                    } else {
                        dataMap.put(fieldName, value);
                    }
                }

                // Pivot: one input record may expand to several CSV rows.
                List<Map<String, Object>> transformedRows = RowToColumnTransform.transformRowToColumns(dataMap);

                // The header is taken from the first transformed row's keys.
                if (!headerWritten && !transformedRows.isEmpty()) {
                    writer.write(toCsvLine(transformedRows.get(0).keySet()));
                    writer.newLine();
                    headerWritten = true;
                }

                for (Map<String, Object> row : transformedRows) {
                    List<String> cells = new ArrayList<>(row.size());
                    for (Object cell : row.values()) {
                        cells.add(String.valueOf(cell));
                    }
                    writer.write(toCsvLine(cells));
                    writer.newLine();
                }
            }

            System.out.println("CSV file written to: " + csvFilePath);
        } catch (IOException e) {
            // NOTE(review): consider a logger; printStackTrace kept to preserve
            // the existing error-reporting behavior of this utility.
            e.printStackTrace();
        }
    }

    /**
     * Joins the given cells into a single CSV line, quoting any cell that
     * contains a comma, double quote, CR, or LF per RFC 4180. The previous
     * version wrote raw values, which corrupted rows whenever a value
     * contained a comma or newline.
     *
     * @param cells cell values in column order
     * @return one CSV-formatted line (without a trailing line separator)
     */
    private static String toCsvLine(Iterable<String> cells) {
        StringBuilder line = new StringBuilder();
        boolean first = true;
        for (String cell : cells) {
            if (!first) {
                line.append(',');
            }
            first = false;
            boolean needsQuoting = cell.indexOf(',') >= 0 || cell.indexOf('"') >= 0
                    || cell.indexOf('\n') >= 0 || cell.indexOf('\r') >= 0;
            if (needsQuoting) {
                // Embedded quotes are escaped by doubling them.
                line.append('"').append(cell.replace("\"", "\"\"")).append('"');
            } else {
                line.append(cell);
            }
        }
        return line.toString();
    }
}
