package com.ksc.wordcount.datasourceapi;

import com.ksc.wordcount.task.KeyValue;
import org.apache.http.util.ByteArrayBuffer;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.util.stream.Stream;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.stream.Stream;

public class TextPartionWriter implements PartionWriter<KeyValue>, Serializable {

    /** Destination directory the result file is written into. */
    private final String destDest;

    /** Id of the partition this writer is responsible for. */
    private final int partionId;

    /** Application id; used as the result file name so the thrift service can locate it. */
    private final String applicationId;

    public TextPartionWriter(String destDest, int partionId, String applicationId) {
        this.destDest = destDest;
        this.partionId = partionId;
        this.applicationId = applicationId;
    }

    /**
     * Left-pads the partition id with zeros up to {@code length} digits.
     *
     * @param partionId partition id to format
     * @param length    minimum width of the result
     * @return the zero-padded id; returned unchanged if it is already {@code length} digits or wider
     */
    public String padLeft(int partionId, int length) {
        String partionIdStr = String.valueOf(partionId);
        if (partionIdStr.length() >= length) {
            return partionIdStr;
        }
        // StringBuilder avoids the O(n^2) cost of repeated "0" + s concatenation.
        StringBuilder padded = new StringBuilder(length);
        for (int i = partionIdStr.length(); i < length; i++) {
            padded.append('0');
        }
        return padded.append(partionIdStr).toString();
    }

    /**
     * Writes the reduce task's results to an Avro data file, one record per
     * {@link KeyValue} with fields {@code key} (string) and {@code value} (int).
     *
     * @param stream the reduce output to persist; consumed and closed by this method
     * @throws IOException if the file cannot be created or a record cannot be appended
     */
    @Override
    public void write(Stream<KeyValue> stream) throws IOException {
        Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"KeyValue\", \"fields\": [{\"name\": \"key\", \"type\": \"string\"}, {\"name\": \"value\", \"type\": \"int\"}]}");

        // Name the result file after the applicationId so the thrift service can find it.
        File file = new File(destDest + File.separator + applicationId + ".avro");

        DatumWriter<GenericData.Record> datumWriter = new SpecificDatumWriter<>(schema);
        // try-with-resources guarantees both the writer and the stream are closed even
        // when create()/append() fails; DataFileWriter.close() already flushes, so no
        // separate flush() call is needed.
        try (Stream<KeyValue> input = stream;
             DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(datumWriter)) {
            dataFileWriter.create(schema, file);
            input.forEach(kv -> {
                GenericData.Record avroRecord = new GenericData.Record(schema);
                avroRecord.put("key", kv.getKey());
                avroRecord.put("value", kv.getValue());
                try {
                    dataFileWriter.append(avroRecord);
                } catch (IOException e) {
                    // Propagate instead of swallowing: a failed append means the result
                    // file is incomplete and the task must be reported as failed.
                    throw new UncheckedIOException(e);
                }
            });
        } catch (UncheckedIOException e) {
            // Unwrap so callers still observe the declared checked IOException.
            throw e.getCause();
        }
    }
}
