package com.flink.hbase.sql2hdfs.converter;

import com.flink.hbase.sql2hdfs.model.UserData;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.flink.api.common.functions.MapFunction;

import java.time.format.DateTimeFormatter;
import java.util.Objects;

/**
 * UserData 到 Avro GenericRecord 转换器
 * 用于将 UserData 对象转换为 Parquet 可写入的 Avro 格式
 */
public class UserDataToAvroConverter implements MapFunction<UserData, GenericRecord> {
    private static final long serialVersionUID = 1L;
    
    private final Schema avroSchema;
    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    
    public UserDataToAvroConverter(Schema avroSchema) {
        this.avroSchema = avroSchema;
    }
    
    @Override
    public GenericRecord map(UserData userData) throws Exception {
        GenericRecord record = new GenericData.Record(avroSchema);
        
        // 基础字段
        record.put("user_id", userData.getUserId());
        record.put("user_name", userData.getUserName());
        record.put("age", userData.getAge());
        record.put("gender", userData.getGender());
        record.put("email", userData.getEmail());
        record.put("phone", userData.getPhone());
        record.put("address", userData.getAddress());
        record.put("city", userData.getCity());
        record.put("country", userData.getCountry());
        record.put("department", userData.getDepartment());
        record.put("position", userData.getPosition());
        record.put("salary", userData.getSalary());
        
        // 时间字段转换为字符串
        record.put("create_time", userData.getCreateTime() != null ? 
                  userData.getCreateTime().format(DATE_TIME_FORMATTER) : null);
        record.put("update_time", userData.getUpdateTime() != null ? 
                  userData.getUpdateTime().format(DATE_TIME_FORMATTER) : null);
        
        // UDF 计算的字段
        record.put("user_level", userData.getUserLevel());
        record.put("age_group", userData.getAgeGroup());
        record.put("salary_level", userData.getSalaryLevel());
        record.put("process_time", userData.getProcessTime());
        record.put("partition_date", userData.getPartitionDate());
        
        return record;
    }
    
    /**
     * 创建 Avro Schema
     */
    public static Schema createAvroSchema() {
        return org.apache.avro.SchemaBuilder
            .record("UserData")
            .namespace("com.flink.hbase.sql2hdfs.model")
            .fields()
            .requiredString("user_id")
            .optionalString("user_name")
            .optionalInt("age")
            .optionalString("gender")
            .optionalString("email")
            .optionalString("phone")
            .optionalString("address")
            .optionalString("city")
            .optionalString("country")
            .optionalString("department")
            .optionalString("position")
            .optionalDouble("salary")
            .optionalString("create_time")
            .optionalString("update_time")
            .optionalString("user_level")
            .optionalString("age_group")
            .optionalString("salary_level")
            .optionalString("process_time")
            .requiredString("partition_date")
            .endRecord();
    }
} 