package com.example.streamsqlpark.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.example.streamsqlpark.common.conf.FlinkVersion;
import com.example.streamsqlpark.common.util.DeflaterUtils;
import com.example.streamsqlpark.common.util.PropertiesUtils;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Data;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.Map;

/**
 * @author voloiono
 * @date 2024/4/19 12:26
 */
@Data
@TableName("t_flink_env")
public class FlinkEnv {
    @TableId(type = IdType.AUTO)
    private Long id;

    /** Display name of this Flink environment. */
    private String flinkName;

    /** Local installation directory of Flink (FLINK_HOME). */
    private String flinkHome;

    /** flink-conf.yaml content, stored deflate-compressed (see {@link DeflaterUtils}). */
    private String flinkConf;

    private String description;

    /** Scala version the Flink distribution was built against, e.g. "2.12". */
    private String scalaVersion;

    /** Flink version string, expected form "major.minor.patch". */
    private String version;

    /** is default */
    private Boolean isDefault;

    private Date createTime;

    // transient: not a table column; lazily resolved from flinkHome in getFlinkVersion().
    private transient FlinkVersion flinkVersion;

    // Scala version bundled with the running StreamPark process, used for compatibility checks.
    private transient String streamParkScalaVersion = scala.util.Properties.versionNumberString();

    /**
     * Reads {@code $FLINK_HOME/conf/flink-conf.yaml} and stores it compressed in {@link #flinkConf}.
     *
     * @throws RuntimeException wrapping the {@link IOException} if the file cannot be read
     */
    public void doSetFlinkConf() {
        try {
            // File(parent, child) is portable; avoids hand-building the path with '/'.
            File yaml = new File(this.flinkHome, "conf/flink-conf.yaml");
            // Read explicitly as UTF-8: the no-charset overload is deprecated and
            // would depend on the platform default charset.
            String conf = FileUtils.readFileToString(yaml, StandardCharsets.UTF_8);
            this.flinkConf = DeflaterUtils.zipString(conf);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Resolves {@link #version} and {@link #scalaVersion} from the Flink distribution under
     * {@link #flinkHome}, after verifying its Scala version matches the one StreamPark runs on.
     *
     * @throws UnsupportedOperationException if the Scala versions are incompatible; in that
     *         case this entity is left unmodified
     */
    public void doGetVersion() {
        FlinkVersion fv = this.getFlinkVersion();
        // Validate BEFORE mutating, so a failed check does not leave the entity half-updated.
        if (!streamParkScalaVersion.startsWith(fv.scalaVersion())) {
            throw new UnsupportedOperationException(
                    String.format(
                            "streampark当前的scala版本是 %s,但是添加的flink的scala版本是 %s，版本不匹配，请检查",
                            streamParkScalaVersion, fv.scalaVersion()
                    )
            );
        }
        this.setVersion(fv.version());
        this.setScalaVersion(fv.scalaVersion());
    }

    /**
     * Decompresses the stored flink-conf.yaml and parses it into a key/value map.
     *
     * @return the Flink configuration as a flat map
     */
    public Map<String, String> convertFlinkYamlAsMap() {
        String yaml = DeflaterUtils.unzipString(flinkConf);
        return PropertiesUtils.loadFlinkConfYaml(yaml);
    }

    /**
     * Lazily creates and caches the {@link FlinkVersion} resolved from {@link #flinkHome}.
     *
     * @return the cached or newly created FlinkVersion
     */
    public FlinkVersion getFlinkVersion() {
        if (this.flinkVersion == null) {
            this.flinkVersion = new FlinkVersion(this.flinkHome);
        }
        return this.flinkVersion;
    }

    /** Replaces the compressed {@link #flinkConf} with its decompressed plain-text form. */
    public void unzipFlinkConf() {
        this.flinkConf = DeflaterUtils.unzipString(this.flinkConf);
    }

    /**
     * Major.minor portion of {@link #version}, e.g. "1.17" from "1.17.2".
     * NOTE(review): throws StringIndexOutOfBoundsException if version contains no '.'.
     */
    @JsonIgnore
    public String getLargeVersion() {
        return this.version.substring(0, this.version.lastIndexOf("."));
    }

    /** Major component of {@link #version}, e.g. "1" from "1.17.2". */
    @JsonIgnore
    public String getVersionOfFirst() {
        return this.version.split("\\.")[0];
    }

    /** Minor component of {@link #version}, e.g. "17" from "1.17.2". */
    @JsonIgnore
    public String getVersionOfMiddle() {
        return this.version.split("\\.")[1];
    }

    /** Patch component of {@link #version}, e.g. "2" from "1.17.2". */
    @JsonIgnore
    public String getVersionOfLast() {
        return this.version.split("\\.")[2];
    }
}
