package com.example.netty.common;

import com.example.netty.vo.*;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.QueryApi;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

public class FluxQuery {

    // Maps a unit abbreviation ("mo"/"d"/"h"/"m") to the Flux date-package accessor
    // (month/monthDay/hour/minute) used to re-label aggregated rows in time(String).
    // Renamed from "full": the old sibling field "time" collided with the public
    // method time(String), the String/Integer parameters named "time", and a local
    // variable in main(), which made the code hard to follow.
    private static final Map<String, String> UNIT_TO_COLUMN = new HashMap<>();
    // Maps a unit abbreviation to the aggregateWindow(every: ...) duration for that unit.
    private static final Map<String, String> UNIT_TO_DURATION = new HashMap<>();

    static {
        UNIT_TO_COLUMN.put("mo", "month");
        UNIT_TO_COLUMN.put("d", "monthDay");
        UNIT_TO_COLUMN.put("h", "hour");
        UNIT_TO_COLUMN.put("m", "minute");

        UNIT_TO_DURATION.put("mo", "1mo");
        UNIT_TO_DURATION.put("d", "1d");
        UNIT_TO_DURATION.put("h", "1h");
        UNIT_TO_DURATION.put("m", "2m");
    }

    /**
     * Returns the most recent record of {@code measurementName} within the relative range,
     * pivoted so every field becomes a column, mapped onto {@code clazz}.
     *
     * <p>NOTE(review): bucket/measurement names are concatenated into the Flux script —
     * Flux has no parameter binding for identifiers, so never pass untrusted input here.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param bucketName      bucket to query
     * @param startDuration   relative range start, e.g. "-10m"; a "-24h" request is
     *                        narrowed to "-10m" (presumably to bound the scan — TODO confirm intent)
     * @param measurementName measurement to read
     * @param clazz           POJO type the pivoted row is mapped to
     * @param <T>             mapped result type
     * @return the latest row mapped to {@code clazz}, or {@code null} when the range is empty
     */
    public static <T> T newFindOne(InfluxDBClient influxDBClient, String bucketName,
                                   String startDuration, String measurementName, Class<T> clazz) {
        if ("-24h".equals(startDuration)) {
            startDuration = "-10m";
        }
        QueryApi queryApi = influxDBClient.getQueryApi();
        String flux = String.format("from(bucket: \"%s\")\n" +
                        "  |> range(start: %s)\n" +
                        "  |> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                        "  |> last()" +
                        "  |> pivot(rowKey:[\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\")",
                bucketName, startDuration, measurementName);
        List<T> result = queryApi.query(flux, clazz);
        return result.isEmpty() ? null : result.get(0);
    }

    /**
     * Returns the last record of {@code measurementName} within the one-minute window
     * starting at {@code time}, pivoted and mapped onto {@code clazz}.
     *
     * <p>SimpleDateFormat is created per call, so the usual thread-safety concern does not
     * apply. Its lenient parsing is relied upon (single-digit hours such as "7:06:00" are
     * accepted — see main()), which is why this was not migrated to the strict java.time
     * parsers.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param bucketName      bucket to query
     * @param time            local wall-clock start, "yyyy-MM-dd HH:mm:ss"
     * @param measurementName measurement to read
     * @param clazz           POJO type the pivoted row is mapped to
     * @param <T>             mapped result type
     * @return the latest row in the window, or {@code null} when it is empty
     * @throws ParseException if {@code time} does not match the expected pattern
     */
    public static <T> T findOneData(InfluxDBClient influxDBClient, String bucketName, String time,
                                    String measurementName, Class<T> clazz) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date startTime = sdf.parse(time);
        // Window is [t, t + 59s]: derive the stop instant instead of re-parsing the input.
        Date stopTime = new Date(startTime.getTime() + 59 * 1000L);
        // Render both instants as UTC RFC3339 timestamps, converting from the JVM's
        // default zone (the parse above) to UTC for the Flux range() clause.
        SimpleDateFormat utcsdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        utcsdf.setTimeZone(TimeZone.getTimeZone("UTC"));
        String start = utcsdf.format(startTime);
        String stop = utcsdf.format(stopTime);
        System.out.println(start); // debug output — consider a logger
        System.out.println(stop);
        QueryApi queryApi = influxDBClient.getQueryApi();
        String flux = String.format("from(bucket: \"%s\")\n" +
                        "  |> range(start: %s,stop: %s)\n" +
                        "  |> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                        "  |> last()" +
                        "  |> pivot(rowKey:[\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\")",
                bucketName, start, stop, measurementName);
        System.out.println(flux);
        List<T> result = queryApi.query(flux, clazz);
        return result.isEmpty() ? null : result.get(0);
    }

    /**
     * Expands an underscore-joined field list (e.g. "temp_hum") into a Flux predicate:
     * {@code r["_field"] == "temp" or r["_field"] == "hum"}.
     *
     * @param fields underscore-separated field names; must not be null
     * @return the OR-joined Flux field filter expression
     */
    public static String splitFields(String fields) {
        List<String> fieldJoin = new ArrayList<>();
        for (String field : fields.split("_")) {
            fieldJoin.add(String.format("r[\"_field\"] == \"%s\"", field));
        }
        return String.join(" or ", fieldJoin);
    }

    /**
     * Builds the Flux expression that labels an aggregated row with the calendar
     * component for {@code unit}: the row's _time is shifted back one window so the
     * label names the window the data belongs to, not the window boundary.
     *
     * @param unit "mo", "d", "h" or "m"
     * @return a Flux {@code date.<component>(...)} expression
     */
    public static String time(String unit) {
        return String.format("date.%s(t:date.add(d:-%s, to:r._time))",
                UNIT_TO_COLUMN.get(unit), UNIT_TO_DURATION.get(unit));
    }

    /**
     * Aggregates the given fields over {@code [start, stop)} with a mean per window of
     * the given unit, rounds values to two decimals, labels each row with the calendar
     * component of its window, and drops windows that lie in the future
     * (results are shifted +8h, presumably to render UTC data in UTC+8 — TODO confirm).
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param bucketName      bucket to query
     * @param start           Flux range start
     * @param stop            Flux range stop
     * @param measurementName measurement to read
     * @param fields          underscore-joined field list (see {@link #splitFields})
     * @param unit            window unit: "mo", "d", "h" or "m"
     * @return one {@link FluxVO} per window
     */
    public static List<FluxVO> data(InfluxDBClient influxDBClient, String bucketName, String start,
                                    String stop, String measurementName, String fields, String unit) {
        QueryApi queryApi = influxDBClient.getQueryApi();
        String flux = String.format("import \"date\"\n" +
                        "import \"math\"\n" +
                        "from(bucket: \"%s\")\n" +
                        "|> range(start: %s,stop: %s)\n" +
                        "|> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                        "|> filter(fn: (r) => %s)\n" +
                        "|> map(fn: (r) => ({ r with _value: float(v: r._value)}))\n" +
                        "|> timeShift(duration: 8h)\n" +
                        "|> aggregateWindow(every: %s, fn: mean)\n" +
                        "|> filter(fn: (r) => date.add(d:-%s,to:r._time) <= date.add(d:8h,to:now()))\n" +
                        "|> fill(value:0.0)\n" +
                        "|> map(fn: (r) => ({ r with _value: math.round(x: r._value * 100.0) / 100.0,_time:%s}))",
                bucketName, start, stop, measurementName, splitFields(fields),
                UNIT_TO_DURATION.get(unit), UNIT_TO_DURATION.get(unit), time(unit));
        System.out.println(flux); // debug output — consider a logger
        return queryApi.query(flux, FluxVO.class);
    }

    /**
     * Returns the raw (unaggregated) points of the given fields over {@code [start, stop)}.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param bucketName      bucket to query
     * @param start           Flux range start
     * @param stop            Flux range stop
     * @param measurementName measurement to read
     * @param fields          underscore-joined field list (see {@link #splitFields})
     * @return every matching point mapped to {@link FluxAllVO}
     */
    public static List<FluxAllVO> all(InfluxDBClient influxDBClient, String bucketName, String start,
                                      String stop, String measurementName, String fields) {
        QueryApi queryApi = influxDBClient.getQueryApi();
        String flux = String.format(
                "from(bucket: \"%s\")\n" +
                "|> range(start: %s,stop: %s)\n" +
                "|> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                "|> filter(fn: (r) => %s)", bucketName, start, stop, measurementName, splitFields(fields));
        return queryApi.query(flux, FluxAllVO.class);
    }

    /**
     * Like {@link #data} but with a caller-supplied window duration and row timestamps
     * shifted back to the original timeline instead of being re-labeled.
     *
     * <p>TODO(review): the bucket name "my-first" is hard-coded here while the sibling
     * methods take a bucketName parameter — consider adding an overload that accepts one.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param start           Flux range start
     * @param stop            Flux range stop
     * @param measurementName measurement to read
     * @param fields          underscore-joined field list (see {@link #splitFields})
     * @param unit            raw Flux duration for aggregateWindow, e.g. "5m"
     * @return one {@link FluxMVO} per window
     */
    public static List<FluxMVO> dataMinute(InfluxDBClient influxDBClient, String start, String stop,
                                           String measurementName, String fields, String unit) {
        QueryApi queryApi = influxDBClient.getQueryApi();
        String flux = String.format("import \"date\"\n" +
                "import \"math\"\n" +
                "from(bucket: \"my-first\")\n" +
                "|> range(start: %s,stop: %s)\n" +
                "|> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                "|> filter(fn: (r) => %s)\n" +
                "|> map(fn: (r) => ({ r with _value: float(v: r._value)}))\n" +
                "|> timeShift(duration: 8h)\n" +
                "|> aggregateWindow(every: %s, fn: mean)\n" +
                "|> fill(value:0.0)\n" +
                "|> map(fn: (r) => ({ r with _value: math.round(x: r._value * 100.0) / 100.0,_time:date.add(d:-8h,to:r._time)}))",
                start, stop, measurementName, splitFields(fields), unit);
        System.out.println(flux); // debug output — consider a logger
        return queryApi.query(flux, FluxMVO.class);
    }

    /**
     * Computes the mean and the count of one field's values restricted to
     * {@code [min, max]} over the given range, pivoted into a single {@link FVO}.
     *
     * <p>TODO(review): bucket "my-first" is hard-coded here as well.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param start           Flux range start
     * @param stop            Flux range stop
     * @param measurementName measurement to read
     * @param field           single field name
     * @param min             inclusive lower bound on values
     * @param max             inclusive upper bound on values
     * @return the avg/count result, or an empty {@link FVO} when nothing matched
     */
    public static FVO interval(InfluxDBClient influxDBClient, String start, String stop,
                               String measurementName, String field, double min, double max) {
        String flux = String.format("data = from(bucket: \"my-first\")\n" +
                "  |> range(start: %s, stop: %s)\n" +
                "  |> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                "  |> filter(fn: (r) => r[\"_field\"] == \"%s\")\n" +
                "  |> map(fn: (r) => ({ r with _value: float(v: r._value) }))\n" +
                "  |> filter(fn: (r) => r._value >= %f and r._value <= %f)\n" +
                "\n" +
                "mean_value = data\n" +
                "  |> mean(column: \"_value\")\n" +
                "  |> map(fn: (r) => ({ _time: r._time, _measurement: r._measurement, _field: \"avg\", _value: r._value }))\n" +
                "\n" +
                "count_value = data\n" +
                "  |> count(column: \"_value\")\n" +
                "  |> map(fn: (r) => ({ _time: r._time, _measurement: r._measurement, _field: \"count\", _value: float(v: r._value) }))\n" +
                "\n" +
                "union(tables: [mean_value, count_value])\n" +
                "  |> pivot(rowKey:[], columnKey: [\"_field\"], valueColumn: \"_value\")",
                start, stop, measurementName, field, min, max);
        System.out.println(flux); // debug output — consider a logger
        QueryApi queryApi = influxDBClient.getQueryApi();
        List<FVO> v = queryApi.query(flux, FVO.class);
        // Never return null: an empty FVO keeps callers null-safe.
        return v == null || v.isEmpty() ? new FVO() : v.get(0);
    }

    /**
     * Aggregates the given fields, value-filtered to {@code [min, max]}, into
     * mean-per-window rows with an explicit window size and offset (in minutes);
     * empty windows are created and filled with 0.0.
     *
     * @param influxDBClient  connected InfluxDB 2.x client
     * @param bucketName      bucket to query
     * @param start           Flux range start
     * @param stop            Flux range stop
     * @param measurementName measurement to read
     * @param fields          underscore-joined field list (see {@link #splitFields})
     * @param min             inclusive lower bound on values
     * @param max             inclusive upper bound on values
     * @param time            window size in minutes
     * @param offset          window offset in minutes
     * @return one {@link FVOWind} per window
     */
    public static List<FVOWind> intervalWind(InfluxDBClient influxDBClient, String bucketName,
                                             String start, String stop, String measurementName,
                                             String fields, double min, double max,
                                             Integer time, Integer offset) {
        String flux = String.format("from(bucket: \"%s\")\n" +
                "  |> range(start: %s, stop: %s)\n" +
                "  |> filter(fn: (r) => r[\"_measurement\"] == \"%s\")\n" +
                "  |> filter(fn: (r) => %s)\n" +
                "  |> map(fn: (r) => ({ r with _value: float(v: r._value) }))\n" +
                "  |> filter(fn: (r) => r._value >= %f and r._value <= %f)\n" +
                "  |> aggregateWindow(every: %dm, fn:mean, createEmpty: true, offset: %dm )\n" +
                "  |> fill(value:0.0)",
                bucketName, start, stop, measurementName, splitFields(fields), min, max, time, offset);
        System.out.println(flux); // debug output — consider a logger
        QueryApi queryApi = influxDBClient.getQueryApi();
        return queryApi.query(flux, FVOWind.class);
    }

    /**
     * Ad-hoc demo of the local-to-UTC timestamp conversion used by
     * {@link #findOneData}; note the single-digit hour, which relies on
     * SimpleDateFormat's lenient parsing.
     */
    public static void main(String[] args) throws ParseException {
        String time = "2024-03-05 7:06:00";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date startTime = sdf.parse(time);
        Date stopTime = new Date(startTime.getTime() + 59 * 1000L);
        SimpleDateFormat utcsdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        utcsdf.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(utcsdf.format(startTime));
        System.out.println(utcsdf.format(stopTime));
    }
}
