package com.flink;

import com.facebook.thrift.TException;
import com.vesoft.nebula.client.graph.data.DateTimeWrapper;
import com.vesoft.nebula.client.graph.data.DateWrapper;
import com.vesoft.nebula.client.graph.data.TimeWrapper;
import com.vesoft.nebula.client.graph.data.ValueWrapper;
import com.vesoft.nebula.client.meta.MetaClient;
import com.vesoft.nebula.client.storage.data.BaseTableRow;
import org.apache.flink.connector.nebula.connection.NebulaClientOptions;
import org.apache.flink.connector.nebula.connection.NebulaGraphConnectionProvider;
import org.apache.flink.connector.nebula.connection.NebulaMetaConnectionProvider;
import org.apache.flink.connector.nebula.connection.NebulaStorageConnectionProvider;
import org.apache.flink.connector.nebula.sink.NebulaBatchOutputFormat;
import org.apache.flink.connector.nebula.sink.NebulaSinkFunction;
import org.apache.flink.connector.nebula.source.NebulaSourceFunction;
import org.apache.flink.connector.nebula.statement.EdgeExecutionOptions;
import org.apache.flink.connector.nebula.statement.ExecutionOptions;
import org.apache.flink.connector.nebula.statement.VertexExecutionOptions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Reads "serve" edges from the {@code basketballplayer} space of a Nebula Graph
 * cluster via the Flink connector, prints them, and (optionally) writes them
 * into the {@code test} space.
 *
 * @author zhangtao
 * @since 2023/10/27
 */
public class NebulaSync {

    /** Nebula meta service address ({@code host:port}). */
    private static final String META_ADDRESS = "10.218.220.83:9559";
    /** Nebula graph service address ({@code host:port}). */
    private static final String GRAPH_ADDRESS = "10.218.220.83:9669";

    /**
     * Builds a Flink streaming job that scans "serve" edges from the
     * {@code basketballplayer} space through the Nebula storage layer,
     * converts them to Flink {@link Row}s and prints them. A sink that would
     * write the rows into the {@code test} space is prepared but deliberately
     * left unattached (see the commented {@code addSink} call).
     *
     * @param args unused command-line arguments
     * @throws Exception if the meta client cannot be created or the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        NebulaClientOptions nebulaClientOptions =
                new NebulaClientOptions.NebulaClientOptionsBuilder()
                        .setMetaAddress(META_ADDRESS)
                        .setGraphAddress(GRAPH_ADDRESS)
                        .build();
        NebulaStorageConnectionProvider storageConnectionProvider =
                new NebulaStorageConnectionProvider(nebulaClientOptions);
        NebulaGraphConnectionProvider graphConnectionProvider =
                new NebulaGraphConnectionProvider(nebulaClientOptions);
        NebulaMetaConnectionProvider metaConnectionProvider =
                new NebulaMetaConnectionProvider(nebulaClientOptions);

        // Print the "team" tag schema once for debugging. Close the meta client
        // as soon as we are done with it so the connection is not leaked.
        MetaClient metaClient;
        try {
            metaClient = metaConnectionProvider.getMetaClient();
        } catch (TException e) {
            throw new IOException("failed to create meta client", e);
        }
        try {
            Map<String, Integer> schema =
                    metaConnectionProvider.getTagSchema(metaClient, "basketballplayer", "team");
            System.out.println(schema);
        } finally {
            metaClient.close();
        }

        // Source side: scan at most 2 "serve" edges; an empty field list means
        // "read all edge properties".
        ExecutionOptions edgeExecutionOptions = new EdgeExecutionOptions.ExecutionOptionBuilder()
                .setGraphSpace("basketballplayer")
                .setEdge("serve")
                .setFields(Arrays.asList())
                .setLimit(2)
                .builder();

        NebulaSourceFunction edgeSourceFunction = new NebulaSourceFunction(storageConnectionProvider)
                .setExecutionOptions(edgeExecutionOptions);

        DataStreamSource<BaseTableRow> edgeStreamSource = env.addSource(edgeSourceFunction);

        // Convert each storage-layer row into a Flink Row, field by field.
        DataStream<Row> edgeDataStream = edgeStreamSource.map(row -> {
            List<ValueWrapper> values = row.getValues();
            Row record = new Row(values.size());
            for (int i = 0; i < values.size(); i++) {
                record.setField(i, getValue(values.get(i)));
            }
            return record;
        });
        edgeDataStream.print();

        // Sink side: the source row layout is (src, dst, rank, start_year, end_year),
        // hence src/dst/rank indices 0/1/2 and property positions 3/4.
        ExecutionOptions addEdgeExecutionOptions = new EdgeExecutionOptions.ExecutionOptionBuilder()
                .setGraphSpace("test")
                .setEdge("serve")
                .setSrcIndex(0)
                .setDstIndex(1)
                .setRankIndex(2)
                .setFields(Arrays.asList("start_year", "end_year"))
                .setPositions(Arrays.asList(3, 4))
                .setBatch(2)
                .builder();
        // The sink is built but intentionally not attached; uncomment the
        // addSink call below to actually write edges into the "test" space.
        NebulaSinkFunction edgeSinkFunction = new NebulaSinkFunction(
                new NebulaBatchOutputFormat(graphConnectionProvider, metaConnectionProvider)
                        .setExecutionOptions(addEdgeExecutionOptions));
        // edgeDataStream.addSink(edgeSinkFunction);

        env.execute("NebulaStreamSource");
    }

    /**
     * Unwraps a Nebula {@link ValueWrapper} into a plain Java object suitable
     * for storing in a Flink {@link Row}.
     *
     * <p>Scalars map to their boxed Java types; time/date/datetime map to
     * their string forms; vertex/edge/path/map values are returned as the
     * client's node/relationship/path/map objects. Empty or null values map
     * to the empty string (so downstream code never sees {@code null}); any
     * other kind falls back to the raw underlying value.
     *
     * @param value the wrapped Nebula value, never {@code null}
     * @return the unwrapped Java representation
     * @throws UnsupportedEncodingException if a string value cannot be decoded
     */
    public static Object getValue(ValueWrapper value) throws UnsupportedEncodingException {
        if (value.isLong()) {
            return value.asLong();
        } else if (value.isBoolean()) {
            return value.asBoolean();
        } else if (value.isDouble()) {
            return value.asDouble();
        } else if (value.isString()) {
            return value.asString();
        } else if (value.isTime()) {
            return value.asTime().getLocalTimeStr();
        } else if (value.isDate()) {
            return value.asDate().toString();
        } else if (value.isDateTime()) {
            return value.asDateTime().getUTCDateTimeStr();
        } else if (value.isVertex()) {
            return value.asNode();
        } else if (value.isEdge()) {
            return value.asRelationship();
        } else if (value.isPath()) {
            return value.asPath();
        } else if (value.isMap()) {
            return value.asMap();
        } else if (value.isEmpty() || value.isNull()) {
            // Represent missing values as "" rather than null for downstream safety.
            return "";
        } else {
            // Unknown kind: hand back the raw underlying value unchanged.
            return value.getValue();
        }
    }
}
