package com.zyh.flink.day05;

import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.queryablestate.client.QueryableStateClient;

import java.util.concurrent.CompletableFuture;

/**
 * Demo client that queries a {@code ReducingState} from a running Flink job
 * via the Queryable State client API.
 *
 * <p>Connects to the queryable state proxy on a TaskManager (not the
 * JobManager), fetches the reduced word count for a single key, and prints it.
 */
public class StateQueryableClientTest {
    public static void main(String[] args) throws Exception {
        // 1. Create the client. The host must be a TaskManager running the
        //    queryable state proxy (default port 9069), NOT the JobManager.
        QueryableStateClient client = new QueryableStateClient("hadoop10", 9069);
        try {
            // 2a. The id of the target job on the Flink cluster.
            JobID jobID = JobID.fromHexString("82b93343ef77f5f45df63520c71a53da");
            // 2b. The queryable name registered in the job via setQueryable(...).
            String queryName = "queryable-word-count";
            // 2c. The key whose state value is requested, plus its type information.
            String key = "hello";
            TypeInformation<String> keyInformation = Types.STRING;
            // 2d. State descriptor to query — must match the descriptor declared
            //     in the Flink job (same state name, reduce semantics, value type).
            ReducingStateDescriptor<Integer> stateDescriptor =
                    new ReducingStateDescriptor<>("wordcount", Integer::sum, Types.INT);

            // 3. Issue the asynchronous request and block until the result arrives.
            CompletableFuture<ReducingState<Integer>> kvState =
                    client.getKvState(jobID, queryName, key, keyInformation, stateDescriptor);
            ReducingState<Integer> reducingState = kvState.get();

            Integer count = reducingState.get();
            System.out.println("count = " + count);
        } finally {
            // Release the client's network resources. Without this, the Netty
            // event-loop threads the client started can keep the JVM alive
            // (and leak connections) after main returns.
            client.shutdownAndWait();
        }
    }
}
