package com.hh.xx.hive;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.ICLIService;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;

import java.util.Arrays;

/**
 *
 * add hadoop-hdfs dependencies to pom.xml
 */
/**
 * Demonstrates querying Hive through an embedded Thrift binary CLI service,
 * i.e. without connecting to a standalone HiveServer2 instance.
 *
 * <p>Requires the hadoop-hdfs dependencies in pom.xml (see file header note)
 * and a local hive-site.xml / HDFS namenode as configured in {@code main}.
 */
public class HiveCliService {
    // Credentials passed to openSession; "anonymous" is Hive's conventional
    // value when no authentication is configured.
    protected static String USERNAME = "anonymous";
    protected static String PASSWORD = "anonymous";

    /**
     * Opens a session against the embedded CLI service, runs a sample query,
     * prints each row, then releases all server-side resources.
     *
     * @param args unused
     * @throws Exception if Hive configuration, session setup, or query
     *                   execution fails
     */
    public static void main(String[] args) throws Exception {
        HiveConf hiveConf = new HiveConf();
        // Local Hive installation's configuration; adjust for your environment.
        hiveConf.addResource("/Users/tzp/Documents/env/apache-hive-3.1.2-bin/conf/hive-site.xml");
        hiveConf.set("fs.defaultFS", "hdfs://localhost:9000");

        EmbeddedThriftBinaryCLIService service = new EmbeddedThriftBinaryCLIService();
        service.init(hiveConf);
        ICLIService icliService = service.getService();
        // A Thrift client can be built like this; cliServiceClient is
        // interchangeable with icliService for the calls below:
        //ThriftCLIServiceClient cliServiceClient = new ThriftCLIServiceClient(service);

        SessionHandle sessionHandle = icliService.openSession(USERNAME, PASSWORD, null);
        try {
            OperationHandle operationHandle =
                    icliService.executeStatement(sessionHandle, "select * from u_data_ex limit 2", null);
            try {
                RowSet results = icliService.fetchResults(operationHandle);
                for (Object[] result : results) {
                    System.out.println(Arrays.asList(result));
                }
            } finally {
                // Release the server-side operation state for this statement.
                icliService.closeOperation(operationHandle);
            }
        } finally {
            // Close the session and stop the embedded service so no handles
            // leak and the JVM can exit cleanly.
            icliService.closeSession(sessionHandle);
            service.stop();
        }
    }

}