package paas.storage.serviceTest;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import paas.storage.distributedColumnDatabase.IConnection;
import paas.storage.distributedColumnDatabase.IData;
import paas.storage.distributedColumnDatabase.IManagement;
import paas.storage.distributedColumnDatabase.ITable;
import paas.storage.result.CreateResponse;
import paas.storage.result.DataQueryResponse;
import paas.storage.result.ExportDataResponse;
import paas.storage.result.ImportDataResponse;
import paas.storage.util.JSONHelper;

import java.io.IOException;
import java.util.Map;

/**
 * End-to-end exercise of the distributed column database (HBase) management API:
 * imports seed data, then round-trips CSV/ORC/Parquet exports and imports
 * against both the local filesystem and HDFS, verifying via filtered queries.
 *
 * @author xufeng
 * @email 525207937@qq.com
 * @date 2021/2/2 17:11
 */
@Service
public class IManagementTest {

    // Fixed copy-paste bug: the logger was bound to IDataTest.class, so all
    // output from this class was attributed to the wrong logger name.
    private static final Logger log = LoggerFactory.getLogger(IManagementTest.class);

    @Autowired
    private IData iData;
    @Autowired
    private IConnection iConnection;
    @Autowired
    private IManagement iManagement;
    @Autowired
    private ITable iTable;

    /**
     * The "exportfile" folder under the current project directory.
     * A "/" separator is required after user.dir — without it the path
     * resolved to a sibling directory suffixed with "exportfile".
     */
    private final String path = "local://" + System.getProperty("user.dir") + "/exportfile";

    private final String localSrcCsv = path + "/test.csv";
    private final String localDestCsv = path + "/local.csv";
    private final String localDestOrc = path + "/local.orc";
    private final String localDestParquet = path + "/local.parquet";

    private final String hdfsDestCsv = "dfs://doushabao12/export_test.csv";
    private final String hdfsDestOrc = "dfs://doushabao12/export_test.orc";
    // Fixed extension typo: ".parquest" -> ".parquet".
    private final String hdfsDestParquet = "dfs://doushabao12/export_test.parquet";

    /**
     * Overall round-trip test (Chinese data, GBK encoding where applicable):
     * <ol>
     *   <li>resolve the target table from test.json</li>
     *   <li>import seed data from a local CSV into HBase</li>
     *   <li>export HBase -&gt; local CSV</li>
     *   <li>export HBase -&gt; local ORC</li>
     *   <li>export HBase -&gt; local Parquet</li>
     *   <li>export HBase -&gt; HDFS CSV</li>
     *   <li>export HBase -&gt; HDFS ORC</li>
     *   <li>export HBase -&gt; HDFS Parquet</li>
     *   <li>import HDFS CSV -&gt; HBase, then query</li>
     *   <li>import HDFS ORC -&gt; HBase, then query</li>
     *   <li>import HDFS Parquet -&gt; HBase, then query</li>
     * </ol>
     * Running on Windows without a Hadoop environment fails with:
     * "(null) entry in command string: null chmod 0644".
     *
     * @throws IOException if test.json cannot be read or resolved
     */
    public void all() throws IOException {
        // Create the connection from the token/config stored in test.json.
        Map map = (Map) JSONHelper.ResolveJsonFileToObject("test.json");
        CreateResponse createResponse =
                iConnection.create("", map.get("token").toString(), map.get("config").toString());
        // e.g. 618532e7-e1f3-4ffd-83d0-3a23c56318bb
        String connectionId = createResponse.getConnectionId();
        Map<String, Object> importTable = (Map<String, Object>) map.get("importTable");

        // 1. Table the data is imported into.
        String database = importTable.get("database").toString();
        String table = importTable.get("table").toString();

        // 2. Seed HBase from a local CSV so the export steps have data.
        //    On Windows a local path looks like local://D:/local.csv
        ImportDataResponse importDataResponse =
                iManagement.importData(connectionId, database, table, "CSV", localSrcCsv);
        log.info("导入csv数据{}", importDataResponse);

        // 3. Export HBase -> local CSV.
        ExportDataResponse exportDataResponse =
                iManagement.exportData(connectionId, database, table, "CSV", localDestCsv);
        log.info("{}", exportDataResponse);

        // 4. Export HBase -> local ORC.
        ExportDataResponse exportDataResponse1 =
                iManagement.exportData(connectionId, database, table, "ORC", localDestOrc);
        log.info("{}", exportDataResponse1);

        // 5. Export HBase -> local Parquet.
        ExportDataResponse exportDataResponse2 =
                iManagement.exportData(connectionId, database, table, "Parquet", localDestParquet);
        log.info("{}", exportDataResponse2);

        // 6. Export HBase -> HDFS CSV.
        ExportDataResponse exportDataResponse3 =
                iManagement.exportData(connectionId, database, table, "csv", hdfsDestCsv);
        log.info("{}", exportDataResponse3);

        // 7. Export HBase -> HDFS ORC.
        ExportDataResponse exportDataResponse4 =
                iManagement.exportData(connectionId, database, table, "orc", hdfsDestOrc);
        log.info("{}", exportDataResponse4);

        // 8. Export HBase -> HDFS Parquet.
        ExportDataResponse exportDataResponse5 =
                iManagement.exportData(connectionId, database, table, "Parquet", hdfsDestParquet);
        log.info("{}", exportDataResponse5);

        // 9. Import HDFS CSV -> HBase, then verify with a filtered query.
        iManagement.importData(connectionId, database, table, "csv", hdfsDestCsv);
        String conditions = "{}";
        String columnFamilyList = "[\"user:username\",\"user:phone\",\"user:addr\"]";
        queryAndLog(connectionId, database, table, conditions, columnFamilyList);

        // 10. Import HDFS ORC -> HBase, then verify.
        iManagement.importData(connectionId, database, table, "orc", hdfsDestOrc);
        queryAndLog(connectionId, database, table, conditions, columnFamilyList);

        // 11. Import HDFS Parquet -> HBase, then verify.
        //     Fixed: the original omitted this importData call, so step 11
        //     merely re-queried the ORC data imported in step 10.
        iManagement.importData(connectionId, database, table, "Parquet", hdfsDestParquet);
        queryAndLog(connectionId, database, table, conditions, columnFamilyList);

        // Import local ORC -> HBase and verify.
        ImportDataResponse importDataResponse1 =
                iManagement.importData(connectionId, database, table, "orc", localDestOrc);
        DataQueryResponse response6 =
                iData.query(connectionId, database, table, conditions, "", "", columnFamilyList, "");
        log.info("查询本地orc导入数据{}", response6);

        // Import local Parquet -> HBase and verify.
        ImportDataResponse importDataResponse2 =
                iManagement.importData(connectionId, database, table, "Parquet", localDestParquet);
        log.info("查询本地prc导入数据{}", importDataResponse2);
        queryAndLog(connectionId, database, table, conditions, columnFamilyList);
        log.info("因为数据是相同的,看三次返回的版本号都不同.说明导入成功");
    }

    /**
     * Runs a filtered query against the given table and logs the parameters
     * and the result. Extracted helper for the repeated query-then-log steps.
     */
    private void queryAndLog(String connectionId, String database, String table,
                             String conditions, String columnFamilyList) {
        log.info("根据过滤器筛选查询参数{},{},", conditions, columnFamilyList);
        DataQueryResponse response =
                iData.query(connectionId, database, table, conditions, "", "", columnFamilyList, "");
        log.info("根据过滤器筛选查询结果{},", response.toString());
    }

}
