package com.navinfo.platform.crosscity.service;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.ILoadDataChannel;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.LoadDataFromHive;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.logging.Logger;

/**
 * Singleton {@link ILoadDataService} for the cross-city pipeline.
 *
 * <p>Loads the basic data set by executing the Hive SQL query configured under
 * the {@code hdfs.data.hql} key through the {@link LoadDataFromHive} channel.
 * Implemented as a single-element enum to get a thread-safe,
 * serialization-proof singleton.
 */
public enum CrossCityLoadDataService implements ILoadDataService {

    /** The single service instance. */
    INS;

    private static final Logger LOG =
            Logger.getLogger(CrossCityLoadDataService.class.getName());

    /** Channel used to run SQL against Hive; fixed for the lifetime of the singleton. */
    private final ILoadDataChannel hiveChannel = LoadDataFromHive.INSTANCE;

    /**
     * Loads the basic data set from Hive.
     *
     * @param spark     active Spark session used to execute the query
     * @param configMap job configuration; must contain the query text under
     *                  the {@code hdfs.data.hql} key
     * @return an immutable single-element list whose only element is the
     *         resulting {@code Dataset<Row>}
     * @throws NullPointerException if {@code configMap} has no
     *         {@code hdfs.data.hql} entry — fail fast rather than passing a
     *         {@code null} query downstream
     */
    @Override
    public List<Object> loadData(SparkSession spark, Map<String, String> configMap) {
        String sql = Objects.requireNonNull(
                configMap.get("hdfs.data.hql"),
                "config key 'hdfs.data.hql' is missing");
        // Log instead of System.out so the message goes through the normal
        // logging pipeline. ("执行的sql为" = "SQL to execute".)
        LOG.info("执行的sql为：" + sql);
        Dataset<Row> basicDataInfo = hiveChannel.readData(spark, sql);
        return Collections.singletonList(basicDataInfo);
    }
}
