package org.apache.dolphinscheduler.api.service.impl;


import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ExampleDataService;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.SparkSqlUtils;
import org.apache.dolphinscheduler.dao.entity.ExampleData;
import org.apache.dolphinscheduler.dao.entity.MemoryAddress;
import org.apache.dolphinscheduler.dao.mapper.ExampleDataMapper;
import org.apache.dolphinscheduler.service.memoryaddress.MemoryAddressService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;

@Service
public class ExampleDataServicelmpl extends BaseServiceImpl implements ExampleDataService {
    // NOTE(review): the class name contains a typo — "lmpl" (lowercase L) instead of "Impl".
    // Renaming requires renaming the file and any direct references, so it is left unchanged here.
    // @Component was removed: @Service is itself a @Component specialization, so declaring both
    // is redundant and can cause duplicate-bean registration under some component-scan setups.

    private static final Logger logger = LoggerFactory.getLogger(ExampleDataServicelmpl.class);

    @Autowired
    private ExampleDataMapper exampleDataMapper;

    @Autowired
    private MemoryAddressService memoryAddressService;

    /**
     * Looks up one {@link ExampleData} row by id and attaches the parquet column
     * names read from its output path.
     *
     * @param id example-data primary key
     * @return result map with SUCCESS status and the enriched entity under "data",
     *         or INTERNAL_SERVER_ERROR_ARGS when no row exists for the id
     */
    @Override
    public Map<String, Object> selectByIds(int id) {
        Map<String, Object> result = new HashMap<>();
        ExampleData exampleData = exampleDataMapper.selectByIds(id);
        if (null == exampleData) {
            putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
            return result;
        }
        String path = exampleData.getOutPath();
        String[] columns = SparkSqlUtils.getInstance().readParquet(path);
        exampleData.setColumns(columns);
        putMsg(result, Status.SUCCESS);
        result.put("data", exampleData);
        return result;
    }

    /**
     * Finds the memory address for the given node whose tag matches {@code tag}
     * and returns the parquet columns read from its output path.
     *
     * <p>Fix: the original looped over all addresses and wrote an error status on
     * every non-matching element, so a match followed by a non-match reported
     * failure despite having stored the data. The first matching tag now wins and
     * the error status is only set when no address matches at all. The comparison
     * is also null-safe against addresses with a null tag (previously an NPE).
     *
     * @param id  node id used to resolve candidate memory addresses
     * @param tag tag selecting which address's output to read
     * @return result map with SUCCESS status and an ExampleData carrying the
     *         columns under "data", or INTERNAL_SERVER_ERROR_ARGS when no match
     */
    public Map<String, Object> selectByNodeId(int id, String tag) {
        Map<String, Object> result = new HashMap<>();
        List<MemoryAddress> list = memoryAddressService.getOutPathByNodeId(id);
        for (MemoryAddress address : list) {
            // null-safe: address.getTag() may be null; tag == null never matches (original NPE'd)
            if (tag != null && tag.equals(address.getTag())) {
                String path = address.getOutPath();
                String[] columns = SparkSqlUtils.getInstance().readParquet(path);
                ExampleData exampleData = new ExampleData();
                exampleData.setColumns(columns);
                putMsg(result, Status.SUCCESS);
                result.put("data", exampleData);
                return result; // first match wins; later elements must not overwrite the status
            }
        }
        putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
        return result;
    }

    /**
     * Returns all example-data rows.
     *
     * @return result map with SUCCESS status and the list under "data", or
     *         INTERNAL_SERVER_ERROR_ARGS when the mapper returns null
     */
    public Map<String, Object> selectList() {
        Map<String, Object> result = new HashMap<>();
        List<ExampleData> list = exampleDataMapper.selectList();
        if (null != list) {
            putMsg(result, Status.SUCCESS);
            result.put("data", list);
        } else {
            putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
        }
        return result;
    }

    /**
     * Reads a parquet preview for every output path registered for the node and
     * collects them into a JSON array.
     *
     * <p>Fix: the original null-checked a variable that could never be null (it
     * was initialized to {@code new JSONObject()}), so a thrown exception was
     * logged at info level and the method still reported SUCCESS with a partial
     * (possibly empty) array. SUCCESS is now reported only when every path was
     * read; any exception yields an error status and is logged with its cause.
     *
     * @param nodeId node id used to resolve the output paths to preview
     * @return result map with SUCCESS status and a JSONArray of previews under
     *         "data", or INTERNAL_SERVER_ERROR_ARGS when reading fails
     */
    public Map<String, Object> showResult(int nodeId) {
        Map<String, Object> returnResult = new HashMap<>();
        JSONArray jsonArray = new JSONArray();
        try {
            List<MemoryAddress> list = memoryAddressService.getOutPathByNodeId(nodeId);
            for (MemoryAddress address : list) {
                JSONObject preview = SparkSqlUtils.getInstance().showParquet(address.getOutPath());
                jsonArray.add(preview);
            }
            putMsg(returnResult, Status.SUCCESS);
            returnResult.put("data", jsonArray);
        } catch (Exception e) {
            logger.error("failed to read parquet preview for nodeId {}", nodeId, e);
            putMsg(returnResult, Status.INTERNAL_SERVER_ERROR_ARGS);
        }
        return returnResult;
    }

    /**
     * Reads the column schema (name -> type) of the example data referenced by
     * {@code dataSourceId}.
     *
     * <p>Fix: the original swallowed exceptions via {@code e.printStackTrace()}
     * and returned a result map with no status at all on failure; errors are now
     * logged through SLF4J with the cause preserved and an error status is set.
     *
     * @param dataSourceType must be 1 (example data); any other value is rejected
     *                       — presumably other values denote other source kinds
     *                       handled elsewhere (TODO confirm against callers)
     * @param dataSourceId   example-data primary key
     * @return result map with SUCCESS status and the schema map under DATA_LIST,
     *         or INTERNAL_SERVER_ERROR_ARGS on bad type, missing row, or read failure
     */
    @Override
    public Map<String, Object> getColumnsById(int dataSourceType, int dataSourceId) {
        Map<String, Object> result = new HashMap<>();
        if (dataSourceType != 1) {
            putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
            return result;
        }
        ExampleData exampleData = exampleDataMapper.selectByIds(dataSourceId);
        if (null == exampleData) {
            putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
            return result;
        }
        try {
            Map<String, String> map = HadoopUtils.getInstance().readPartForSchema(exampleData.getOutPath());
            result.put(DATA_LIST, map);
            putMsg(result, Status.SUCCESS);
        } catch (Exception e) {
            logger.error("failed to read schema for path {}", exampleData.getOutPath(), e);
            putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
        }
        return result;
    }

    // NOTE(review): a leftover debug main(String[]) with a hard-coded local Windows
    // path ("D:\\data\\housing.parquet") was removed — nothing invokes a main on a
    // Spring-managed service bean, and it could not run outside the author's machine.
}
