package avicit.bdp.dcs.datasource.service;

import avicit.bdp.common.utils.database.ColumnInfo;
import avicit.bdp.dcs.stream.dto.MeasureParaDto;
import avicit.bdp.dcs.stream.service.MsgTemplateService;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * @author liyb
 * 数据查询服务类
 */
@Service
public class DatasourceQueryService {

    private static final Logger logger = LoggerFactory.getLogger(DatasourceQueryService.class);

    @Autowired
    private MsgTemplateService msgTemplateService;

    /**
     * Recursively finds the first regular file under the given HDFS directory and
     * returns its first line, i.e. the schema line of a device data file.
     *
     * @param fs           HDFS file system handle
     * @param folderPath   directory to search (sub-directories are descended into)
     * @param findFileFlag legacy short-circuit flag retained for signature
     *                     compatibility; callers should pass {@code false}. Java
     *                     passes {@code boolean} by value, so reassigning it inside
     *                     this method never reached the caller in the original code.
     * @return the first line of the first file found; {@code ""} when the directory
     *         does not exist or contains no files; may be {@code null} for an empty file
     * @throws IOException if listing the directory or reading a file fails
     */
    private static String getFileFirstLine(FileSystem fs, Path folderPath,
                                           boolean findFileFlag) throws IOException {

        String firstLine = "";
        if (!fs.exists(folderPath)) {
            return firstLine;
        }
        FileStatus[] fileStatus = fs.listStatus(folderPath);
        for (int i = 0; i < fileStatus.length && !findFileFlag; i++) {
            FileStatus file = fileStatus[i];
            if (file.isDirectory()) {
                // Descend until a regular file is found; a blank result means this
                // subtree had no usable file, so keep scanning the siblings.
                firstLine = getFileFirstLine(fs, file.getPath(), findFileFlag);
                if (StringUtils.isNotBlank(firstLine)) {
                    return firstLine;
                }
            } else {
                // Read only the first line (the file schema). try-with-resources
                // closes the HDFS stream — the original code leaked it. UTF-8 is
                // pinned so the result no longer depends on the platform charset.
                try (FSDataInputStream input = fs.open(file.getPath());
                     BufferedReader reader = new BufferedReader(
                             new InputStreamReader(input, StandardCharsets.UTF_8))) {
                    return reader.readLine();
                }
            }
        }
        return firstLine;
    }

    /**
     * Builds column metadata for the Kafka message template with the given id.
     *
     * @param templateId id of the Kafka message template
     * @return one {@link ColumnInfo} (name/type/comment) per measure parameter of the
     *         template; an empty list when the template has no parameters
     */
    public List<ColumnInfo> getKafkaTemplateColumnList(String templateId) {
        List<ColumnInfo> columns = new ArrayList<>();
        List<MeasureParaDto> paras = msgTemplateService.getParasByTemplateId(templateId, null);
        if (CollectionUtils.isEmpty(paras)) {
            return columns;
        }
        for (MeasureParaDto para : paras) {
            ColumnInfo column = new ColumnInfo();
            column.setName(para.getName());
            column.setType(para.getType().getType());
            column.setComment(para.getRemark());
            columns.add(column);
        }
        return columns;
    }

}
