import os
import pickle

from config.config import config
from utils.read import read_pkl
from utils.remote import get_ssh_client
import logger
from utils.write import write_pkl

# Module-level logger instance shared by this script (project-local `logger` helper).
log = logger.get_logger()

def main_data_filter():
    """Read the ground-truth pickle (locally or over SFTP), keep only the
    configured attribute columns, and persist the filtered data as a pickle.

    All inputs come from the global ``config``:
      * ``input.is_local_file``  -- "false" means fetch the file over SSH/SFTP
      * ``input.gt_data_file_path`` -- source pickle path (local or remote)
      * ``input.hostname/port/username/password`` -- SSH credentials (remote case)
      * ``cluster.attrs``        -- columns to keep
      * ``project_path`` / ``cluster.attrs_select_result_dir`` -- output location

    Side effects: writes ``<project_path>/<attrs_select_result_dir>/<name>.pkl``.
    """
    # Read the gt file.
    is_local_file = config["input"]["is_local_file"]
    gt_data_file_path = config["input"]["gt_data_file_path"]
    # File name without its extension, reused for the output file.
    gt_data_file_name = os.path.split(gt_data_file_path)[1].split(".")[0]
    log.info("gt_data_file_path:%s", gt_data_file_path)
    print("gt_data_file reading..........")
    # Compare via str(): the original `"false".__eq__(flag)` returned
    # NotImplemented (truthy) for any non-string flag (e.g. boolean True from
    # a YAML/JSON config), silently taking the remote branch for a local file.
    if str(is_local_file).lower() == "false":
        hostname = config["input"]["hostname"]
        port = config["input"]["port"]
        username = config["input"]["username"]
        password = config["input"]["password"]

        client = get_ssh_client(hostname, port, username, password)
        try:
            # Open an SFTP session and stream the remote pickle.
            sftp_client = client.open_sftp()
            try:
                with sftp_client.open(gt_data_file_path, "rb") as file:
                    # NOTE(review): pickle.load on a remote file — make sure the
                    # source host is trusted; pickle can execute arbitrary code.
                    data_set = pickle.load(file)
            finally:
                sftp_client.close()
        finally:
            # The original leaked the SSH connection; always close it.
            client.close()
    else:
        # Local path: delegate to the project helper.
        data_set = read_pkl(gt_data_file_path)
    log.debug(data_set.index.name)
    log.info("gt_data_file read finished")

    # Column selection (assumes data_set is a pandas DataFrame — the
    # `[attrs]` projection and `.index.name` above both rely on that).
    attrs = config["cluster"]["attrs"]
    print("===start data filter according to the attrs===")
    log.info(attrs)
    data = data_set[attrs]
    print("===data filter according to the attrs finished===")
    # Persist the attribute-filtered data for further analysis.
    print("write filter result start....")
    project_path = config["project_path"]
    data_dir_path = "%s/%s" % (project_path, config["cluster"]["attrs_select_result_dir"])
    save_path = r'%s/%s.pkl' % (data_dir_path, gt_data_file_name)
    print("属性筛选后结果存储路径：%s" % save_path)
    write_pkl(save_path, data)
    log.info("write filter result success")


if __name__ == '__main__':
    # Read the gt data and select the configured columns.
    main_data_filter()
