import os
import sys
import threading
import time
from collections import Counter, defaultdict
from typing import Optional

from fastapi import APIRouter, Form, UploadFile, File

from src.application.vo import ResponseVO, HostVO, SourceVO, ColumnVO, DatasetVO, PrivateCRUDVO
from src.database.config import DataSet, Source, Machine, Column, DataDict
from src.database.dao import push_to_neo4j, update_to_neo4j, private_crud, deep_copy_neo4j

# Root directory (relative to this module) under which all dataset files are stored.
data_path = "../../data/"

# All endpoints below are mounted under /users and grouped as "private" in the API docs.
router = APIRouter(
    prefix='/users',
    tags=['private']
)


# TODO: add authentication/authorization
# TODO: warn when a dataset with the same name exists; warn when the column table does not match the uploaded file
# FIXME: dataset/host/source names must not contain spaces, or escaping (single quotes etc.) must be added
# FIXME: special characters in dataset/host/source names should be escaped to prevent XSS
# FIXME: dataset/host/source names must not be duplicated


# Compare two TSV files and push the resulting change set to neo4j.
def compare_file(file1, file2, offset):
    """Diff two TSV snapshots and hand the change set to ``update_to_neo4j``.

    Rows are keyed by their first four fields (time, start, end,
    relation_type); a key seen in *file2* but not in *file1* counts as an
    addition.  The actual diff payload is pushed asynchronously.

    :param file1: path of the older snapshot (must be non-empty)
    :param file2: path of the newer snapshot (must be non-empty)
    :param offset: clock offset forwarded to the neo4j update

    FIXME: ``changedata`` is never populated, so ``update_to_neo4j``
    currently always receives an empty dict — the original diff-building
    code was left commented out and has been removed here.
    FIXME: ``sys.exit()`` kills the whole server process; this should raise
    an exception instead once callers are updated.
    """
    if file1 == "" or file2 == "":
        print('file path cannot be empty：first file path：{0}, second file path：{1}'.format(file1, file2))
        sys.exit()

    print("comparing  {0} and {1}".format(file1, file2))
    header_key = ('time', 'start', 'end', 'relation_type')
    counts = Counter()        # occurrences of each (time, start, end, relation) key
    data = defaultdict(list)  # holds the header row under the 'format' key
    changedata = {}           # diff payload (see FIXME in the docstring)

    with open(file1, "r") as f_tsv:
        for row in f_tsv:
            fields = list(map(str.strip, row.split('\t')))
            key = tuple(fields[:4])  # time, start, end, relation as the key
            if key == header_key:
                data['format'].append(fields[:])
            else:
                counts[key] += 1

    add_cnt = 0
    label_cnt = 0
    with open(file2) as f_tsv:
        for row in f_tsv:
            fields = list(map(str.strip, row.split('\t')))
            key = tuple(fields[:4])  # time, start, end, relation as the key
            counts[key] += 1

            # first occurrence overall -> present only in file2 -> an addition
            if counts[key] == 1 and key != header_key:
                add_cnt = add_cnt + 1
            if key == header_key:
                continue
            label_cnt = label_cnt + 1

    # push the change set to neo4j without blocking the caller
    th = threading.Thread(target=update_to_neo4j, args=(changedata, offset))
    th.start()


# File upload
@router.post("/{username}/file_upload")
async def file_upload(username: str, offset: Optional[float] = Form(None), file: UploadFile = File(...)):
    """Store an uploaded TSV under ``data_path`` and push it to neo4j.

    The filename must have the form ``dataset-host-source-name.tsv``; the
    hyphens are translated into path separators.  All failures are reported
    through the ``message`` field of the JSON response (not HTTP status).
    """
    print(username)
    # FIXME: must verify the uploader's identity — uploading into someone
    # else's private dataset must not be possible. For now the plain username
    # stands in; this should really be token based.
    #
    # If the dataset is public, uploading is refused and the message field
    # explains why ("cannot modify public dataset").
    # Uploading is only allowed while the dataset is private.
    #
    # Offset resolution for an upload:
    # 1. if this request provides an offset, use it;
    # 2. otherwise check whether the host already has a stored offset:
    #   2.1 if a pre-provided offset exists, use it;
    #   2.2 otherwise reject the request, explaining why in the message
    #       field ("no offset").
    start = time.time()
    try:
        res = await file.read()

        # filename encodes the target path: dataset-host-source-file.tsv
        path_str = str(file.filename).replace("-", "/")
        if path_str.count("/") != 3 or not path_str.endswith(".tsv"):
            return {"message": "wrong filename", "time": time.time() - start, "filename": file.filename}
        dir_str = path_str[0:path_str.rindex("/")]
        path_str = data_path + path_str
        dir_str = data_path + dir_str
        if not os.path.exists(dir_str):
            return {"message": "not exists: dataset or machine or source", "time": time.time() - start,
                    "filename": file.filename, "path_str": path_str}

        dataset_name = file.filename.split('-')[0]
        d = DataSet(dataset_name)
        d = d.load_from_config()

        # The caller must have rights on the target dataset.
        # CHECKME: this logic should be replaced by token parsing etc.
        # There seems to be no add/remove-collaborator operation yet?
        is_owner = username == d.owner
        is_collaborator = hasattr(d, 'collaborators') and username in d.collaborators
        if not is_owner and not is_collaborator:
            return {"message": "permission denied", "time": time.time() - start, "filename": file.filename}

        # Public datasets are read-only: refuse the upload.
        if hasattr(d, 'is_public') and d.is_public:
            return {"message": "cannot modify public dataset", "time": time.time() - start, "filename": file.filename}

        with open(path_str, "wb") as f:
            print("write file")
            print(path_str)
            f.write(res)

        # Resolve the offset value.
        # NOTE(review): the file has already been written at this point even
        # if the request is rejected below — confirm this is intended.
        # print(path_str)
        if offset is None:
            dataset_name, host_name, src_name, file_name = file.filename.split('-')
            dataset = DataSet(dataset_name).load_from_config()

            offset_ = None
            if hasattr(dataset.machines[host_name], 'offset'):
                offset_ = dataset.machines[host_name].offset
            if offset_ is None:
                return {
                    "message": "no offset has been provided for your host",
                    "time": time.time() - start,
                    "filename": file.filename
                }
            offset = offset_

        # namelist = path_str.split("/")
        # fileversion = namelist[-1]
        # print(fileversion)
        # versionidlist = list(filter(str.isdigit, str(fileversion)))
        # versionstr = versionidlist[0]

        # versionid = int(versionidlist[0])

        # if versionid==1:  # first upload of this kind of file

        arg_list = str(file.filename).split("-")
        # The offset correction is applied while inserting into the database.
        th = threading.Thread(target=push_to_neo4j, args=(path_str, offset, arg_list[0], arg_list[1], arg_list[2],))
        th.start()
        # else :  # need to compare with the previous version
        #     file1=path_str
        #     lastversion=fileversion.replace(str(versionid),str(versionid-1))
        #     file2=path_str.replace(fileversion,lastversion)
        #     compare_file(file2,file1,offset)

        return {"message": "success", "time": time.time() - start, "filename": file.filename}
    except Exception as e:
        print(e)
        return {"message": str(e), "time": time.time() - start, "filename": file.filename}


# Data add/delete endpoint for private datasets
@router.post('/{username}/privatecrud')
async def crud_private_datasets(username: str, req: PrivateCRUDVO):
    """Apply add / delete-relation / delete-node operations to a private dataset.

    The request's ``crud`` dict names the target dataset/host/source and
    carries the optional operation payloads; the actual graph mutation runs
    in a background thread via ``private_crud``.
    """
    offset = req.offset
    crud = req.crud
    start = time.time()

    dataset = crud['dataset_name']
    host = crud['host_name']
    source = crud['source_name']
    # optional operation payloads; absent keys mean "no such operation"
    delete_relation = crud.get('delete_relation')
    delete_node = crud.get('delete_node')
    add = crud.get('add')

    d = DataSet(dataset)
    try:
        d = d.load_from_config()
    except FileNotFoundError:
        return ResponseVO(404, "Dataset Not Found.")

    # There seems to be no add/remove-collaborator operation yet?
    if (hasattr(d, 'is_public') and d.is_public) or d.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    # todo: auth check
    if offset is None:
        # BUG FIX: the old code rebound `dataset` to the DataSet object here,
        # which then leaked into the private_crud args and the JSON response.
        # The already-loaded config `d` is reused instead.
        offset = getattr(d.machines[host], 'offset', None)
        if offset is None:
            return {
                "message": "no offset has been provided for your host",
                "time": time.time() - start,
                "dataset": dataset,
                "host": host,
                "source": source
            }

    try:
        # run the graph mutation without blocking the response
        th = threading.Thread(target=private_crud,
                              args=(offset, add, delete_relation, delete_node, dataset, host, source,))
        th.start()

        return {"message": "success", "time": time.time() - start, "dataset": dataset, "host": host, "source": source}
    except Exception as e:
        print(e)
        return {"message": str(e), "time": time.time() - start, "dataset": dataset, "host": host, "source": source}


# --- Datasets ---
@router.get('/{username}/datasets')
async def get_private_datasets(username: str):
    """List only the datasets owned by *username* whose is_public flag is False."""
    registry = DataDict().load_from_config()
    registry.data = {
        name: meta
        for name, meta in registry.data.items()
        if meta['owner'] == username and meta.get('is_public') is False
    }
    return registry.__dict__


@router.get('/{username}/datasets/{dataset_name}')
async def get_private_dataset(username: str, dataset_name: str):
    """Return the full configuration of one private dataset owned by the caller."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.__dict__


@router.post('/{username}/datasets/{dataset_name}')
async def fork_public_dataset(username: str, dataset_name: str):
    """Fork a public dataset into the caller's private space.

    The fork gets the name ``<dataset_name>_<username>``, is marked private,
    has its version reset, and its graph data is deep-copied in background
    threads.
    """
    # TODO: how do we ensure the fork lands in the caller's own space and not
    # someone else's? A username+token scheme seems appropriate.
    # Forking "creates" the dataset inside the private space, hence this route.
    d: DataSet = DataSet(dataset_name)
    d = d.load_from_config()
    if hasattr(d, 'is_public') and d.is_public is False:
        # only public datasets may be forked
        return ResponseVO(702, "Illegal operation.")
    if hasattr(d, 'is_public'):
        d.is_public = False
    if hasattr(d, 'version_id'):
        d.version_id = 0
    d.set_name = d.set_name + '_' + username
    # forking also transfers ownership
    d.owner = username
    d.save_to_config()

    # Create host/source directories for the fork.
    # BUG FIX: os.mkdir failed here because the new dataset root directory
    # (data_path/<set_name>) was never created; os.makedirs creates the whole
    # chain and os.path.join replaces the fragile "/" concatenation.
    for machine in d.machines:
        host_path = os.path.join(data_path, d.set_name, machine)
        os.makedirs(host_path, exist_ok=True)
        for source in d.machines[machine].sources:
            os.makedirs(os.path.join(host_path, source), exist_ok=True)

    # deep-copy the graph data into the fork
    pre_dataset = dataset_name
    cur_dataset = d.set_name
    for pre_machine in d.machines:
        for pre_source in d.machines[pre_machine].sources:
            th = threading.Thread(target=deep_copy_neo4j, args=(pre_dataset, pre_machine, pre_source,
                                                                cur_dataset, pre_machine, pre_source))
            th.start()

    return ResponseVO(200, "Successfully fork.")


# TODO: adjust this URL — it is not semantic; should be auth + POST /datasets/{dataset_name}
@router.post('/{username}/datasets/{dataset_name}/pub')
async def publish_private_datasets(username: str, dataset_name: str):
    """Publish a private dataset as a new public, versioned copy.

    Bumps ``version_id`` on the original, then saves a public copy named
    ``<set_name>_v<version_id>`` and deep-copies its graph data in
    background threads.
    """
    d: DataSet = DataSet(dataset_name)
    d = d.load_from_config()
    if (hasattr(d, 'is_public') and d.is_public) or d.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")

    # bump the version on the original (still private) dataset
    if not hasattr(d, 'version_id'):
        d.version_id = 0
    d.version_id += 1
    d.save_to_config()

    # save the published copy under its versioned name
    if hasattr(d, 'is_public'):
        d.is_public = True
    d.set_name = d.set_name + '_' + 'v' + str(d.version_id)
    d.save_to_config()

    # Create host/source directories for the published copy.
    # BUG FIX: os.mkdir failed here because the new dataset root directory
    # (data_path/<set_name>) was never created; os.makedirs creates the whole
    # chain and os.path.join replaces the fragile "/" concatenation.
    for machine in d.machines:
        host_path = os.path.join(data_path, d.set_name, machine)
        os.makedirs(host_path, exist_ok=True)
        for source in d.machines[machine].sources:
            os.makedirs(os.path.join(host_path, source), exist_ok=True)

    # deep-copy the graph data into the published dataset
    pre_dataset = dataset_name
    cur_dataset = d.set_name
    for pre_machine in d.machines:
        for pre_source in d.machines[pre_machine].sources:
            th = threading.Thread(target=deep_copy_neo4j, args=(pre_dataset, pre_machine, pre_source,
                                                                cur_dataset, pre_machine, pre_source))
            th.start()

    return ResponseVO(200, "Successfully publish.")


@router.post('/{username}/datasets')
async def add_private_dataset(username: str, dataset: DatasetVO):
    """Create a new private dataset config owned by the caller."""
    # On top of DatasetVO a lastUpdate timestamp still needs to be generated.
    # Only the caller himself may create, and only as a private dataset.
    if dataset.is_public or dataset.owner != username:
        return ResponseVO(702, "Illegal operation.")

    # refuse duplicate dataset names
    registry = DataDict().load_from_config()
    if dataset.name in registry.data:
        return ResponseVO(409, "Dataset name exists.")

    new_set = DataSet(dataset.name, dataset.owner, dataset.description,
                      dataset.collaborators, dataset.is_public)
    new_set.save_to_config()
    return ResponseVO(200, "Successfully added.")


# --- Hosts ---
@router.get("/{username}/hosts")
async def get_private_hosts(username: str, dataset_name: str):
    """Return every machine configured in the caller's private dataset."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.machines


@router.get("/{username}/hosts/{host_name}")
async def get_private_host(username: str, host_name: str, dataset_name: str):
    """Return one machine's attributes from the caller's private dataset."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.machines[host_name].__dict__


def remove_dataset_name(host: HostVO) -> dict:
    """Return a copy of *host*'s attribute dict without the 'dataset_name' key."""
    attrs = dict(host.__dict__)
    attrs.pop('dataset_name', None)
    return attrs


@router.put("/{username}/hosts/{host_name}")
async def update_private_host(username: str, host_name: str, updated_host: HostVO):
    """Overwrite the stored attributes of *host_name* with the request body."""
    ds = DataSet(updated_host.dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    ds.machines[host_name].modify(**remove_dataset_name(updated_host))
    ds.save_to_config()
    return ResponseVO(200, "Successfully updated.")


@router.post("/{username}/hosts")
async def add_private_host(username: str, host: HostVO) -> ResponseVO:
    """Register a new machine in the caller's private dataset.

    HostVO mirrors the Machine structure; the host also gets a directory
    under the dataset's data folder.
    """
    d = DataSet(host.dataset_name)
    d = d.load_from_config()
    if (hasattr(d, 'is_public') and d.is_public) or d.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")

    # BUG FIX: the duplicate check must run BEFORE mutating the config —
    # the old code saved the new Machine first and only then returned 409,
    # leaving the config modified on conflict. os.path.join also replaces
    # the fragile "/" concatenation.
    host_path = os.path.join(data_path, host.dataset_name, host.name)
    if os.path.exists(host_path):
        return ResponseVO(409, "Host name exists.")

    d.machines[host.name] = Machine(**remove_dataset_name(host))
    d.save_to_config()
    os.mkdir(host_path)
    return ResponseVO(200, "Successfully added.")


# --- Sources ---
@router.get("/{username}/sources")
async def get_private_sources(username: str, dataset_name: str, host_name: str):
    """Return every data source configured on one host of the caller's dataset."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.machines[host_name].sources


@router.get("/{username}/sources/{source_name}")
async def get_private_source(username: str, source_name: str, dataset_name: str, host_name: str):
    """Return one data source's attributes from the caller's private dataset."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.machines[host_name].sources[source_name].__dict__


@router.post("/{username}/sources")
async def add_private_source(username: str, source: SourceVO) -> ResponseVO:
    """Register a new data source under a host of the caller's private dataset.

    The source also gets a directory under the host's data folder.
    On top of SourceVO a lastUpdate timestamp still needs to be generated.
    """
    d = DataSet(source.dataset_name)
    d = d.load_from_config()
    if (hasattr(d, 'is_public') and d.is_public) or d.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")

    # BUG FIX: the duplicate check must run BEFORE mutating the config —
    # the old code saved the new Source first and only then returned 409,
    # leaving the config modified on conflict. os.path.join also replaces
    # the fragile "/" concatenation.
    source_path = os.path.join(data_path, source.dataset_name, source.host_name, source.name)
    if os.path.exists(source_path):
        return ResponseVO(409, "Source name exists.")

    d.machines[source.host_name].sources[source.name] = Source(source.name, source.description)
    d.save_to_config()
    os.mkdir(source_path)
    return ResponseVO(200, "Successfully added.")


# --- Columns ---
@router.get("/{username}/columns")
async def get_private_columns(username: str, dataset_name: str, host_name: str, source_name: str):
    """Return every column definition of one source in the caller's dataset."""
    ds = DataSet(dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    return ds.machines[host_name].sources[source_name].columns


@router.post("/{username}/columns")
async def add_private_column(username: str, column: ColumnVO) -> ResponseVO:
    """Add a column definition to a source in the caller's private dataset.

    NOTE(review): an existing column of the same name is silently overwritten
    by the dict assignment — confirm this is intended.
    """
    ds = DataSet(column.dataset_name).load_from_config()
    if getattr(ds, 'is_public', False) or ds.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    target_source = ds.machines[column.host_name].sources[column.source_name]
    target_source.columns[column.name] = Column(column.name,
                                                column.dataset_name,
                                                column.host_name,
                                                column.source_name,
                                                column.type,
                                                column.annotation)
    ds.save_to_config()
    return ResponseVO(200, "Successfully added.")


@router.delete("/{username}/columns/{column_name}")
async def del_private_column(username: str, column_name: str, dataset_name: str, host_name: str,
                             source_name: str) -> ResponseVO:
    """Delete one column definition from a source in the caller's dataset."""
    d = DataSet(dataset_name)
    d = d.load_from_config()
    if (hasattr(d, 'is_public') and d.is_public) or d.owner != username:
        # public dataset, or dataset not owned by this user
        return ResponseVO(702, "Illegal operation.")
    # BUG FIX: deleting a non-existent column previously raised KeyError
    # (HTTP 500); return a 404-style ResponseVO like the other endpoints.
    columns = d.machines[host_name].sources[source_name].columns
    if column_name not in columns:
        return ResponseVO(404, "Column Not Found.")
    del columns[column_name]
    d.save_to_config()
    return ResponseVO(200, "Successfully deleted.")
