# -*- coding: utf-8 -*-
# @Time : 2022/11/17 11:12
# @Author : GHHHHHHHHH
# @File : ViewService.py
# @Software : PyCharm
import os
import time
import cv2
from utils import JsonUtils
from dao import ViewDao
from werkzeug.datastructures import FileStorage
import numpy as np


def handleTrainLog(data):
    """Fetch the single train-log record for ``data['id']`` and return its parsed JSON."""
    records = ViewDao.get_train_log_by_id(data['id'])
    assert len(records) == 1
    return JsonUtils.open_json(records[0]['file_path'])


def handleLayerLog(data):
    """Fetch the single layer-log record for ``data['id']`` and return its parsed JSON."""
    records = ViewDao.get_layer_log_by_id(data['id'])
    assert len(records) == 1
    return JsonUtils.open_json(records[0]['file_path'])


def handlePreLog(data):
    """Fetch the single prediction-log record for ``data['id']`` and return its parsed JSON."""
    records = ViewDao.get_pre_log_by_id(data['id'])
    assert len(records) == 1
    return JsonUtils.open_json(records[0]['file_path'])


def get_all_layer_log(uuid):
    """Load every layer log for *uuid*; each parsed JSON dict is tagged with
    its database id (``layer_id``) and ``create_time``."""
    records = ViewDao.get_all_layer_logs(uuid)
    assert records is not None

    def _load(record):
        # Parse the JSON file on disk and attach the DB metadata the UI needs.
        parsed = JsonUtils.open_json(record["file_path"])
        parsed["layer_id"] = record['id']
        parsed['create_time'] = record['create_time']
        return parsed

    return [_load(record) for record in records]


def get_all_train_log(uuid):
    """Load every train log for *uuid*; each parsed JSON dict is tagged with
    its database id (``train_id``) and ``create_time``."""
    records = ViewDao.get_all_train_logs(uuid)
    assert records is not None

    def _load(record):
        # Parse the JSON file on disk and attach the DB metadata the UI needs.
        parsed = JsonUtils.open_json(record["file_path"])
        parsed["train_id"] = record["id"]
        parsed['create_time'] = record['create_time']
        return parsed

    return [_load(record) for record in records]


def get_all_pre_log(uuid):
    """Load every prediction log for *uuid*; each parsed JSON dict is tagged
    with its database id (``pre_id``) and ``create_time``."""
    records = ViewDao.get_all_pre_logs(uuid)
    assert records is not None

    def _load(record):
        # Parse the JSON file on disk and attach the DB metadata the UI needs.
        parsed = JsonUtils.open_json(record["file_path"])
        parsed['create_time'] = record['create_time']
        parsed["pre_id"] = record["id"]
        return parsed

    return [_load(record) for record in records]


def save_layer_log(log):
    """Persist an uploaded layer log under ``static/<uuid>/layerLog`` and register it.

    Expects ``log`` to be a mapping with a ``uuid`` key and a ``data`` key
    holding the uploaded file stream (wrapped in a werkzeug FileStorage).
    Returns 1 if the DAO recorded the file path, 0 otherwise.
    """
    # NOTE(review): assert is stripped under `python -O`; consider raising
    # ValueError for real input validation.
    assert "uuid" in log
    uuid = str(log['uuid'])
    # Project root = parent of this file's directory (dao/service layout).
    abs_url = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    log_dir = abs_url + '/static/' + uuid + '/layerLog'
    # makedirs replaces the original exists()+mkdir() pairs: it creates both
    # path levels in one call and is race-free if two requests arrive at once.
    os.makedirs(log_dir, exist_ok=True)
    # Timestamped destination file, e.g. .../layerLog/20221117-111234.json
    dest = time.strftime(log_dir + '/%Y%m%d-%H%M%S.json', time.localtime())
    FileStorage(log['data']).save(dest)
    return 1 if ViewDao.save_layer_log(uuid, dest) else 0


def save_pre_log(log):
    """Persist an uploaded prediction log, extract embedded images to disk,
    rewrite each node's ``image`` field to a fetchable URL, and register the
    log in the DB.

    Expects ``log`` to be a mapping with a ``uuid`` key and a ``data`` key
    holding the uploaded file stream. Each node's ``image`` is either None or
    raw pixel data convertible to a uint8 array for cv2.imwrite.
    Returns 1 if the DAO recorded the file path, 0 otherwise.
    """
    # NOTE(review): assert is stripped under `python -O`; consider raising.
    assert "uuid" in log
    uuid = str(log['uuid'])
    abs_url = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    log_dir = abs_url + '/static/' + uuid + '/preLog'
    # Race-free replacement for the original exists()+mkdir() pairs.
    os.makedirs(log_dir, exist_ok=True)
    dest = time.strftime(log_dir + '/%Y%m%d-%H%M%S pre.json', time.localtime())
    FileStorage(log['data']).save(dest)

    # Re-read what we just wrote so the embedded images can be externalized.
    # (Original rebound the `log` parameter here; renamed for clarity.)
    content = JsonUtils.open_json(dest)
    img_dir = '/www/wwwroot/http/preImages'
    for i, node in enumerate(content['nodes']):
        # Original compared `[image] == [None]`, which is just an `is None` test.
        if node['image'] is None:
            continue
        time_str = time.strftime('%Y%m%d-%H%M%S', time.localtime())
        filename = time_str + '-' + str(i) + '.jpg'
        # Original wrapped the pixels in an extra list and unwrapped with [0];
        # converting directly is equivalent.
        pixels = np.asarray(node['image'], dtype=np.uint8)
        cv2.imwrite(img_dir + '/' + filename, pixels)
        # Replace the raw pixel payload with the URL the frontend fetches.
        # TODO(review): host/port are hard-coded; move to configuration.
        node['image'] = 'http://43.140.221.237:9999/preImages/' + filename
    JsonUtils.save_json(dest, content)
    return 1 if ViewDao.save_pre_log(uuid, dest) else 0


def save_train_log(log):
    """Persist an uploaded train log under ``static/<uuid>/trainLog`` and register it.

    Expects ``log`` to be a mapping with a ``uuid`` key and a ``data`` key
    holding the uploaded file stream (wrapped in a werkzeug FileStorage).
    Returns 1 if the DAO recorded the file path, 0 otherwise.
    """
    # NOTE(review): assert is stripped under `python -O`; consider raising
    # ValueError for real input validation.
    assert "uuid" in log
    uuid = str(log['uuid'])
    # Project root = parent of this file's directory (dao/service layout).
    abs_url = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    log_dir = abs_url + '/static/' + uuid + '/trainLog'
    # makedirs replaces the original exists()+mkdir() pairs: it creates both
    # path levels in one call and is race-free if two requests arrive at once.
    os.makedirs(log_dir, exist_ok=True)
    # Timestamped destination file, e.g. .../trainLog/20221117-111234.json
    dest = time.strftime(log_dir + '/%Y%m%d-%H%M%S.json', time.localtime())
    FileStorage(log['data']).save(dest)
    return 1 if ViewDao.save_train_log(uuid, dest) else 0


def deleteTrainLog(_id):
    """Remove a train log's backing file and its DB record.

    Returns 1 when the DAO delete succeeds, 0 otherwise.
    """
    records = ViewDao.get_train_log_by_id(_id)
    assert len(records) == 1
    os.remove(records[0]['file_path'])
    return 1 if ViewDao.delete_train_log_by_id(_id) else 0


def deleteLayerLog(_id):
    """Remove a layer log's backing file and its DB record.

    Returns 1 when the DAO delete succeeds, 0 otherwise.
    """
    records = ViewDao.get_layer_log_by_id(_id)
    assert len(records) == 1
    os.remove(records[0]['file_path'])
    return 1 if ViewDao.delete_layer_log_by_id(_id) else 0


def deletePreLog(_id):
    """Remove a prediction log, its DB record, and the extracted images it references.

    Returns 1 when the DAO delete succeeds, 0 otherwise.
    """
    records = ViewDao.get_pre_log_by_id(_id)
    assert len(records) == 1
    record = records[0]
    content = JsonUtils.open_json(record['file_path'])
    img_dir = '/www/wwwroot/http/preImages/'
    for node in content['nodes']:
        if node['image'] is None:
            continue
        # `image` holds a URL written by save_pre_log; its last path segment
        # is the local filename under img_dir.
        # (Removed leftover debug print of the path.)
        os.remove(img_dir + node['image'].split('/')[-1])
    os.remove(record['file_path'])
    return 1 if ViewDao.delete_pre_log_by_id(_id) else 0


def getList(_id):
    """Build a combined, UI-ready summary of all logs for *_id*.

    Each entry carries id, a type tag (1=layer, 0=train, 2=pre), a display
    title, and the creation date. Layer entries come first, then train,
    then prediction — same ordering as before.
    """
    layer_items = [
        {
            'id': item['layer_id'],
            'type': 1,
            'title': item['model_name'] + str(item['layer_id']),
            'date': item['create_time'],
        }
        for item in get_all_layer_log(_id)
    ]
    train_items = [
        {
            'id': item['train_id'],
            'type': 0,
            'title': 'train' + str(item['train_id']),
            'date': item['create_time'],
        }
        for item in get_all_train_log(_id)
    ]
    pre_items = [
        {
            'id': item['pre_id'],
            'type': 2,
            'title': 'pre' + str(item['pre_id']),
            'date': item['create_time'],
        }
        for item in get_all_pre_log(_id)
    ]
    return layer_items + train_items + pre_items
