# -*- coding: utf-8 -*-
# @Time     : 2024/3/12 14:08
# @Author   : Long-Long Qiu
# @FileName : flask_server.py
# @Product  : PyCharm
# import packages
import json
import pickle
from flask import Flask, request, send_file
from flask_cors import CORS
import argparse
import time
import os
import hashlib
import random
import pandas as pd
import concurrent.futures
import subprocess



app = Flask(__name__)
CORS(app, supports_credentials=True, origins='*')  # allow cross-origin requests from any host
# NOTE: the CLI arguments are added later (in __main__), but request handlers
# read them at request time via parser.parse_args().
parser = argparse.ArgumentParser(description='启动Flask服务')

# Cargo-bay dimensions per truck model: [length, width, height]
# (presumably millimetres -- TODO confirm against the packer's units).
car_size_map = {
    '4.2': [4200, 1900, 2500],
    '6.8': [6800, 2400, 3000],
    '7.6': [7600, 2400, 3000],
    '9.6': [9600, 2400, 3000],
    '13.5': [13500, 2400, 3000],
    '17.5': [17500, 3000, 3000]
}

# Maximum load per truck model (presumably kilograms -- TODO confirm).
car_loadbear_map = {
    '4.2': 3 * 1000,
    '6.8': 10 * 1000,
    '7.6': 13 * 1000,
    '9.6': 18 * 1000,
    '13.5': 30 * 1000,
    '17.5': 35 * 1000
}

# Truck models (size key into the maps above) and price quoted for each route.
route_car_map = {
    '乌鲁木齐市-上海市': [{'size': '4.2', 'price': 16834}, {'size': '6.8', 'price': 30301},
                          {'size': '7.6', 'price': 30862}, {'size': '9.6', 'price': 34229},
                          {'size': '13.5', 'price': 44891}, {'size': '17.5', 'price': 50502}],
    '宁波市-武汉市': [{'size': '4.2', 'price': 3709}, {'size': '6.8', 'price': 3826}, {'size': '7.6', 'price': 5166},
                      {'size': '9.6', 'price': 7174}, {'size': '13.5', 'price': 8160}, {'size': '17.5', 'price': 9087}],
    '宁波市-襄阳市': [{'size': '4.2', 'price': 4689}, {'size': '6.8', 'price': 4851}, {'size': '7.6', 'price': 6510},
                      {'size': '9.6', 'price': 9063}, {'size': '13.5', 'price': 10270},
                      {'size': '17.5', 'price': 11360}],
    '武汉市-葛店开发区': [{'size': '4.2', 'price': 200}, {'size': '6.8', 'price': 350}, {'size': '7.6', 'price': 380},
                          {'size': '9.6', 'price': 480}, {'size': '13.5', 'price': 1750},
                          {'size': '17.5', 'price': 2000}],
    '焦作市-上海市': [{'size': '4.2', 'price': 3520}, {'size': '6.8', 'price': 4950}, {'size': '7.6', 'price': 4950},
                      {'size': '9.6', 'price': 6325}, {'size': '13.5', 'price': 9200},
                      {'size': '17.5', 'price': 10350}],
}


def md5_encode(text):
    """Return the hex MD5 digest of *text* (UTF-8 encoded)."""
    return hashlib.md5(text.encode('utf-8')).hexdigest()


def run_command(command):
    """Print a timestamped trace of *command*, then execute it in a shell.

    NOTE: shell=True runs the raw string; callers must pass trusted commands.
    """
    stamp = time.strftime('%H:%M:%S')
    print(f"{stamp} - Running command: {command}")
    subprocess.run(command, shell=True)

# Endpoint: return every known route with fully-resolved truck specs.
@app.route('/all_route', methods=['POST'])
def get_route():
    """Expand route_car_map into concrete specs per route.

    For each route, each truck's size key is resolved to its dimensions
    (car_size_map) and max load (car_loadbear_map); the request is appended
    to the JSON-lines log file, and the mapping is returned.
    """
    start = time.time()
    results = {
        name: [
            {
                'size': car_size_map[car['size']],
                'price': car['price'],
                'loadbear': car_loadbear_map[car['size']],
            }
            for car in cars
        ]
        for name, cars in route_car_map.items()
    }

    log_content = {
        'url': '/all_route',
        'user_ip': request.remote_addr,
        'start_time': int(start),
        'result': results
    }
    # utf-8 explicitly: route names are non-ASCII and ensure_ascii=False keeps
    # them raw, so a non-UTF-8 default locale encoding would raise here.
    with open(parser.parse_args().logPath, 'a', encoding='utf-8') as fp:
        fp.write(json.dumps(log_content, ensure_ascii=False) + '\n')

    return results


# Endpoint: parse an uploaded Excel file describing goods.
@app.route('/parse_goods_info', methods=['POST'])
def parse_goods_info():
    """Save the uploaded goods Excel to disk, parse it, and return it as JSON.

    Expected columns by position: kind, length, width, height, weight, count.
    """
    start = time.time()
    args = parser.parse_args()
    # 1. Build a unique, date-grouped path for the uploaded file.
    timeArray = time.localtime(int(time.time()))
    root = f'{args.dataPath}/goods/{time.strftime("%Y-%m-%d", timeArray)}'
    os.makedirs(root, exist_ok=True)  # exist_ok avoids the exists/makedirs race
    fileName = md5_encode(f"goods_{request.remote_addr}_{int(time.time() * 1000)}_{random.randint(0, 1000)}")
    # Keep the original file extension so pandas picks the right engine.
    nameEnd = request.files['file'].filename.split('.')[-1]
    filePath = f'{root}/{fileName}.{nameEnd}'

    # 2. Persist the upload so pandas can read it from disk.
    request.files['file'].save(filePath)

    # 3. Read and parse the goods rows by positional index.
    df = pd.read_excel(filePath)
    goods = []
    for i in range(len(df)):
        # NOTE(review): the Excel length/width/height columns are stored under
        # the keys width/height/depth -- presumably the packer's axis naming;
        # confirm against the consumer before "fixing" the mapping.
        goods.append({
            'kind': df.iloc[i, 0],
            'width': float(df.iloc[i, 1]),
            'height': float(df.iloc[i, 2]),
            'depth': float(df.iloc[i, 3]),
            'weight': float(df.iloc[i, 4]),
            'num': int(df.iloc[i, 5])
        })

    # 4. The uploaded file is deliberately kept on disk (audit trail).
    # os.remove(filePath)

    log_content = {
        'url': '/parse_goods_info',
        'user_ip': request.remote_addr,
        'start_time': int(start),
        'result': goods
    }
    # utf-8 explicitly: goods text may be non-ASCII and ensure_ascii=False
    # keeps it raw; a non-UTF-8 default locale encoding would raise here.
    with open(args.logPath, 'a', encoding='utf-8') as fp:
        fp.write(json.dumps(log_content, ensure_ascii=False) + '\n')

    # 5. Return the parsed goods as a JSON string.
    return json.dumps(goods, ensure_ascii=False)


# Endpoint: parse an uploaded Excel file describing per-route trucks.
@app.route('/parse_cars_info', methods=['POST'])
def parse_cars_info():
    """Save the uploaded truck Excel to disk, parse it, and return route->trucks.

    Expected columns by position: route, price, loadbear, length, width, height.
    """
    start = time.time()
    args = parser.parse_args()
    # 1. Build a unique, date-grouped path for the uploaded file.
    timeArray = time.localtime(int(time.time()))
    root = f'{args.dataPath}/cars/{time.strftime("%Y-%m-%d", timeArray)}'
    os.makedirs(root, exist_ok=True)  # exist_ok avoids the exists/makedirs race
    fileName = md5_encode(f"cars_{request.remote_addr}_{int(time.time() * 1000)}_{random.randint(0, 1000)}")
    # Keep the original file extension so pandas picks the right engine.
    nameEnd = request.files['file'].filename.split('.')[-1]
    filePath = f'{root}/{fileName}.{nameEnd}'

    # 2. Persist the upload so pandas can read it from disk.
    request.files['file'].save(filePath)

    # 3. Read and group truck rows by route.
    df = pd.read_excel(filePath)
    cars = {}
    for i in range(len(df)):
        route = df.iloc[i, 0]
        # NOTE(review): length/width/height are stored under width/height/depth,
        # mirroring /parse_goods_info -- confirm before changing.
        cars.setdefault(route, []).append({
            'price': float(df.iloc[i, 1]),
            'loadbear': float(df.iloc[i, 2]),
            'width': float(df.iloc[i, 3]),
            'height': float(df.iloc[i, 4]),
            'depth': float(df.iloc[i, 5])
        })

    # 4. The uploaded file is deliberately kept on disk (audit trail).
    # os.remove(filePath)

    log_content = {
        'url': '/parse_cars_info',
        'user_ip': request.remote_addr,
        'start_time': int(start),
        'result': cars
    }
    # utf-8 explicitly: route names may be non-ASCII and ensure_ascii=False
    # keeps them raw; a non-UTF-8 default locale encoding would raise here.
    with open(args.logPath, 'a', encoding='utf-8') as fp:
        fp.write(json.dumps(log_content, ensure_ascii=False) + '\n')

    # 5. Return the grouped trucks (Flask serializes the dict).
    return cars



# Endpoint: bin-packing computation.
@app.route('/pack', methods=['POST'])
def pack():
    """Start a packing computation (first request) or return cached results.

    A token hashed from (client ip, cars payload, items payload) identifies
    the request; a results directory named after the token doubles as the
    "already computed" flag.
    """
    args = parser.parse_args()
    path_to_result = args.sessionPath

    # Request parameters: client ip plus goods and truck descriptions.
    user_ip = request.remote_addr
    params = json.loads(request.get_data())
    items = params['items']
    all_cars = params['cars']
    # Cache key over requester ip + the exact goods/cars payloads.
    token = md5_encode(f"{user_ip}-{json.dumps(all_cars, ensure_ascii=False)}-{json.dumps(items, ensure_ascii=False)}")

    result_dir = f"{path_to_result}/{token}"
    if not os.path.exists(result_dir):
        # New request: persist the inputs as JSON for the worker process.
        # (with-blocks close the handles the original json.dump(.., open(..))
        # pattern leaked; utf-8 because ensure_ascii=False emits raw non-ASCII.)
        item_path = f"./goods/{token}_goods.json"
        with open(item_path, 'w', encoding='utf-8') as fp:
            json.dump(items, fp, ensure_ascii=False)
        cars_path = f"./cars/{token}_cars.json"
        with open(cars_path, 'w', encoding='utf-8') as fp:
            json.dump(all_cars, fp, ensure_ascii=False)
        os.makedirs(result_dir, exist_ok=True)
        # Launch the multi-process packing script with:
        # 1) goods file 2) cars file 3) result dir 4) log path
        pack_command = f"""{args.pythonPath} ./pack_workers.py --items {item_path} --cars {cars_path} --resultPath {result_dir} --logPath {args.logPath}"""
        os.system(pack_command)
        return {}
    else:
        # Results already exist: read, sort by price, return the cheapest few.
        with open(f'{result_dir}/result.json', 'r') as file:
            data = json.load(file)
            results = data['result']
            results.sort(key=lambda x: x['price'])
            # Honor the --topNum option (was hardcoded to 5, and the old
            # comment even claimed 10).
            data['result'] = results[:args.topNum]
        return data


# Endpoint: render packing-result figures.
@app.route('/plot', methods=['POST'])
def plot():
    """Kick off figure rendering (first request) or return the merged results.

    Rendering is fanned out as one subprocess per bin via a process pool; a
    results directory named after the request token marks "already rendered".
    """
    args = parser.parse_args()
    path_to_result = args.result_plot

    # Request parameters: client ip plus serialized heaps/packer state.
    user_ip = request.remote_addr
    params = json.loads(request.get_data())
    # SECURITY: pickle.loads on request-supplied bytes can execute arbitrary
    # code -- only expose this endpoint to trusted clients.
    heaps = pickle.loads(bytes(params['heaps'], encoding='latin1'))
    packer = pickle.loads(bytes(params['packer'], encoding='latin1'))
    use_plotly = params.get('use_plotly', False)
    plot_type = params.get('figure_type', 1)
    # Cache key over requester ip + the serialized heaps/packer payloads.
    token = md5_encode(f"{user_ip}-{params['heaps']}-{params['packer']}")

    result_dir = f"{path_to_result}/{token}"
    if not os.path.exists(result_dir):
        # New request: create the output directories (the original created
        # result_dir twice; exist_ok also removes the exists/makedirs race)...
        os.makedirs(result_dir, exist_ok=True)
        os.makedirs(f"./bin/{token}", exist_ok=True)
        # ...and persist the heaps for the render workers (with-block closes
        # the handle the original json.dump(.., open(..)) pattern leaked).
        heaps_path = f"./heaps/{token}_heaps.json"
        with open(heaps_path, 'w', encoding='utf-8') as fp:
            json.dump(heaps, fp, ensure_ascii=False)

        # Shared render options: image dir, delete flag, heap-outline flag, log path.
        imgPath = f"{args.imgPath}"
        delImg = f"{args.delImg}"
        plotHeapCube = f"{args.plotHeapCube}"
        logPath = f"{args.logPath}"

        # One render command per bin; each bin object is pickled to its own file.
        plot_commands = []
        for i, b in enumerate(packer.bins):
            bin_path = f"./bin/{token}/bin_{i}.txt"
            with open(bin_path, 'wb') as file:
                pickle.dump(b, file)
            result_plot = f"{args.result_plot}/{token}/result_{i}.json"
            # Worker args: 1 heaps 2 index 3 bin 4 use_plotly 5 plot_type
            # 6 result_plot 7 imgPath 8 delImg 9 plotHeapCube 10 logPath 11 user_ip
            plot_commands.append(f"""{args.pythonPath} ./render_worker_single.py --heaps {heaps_path} --index {i} --bin {bin_path} --use_plotly {use_plotly} --plot_type {plot_type} --result_plot {result_plot} --imgPath {imgPath} --delImg {delImg} --plotHeapCube {plotHeapCube} --logPath {logPath} --user_ip {user_ip}""")
        # Execute every render command in parallel.
        with concurrent.futures.ProcessPoolExecutor() as executor:
            for command in plot_commands:
                executor.submit(run_command, command)

        return {}
    else:
        # Results exist: merge the per-bin JSON files once, then serve result.json.
        merged_path = f'{result_dir}/result.json'
        if not os.path.exists(merged_path):
            # Collect every per-bin result file under the token directory.
            json_files = [file for file in os.listdir(result_dir) if file.endswith('.json')]
            all_data = []
            for file in json_files:
                with open(f'{result_dir}/{file}', 'r') as f:
                    all_data.append(json.load(f))
            merged_data = {
                "base64": all_data[0]["base64"],  # assumes all files share the same "base64"
                # NOTE(review): output key "care_image" vs per-bin key
                # "car_image" looks like a typo, but the frontend may depend on
                # it -- confirm before renaming.
                "care_image": [d["car_image"] for d in all_data]
            }
            with open(merged_path, 'w') as f:
                json.dump(merged_data, f)

        # Single read path (the original duplicated this in both branches).
        with open(merged_path, 'r') as file:
            data = json.load(file)
        return data

# argparse treats a bool-typed option as a string (so "False" would be truthy);
# boolean CLI flags must therefore be parsed explicitly.
def str2bool(v):
    """Parse a command-line boolean flag value.

    Returns bools unchanged; otherwise recognizes common truthy/falsy
    spellings case-insensitively. Raises argparse.ArgumentTypeError on
    anything else.
    """
    if isinstance(v, bool):
        return v
    # The original tuples also listed 'True'/'False', which were unreachable
    # after .lower(); they are dropped here.
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    if v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')


if __name__ == '__main__':
    # CLI options are registered here, but request handlers read them at
    # request time via parser.parse_args().
    parser.add_argument('--port', type=int, default=13110, help='端口号')
    parser.add_argument('--imgPath', type=str, default='./front/images', help='图片保存路径')
    parser.add_argument('--delImg', type=str2bool, default=False, help='是否在磁盘上删除图片')
    parser.add_argument('--topNum', type=int, default=5, help='返回最优的结果数量')
    parser.add_argument('--debug', type=str2bool, default=True, help='是否启用Flask的debug模式')
    parser.add_argument('--dataPath', type=str, default='./data', help='用户上传的货物、路线、车辆等信息的存储路径')
    parser.add_argument('--plotHeapCube', type=str2bool, default=False, help='是否绘制heap的外边框')
    parser.add_argument('--result_plot', type=str, default='./results_plot', help='result_plot')
    parser.add_argument('--logPath', type=str, default='./pack.log', help='日志文件路径')
    parser.add_argument('--excelPath', type=str, default='./excel_results', help='结果明细excel存储路径')
    parser.add_argument('--sessionPath', type=str, default='./results_pack', help='结果session存储路径')
    parser.add_argument('--pythonPath', type=str, default='/usr/bin/python3', help='python执行路径')

    args = parser.parse_args()

    # Create every output directory up front (exist_ok removes the
    # exists/makedirs race the original per-path checks had).
    for path in (args.imgPath, args.excelPath, args.sessionPath, args.result_plot):
        os.makedirs(path, exist_ok=True)

    # Reuse the already-parsed args instead of re-parsing just for --debug.
    app.run(host='0.0.0.0', port=args.port, debug=args.debug)
