"""
Runs on the client-backend.
Reads a GRIB file received by the client-backend and inserts its contents into InfluxDB on the respective data node.

usage: python3 insert.py path/to/grib/file.grib
"""
import datetime
import logging
import os
import sys
import time

import pygrib
import requests
import zstd

import logger as my_log
import series_key

# Configure the project-wide logging handlers/format (see logger.py),
# then grab a module-level logger per the stdlib logging convention.
my_log.init()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


# Name-node locate endpoint.  Production IP kept for reference:
#   http://10.249.177.55:8081/locate
_LOCATE_URL = "http://localhost:8081/locate"

# Number of grid points a complete message is expected to carry
# (value taken from the original check; grid resolution not visible here).
_EXPECTED_VALUES = 1038240

# Timeout (seconds) for every HTTP call so a dead node cannot hang the script.
_HTTP_TIMEOUT = 60


def _collect_series_keys(grib_file_path: str) -> tuple[list, list[dict]]:
    """First pass over the GRIB file: one (series_key, meta_info) per message.

    Returns (series_key_list, meta_info_list), index-aligned with the
    message order in the file so the second pass can zip them back together.
    """
    series_key_list = []
    meta_info_list = []
    with pygrib.open(grib_file_path) as grib_file:
        for msg in grib_file:
            values_count = msg["getNumberOfValues"]
            if values_count != _EXPECTED_VALUES:
                # Suspicious but non-fatal: the original pipeline still
                # uploads short messages, so only log it.
                logger.error("incomplete data: %d values", values_count)
            measurement = msg["parameterName"]
            pressure_level = str(msg["level"])
            # Message time is taken as UTC wall-clock fields -> epoch seconds.
            timestamp = int(datetime.datetime(
                msg["year"], msg["month"], msg["day"],
                msg["hour"], msg["minute"], msg["second"],
                tzinfo=datetime.timezone.utc).timestamp())
            meta_info_list.append({
                'variable': measurement,
                'pressure_level': pressure_level,
                'timestamp': timestamp,
            })
            series_key_list.append(
                series_key.get_series_key(measurement, pressure_level, timestamp))
    return series_key_list, meta_info_list


def _locate_data_nodes(series_key_list: list) -> dict | None:
    """Ask the name node which data node owns each unique series key.

    Returns {series_key: data_node_ip}, or None on HTTP failure
    (already logged).  The name node replies with a comma-separated IP
    list in the same order as the uploaded keys.
    """
    unique_key_list = list(set(series_key_list))
    # b"".join is O(n); the original `upload_keys += key` loop was quadratic.
    upload_keys = b"".join(unique_key_list)
    response = requests.post(_LOCATE_URL, files={'data': upload_keys},
                             timeout=_HTTP_TIMEOUT)
    if response.status_code != 200:
        logger.error("failed to get datanode from namenode: [%s] %s",
                     response.status_code, response.text)
        return None
    data_nodes_list = response.text.strip().split(",")
    if len(data_nodes_list) != len(unique_key_list):
        # zip() would silently truncate; make the mismatch visible.
        logger.error("namenode returned %d nodes for %d keys",
                     len(data_nodes_list), len(unique_key_list))
    return dict(zip(unique_key_list, data_nodes_list))


def _upload_messages(grib_file_path: str, series_key_list: list,
                     meta_info_list: list[dict], key_node_map: dict) -> None:
    """Second pass: compress each message and POST it to its owning data node.

    Failures are logged per message; the loop continues so one bad node
    does not abort the whole file.
    """
    with pygrib.open(grib_file_path) as grib_file:
        for i, msg in enumerate(grib_file):
            data_node_ip = key_node_map[series_key_list[i]]
            block_info = meta_info_list[i]
            logger.info("insert %s into %s", block_info, data_node_ip)
            upload_start_time = time.time()

            # zstd level 1: fastest compression; decompression speed is
            # level-independent.
            compressed_data = zstd.compress(msg.values, 1)
            upload_params = block_info.copy()
            upload_params["variable"] = \
                series_key.variable_name2id[upload_params["variable"]]
            # NOTE(review): pressure_level is also translated through
            # variable_name2id; if series_key has a dedicated pressure-level
            # map this is likely a copy-paste bug -- confirm against series_key.
            upload_params["pressure_level"] = \
                series_key.variable_name2id[upload_params["pressure_level"]]
            resp = requests.post(f"http://{data_node_ip}:8082/insert",
                                 params=upload_params,
                                 files={"data": compressed_data},
                                 timeout=_HTTP_TIMEOUT)
            if resp.status_code != 200:
                # Include the node's response so failures are diagnosable,
                # matching the locate-failure log style.
                logger.error("failed to insert %s: [%s] %s",
                             block_info, resp.status_code, resp.text)
            else:
                logger.info("insert %s success", block_info)
            logger.info("insert used %s", time.time() - upload_start_time)


def main() -> None:
    """Insert the GRIB file named on the command line into the cluster.

    Three phases: derive a series key per message, ask the name node
    which data node owns each key, then upload each compressed message
    to its node.  Errors are logged; the function returns early on
    unusable arguments or a failed locate call.
    """
    if len(sys.argv) != 2:
        logger.error("invalid args")
        return

    grib_file_path = sys.argv[1]
    if not os.path.isfile(grib_file_path):
        logger.error("file not exists %s", grib_file_path)
        return

    file_start_time = time.time()
    series_key_list, meta_info_list = _collect_series_keys(grib_file_path)
    logger.info("generate key used %s", time.time() - file_start_time)

    key_node_map = _locate_data_nodes(series_key_list)
    if key_node_map is None:
        return

    _upload_messages(grib_file_path, series_key_list, meta_info_list,
                     key_node_map)


# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
