import argparse
from typing import Dict, Any

import ray
import json

import yaml

from modules import ASRPredictor
from utils.logger import get_logger
from utils.tools import notify, parse_s3url, read_metadata
# from storage import Boto3Storage
from ray.data import DataContext

# Silence Ray Data's per-stage progress bars so batch-job logs stay readable.
DataContext.get_current().enable_progress_bars = False
logger = get_logger()

# Command-line interface: the job is driven by a JSON request payload
# (--inputs) and a path to a YAML configuration file (--config_path).
parser = argparse.ArgumentParser(
    prog='asr',
    description='audio asr')

parser.add_argument("--inputs",
                    "-i",
                    type=str,
                    required=True,
                    help="JSON-encoded request payload")
parser.add_argument("--config_path",
                    "-c",
                    type=str,
                    required=True,
                    help="path to the YAML config file")
args = parser.parse_args()

# Load the YAML pipeline configuration; read as UTF-8 explicitly so the
# result does not depend on the platform's default encoding.
with open(args.config_path, encoding="utf-8") as fp:
    config = yaml.safe_load(fp)

# Decode the JSON request payload; it carries callback routing info and
# (below) the list of metadata files to transcribe.
request_data = json.loads(args.inputs)
callback_url = request_data.get("callback_url")  # None when the key is absent
task_id = request_data.get("task_id")  # None when the key is absent

# NOTE(review): S3-backed storage is currently disabled; metadata is read
# from the local filesystem instead.
# storage_cfg = config["storage"]
# storage = Boto3Storage(storage_cfg['addr'],
#                        storage_cfg['accessKey'],
#                        storage_cfg['secretKey'])

# Paths of per-item metadata JSON files listed in the request.
meta_paths = request_data.get("meta_paths")


def _read_meta(path):
    # Load a single metadata record from a local JSON file.
    # (S3 variant, disabled: metadata = storage.get_json(path))
    with open(path) as handle:
        return json.load(handle)


items = [_read_meta(p) for p in meta_paths]

# Toggle for callback notifications to callback_url.
use_notify = True


def set_progress_status(row: Dict[str, Any]) -> Dict[str, Any]:
    """Stamp a metadata row with routing info and a 'pending' status.

    Mutates *row* in place and returns it for convenience. Relies on the
    module-level ``task_id`` and ``callback_url`` parsed from the request.
    """
    row.update(
        task_id=task_id,
        callback_url=callback_url,
        transcribe_progress="pending",
    )
    return row


# Announce the batch as pending before any transcription work starts.
if use_notify:
    notify(items, callback_url, task_id, "pending")

ray_cfg = config['model']['ray_options']

# Run ASRPredictor as a pool of stateful Ray actors over the metadata items.
dataset = ray.data.from_items(items).map(
    ASRPredictor,
    concurrency=ray_cfg['concurrency'],
    num_gpus=ray_cfg['num_gpus'],
    fn_constructor_args=[config],
    # runtime_env={
    #     "env_vars": {"LD_LIBRARY_PATH": "/root/miniconda3/envs/search/lib:$LD_LIBRARY_PATH",
    #                  "MODELSCOPE_CACHE": "/home/projects/ai_models"},
    #              }
)

results = []
for record in dataset.iter_rows():
    logger.info(record)
    # NOTE(review): per-row S3 upload is currently disabled:
    # bucket_name, obj_name = parse_s3url(record['meta_path'])
    # try:
    #     storage.put_json(record, record['meta_path'])
    # except:
    #     record['transcribe_progress'] = "failed"
    #     logger.error(f"{record['meta_path']} upload failed")
    if use_notify:
        # Stream each finished row back to the caller as it completes.
        # del record["silent_points"]
        notify([record], callback_url, task_id, "part_finished")
    results.append(record)

# Final callback carrying the complete result set.
if use_notify:
    notify(results, callback_url, task_id, "all_finished")
