import argparse
from typing import Dict, Any

import ray
import json

import yaml

from modules.autocut_predictor import AutoCutPredictor
# from storage import Boto3Storage
from utils.logger import get_logger
from utils.tools import notify, parse_s3url, read_metadata
from ray.data import DataContext

# Disable Ray Data's per-stage progress bars so batch-job logs stay clean.
DataContext.get_current().enable_progress_bars = False
logger = get_logger()

parser = argparse.ArgumentParser(
    prog='autocut',
    description='auto cut')

# Both arguments are mandatory: without them the script previously crashed
# later with an opaque TypeError (json.loads(None) / open(None)).
parser.add_argument("--inputs",
                    "-i",
                    type=str,
                    required=True,
                    help="JSON string with the request payload "
                         "(task_id, callback_url, tasks)")
parser.add_argument("--config_path",
                    "-c",
                    type=str,
                    required=True,
                    # was a copy-pasted "json argument" — this file is parsed as YAML below
                    help="path to the YAML configuration file")
args = parser.parse_args()

# Load service configuration (model / ray options) from the YAML file.
with open(args.config_path, encoding="utf-8") as fp:
    config = yaml.safe_load(fp)

# Decode the JSON request passed on the command line; callback_url/task_id
# are propagated onto every row so downstream stages can report progress.
request_data = json.loads(args.inputs)
callback_url = request_data.get("callback_url")
task_id = request_data.get("task_id")
# NOTE(review): the Boto3/S3 storage backend is currently disabled; task
# metadata is expected to arrive embedded in the request instead of being
# fetched from object storage.
# storage_cfg = config["storage"]
# storage = Boto3Storage(storage_cfg['addr'],
#                        storage_cfg['accessKey'],
#                        storage_cfg['secretKey'])

# List of task dicts; each is expected to carry at least a "meta_path"
# (logged per row below) — TODO confirm full schema against the caller.
tasks = request_data.get("tasks")
# meta_paths = request_data.get("meta_paths")

# Disabled: earlier flow that loaded metadata from local files or S3 and
# merged in backtracking points before prediction.
# items = []
# for task in tasks:
#     with open(task.get("meta_path"), 'r') as f:
#         meta = json.load(f)
#     meta["backtracking_points"] = task.get("backtracking_points")
#     # logger.info(meta)
#     items.append(meta)
#     metadata = storage.get_json(task.get("meta_path"))
# for meta_path in meta_paths:
#     # bucket_name, object_name = parse_s3url(meta_path)
#     # print(bucket_name, object_name)
#     metadata = storage.get_json(meta_path)
#     items.append(metadata)

# NOTE(review): use_notify is set but every notify() call below is commented
# out, so this flag currently has no effect.
use_notify = True
# logger.info(items)


def set_progress_status(row: Dict[str, Any]) -> Dict[str, Any]:
    """Tag a task row with the request identifiers and an initial
    'pending' autocut progress marker, mutating and returning the row."""
    row.update(
        task_id=task_id,
        callback_url=callback_url,
        autocut_progress="pending",
    )
    return row


# if use_notify:
#     notify(tasks, callback_url, task_id, "pending")

# Ray Data pipeline: split the task list into the configured number of
# blocks, tag each row via set_progress_status, then run AutoCutPredictor
# over the rows. AutoCutPredictor is passed as a class with
# fn_constructor_args, so Ray constructs it per worker (presumably one
# model instance per actor — confirm against modules.autocut_predictor).
ray_cfg = config['model']['ray_options']
results = []  # NOTE(review): never populated — the append below is commented out.
for row in (
        ray.data.from_items(tasks, override_num_blocks=ray_cfg['override_num_blocks'])
                .map(set_progress_status)
                .map(AutoCutPredictor,
                     concurrency=ray_cfg['concurrency'],
                     num_gpus=ray_cfg['num_gpus'],
                     fn_constructor_args=[config],
                     )
                .iter_rows()
):
    # iter_rows() drives execution lazily; each finished row is currently
    # only logged — the S3 upload and per-row notification below are disabled.
    logger.info(f"meta:{row['meta_path']} autocut progress: {row['autocut_progress']}")
    # bucket_name, obj_name = parse_s3url(row['meta_path'])
    # del row['backtracking_points']
    # try:
    #     storage.put_json(row, row['meta_path'])
    # except:
    #     row['autocut_progress'] = "failed"
    #     logger.error(f"meta:{row['meta_path']} autocut progress: {row['autocut_progress']}")
    # if use_notify:
    #     # del row['silent_points']
    #     # logger.info(row)
    #     notify([row], callback_url, task_id, "part_finished")
    # results.append(row)

# if use_notify:
#     notify(results, callback_url, task_id, "all_finished")
