# import sys
# sys.path.append('..')
import shlex
from typing import Optional, List

from fastapi import APIRouter
from pydantic import Extra, BaseModel
from ray.job_submission import JobSubmissionClient

from .utils.logger import get_logger

# Router for the audio-embedding endpoints; presumably included by the
# main FastAPI app elsewhere — confirm against the app factory.
emb_router = APIRouter()
# Shared module logger obtained from the project's logging helper.
logger = get_logger()

class TaskItem(BaseModel):
    """A single embedding task: an object id plus the path to its input.

    `embedding_progress` is declared explicitly with a default: under
    ``Extra.allow`` an undeclared extra field only exists on the instance
    when the caller supplied it, so reading ``task.embedding_progress`` in
    ``submit_task`` would raise ``AttributeError`` for payloads that omit
    it. Declaring it keeps the attribute always readable and is
    backward-compatible with callers that already send the field.
    """
    noid: str  # caller-assigned object identifier
    path: str  # path to the item to embed — exact semantics set by the caller
    embedding_progress: Optional[str] = None  # seeded to "pending" on submit

    class Config:
        # Preserve any additional fields the caller includes in the payload.
        extra = Extra.allow

class RequestModel(BaseModel):
    """Payload for POST /submit/job: a batch of embedding tasks.

    NOTE(review): ``callback_url`` is not used in this module — presumably
    the embedding job reports results to it; confirm against embedding.py.
    """
    callback_url: str  # URL for the job to report back to (not used here)
    task_id: str  # client-side identifier for this batch submission
    tasks: List[TaskItem]  # the individual embedding tasks to run

@emb_router.get("/")
def root():
    return "Hello World!"
@emb_router.post('/submit/job')
def submit_task(request: RequestModel):
    """Submit a batch of embedding tasks as a Ray job.

    Seeds each task's ``embedding_progress`` to ``"pending"`` when the
    caller did not provide one, serializes the whole request to JSON, and
    passes it to the embedding script through the Ray job-submission API.

    Returns ``{"job_id": ..., "dashboard": ...}`` on success, or
    ``{"msg": <error text>}`` when anything in submission fails (the broad
    catch keeps the endpoint from surfacing a 500 to the caller).
    """
    try:
        for task in request.tasks:
            # With Extra.allow an undeclared field is simply absent (not
            # None) when the caller omits it, so guard with getattr instead
            # of a plain attribute access that would raise AttributeError.
            if getattr(task, "embedding_progress", None) is None:
                task.embedding_progress = "pending"
        data = request.model_dump_json()
        logger.debug(f"submitting payload ({type(data)}): {data}")
        client = JobSubmissionClient("http://localhost:8265")
        job_id = client.submit_job(
            # shlex.quote keeps quotes/shell metacharacters inside the JSON
            # payload from breaking — or injecting into — the shell command;
            # the previous hand-written '...' wrapping broke on any single
            # quote in the serialized request.
            entrypoint=f"cd audio_embedding && python embedding.py -i {shlex.quote(data)} -c configs/config.yml",
            # Path to the local directory that contains the embedding script.
            runtime_env={"working_dir": "/work/job"},
        )
        return {"job_id": job_id, "dashboard": "http://localhost:8265"}
    except Exception as e:
        # logger.exception records the traceback for post-mortem debugging.
        logger.exception(f"offline task predictor occurs: {e}")
        return {"msg": str(e)}
