import requests
from openai import OpenAI
from rich import print
from .config import read_config

env = read_config()

# Help text printed by model_help(); [blue]...[/blue] tags are rich markup.
model_help_message = '''
  [blue]/?, /h, /help[/blue]      show this help message
  [blue]/ls, /list API[/blue]     list models for selected API
  [blue]/load API MODEL[/blue]    load a different model
  [blue]/c, /clear[/blue]         clear screen
  [blue]/q, /bye[/blue]           exit

Use [blue]`"""`[/blue] to begin a multi-line message
Use [blue]`[]`[/blue] to upload a readable file, path separator [blue]`/`[/blue]
'''

def get_models() -> list:
    """Fetch the names of locally available Ollama models.

    Queries the Ollama ``/api/tags`` endpoint (base URL taken from the
    ``ollama_base_url`` config entry) and extracts each entry's ``name``.

    Returns:
        list: Model name strings; an empty list when the server responds
        with a non-200 status (the original implicitly returned ``None``,
        which crashed callers that iterate the result).
    """
    ollama_base_url = env.get('ollama_base_url')
    models_url = f'{ollama_base_url}/api/tags'
    # timeout keeps the CLI from hanging forever on an unreachable server
    response = requests.get(models_url, timeout=10)
    if response.status_code == 200:
        data = response.json()
        return [model['name'] for model in data.get('models', [])]
    return []

def list_models(api: str) -> list:
    """List model identifiers available from the given API provider.

    Args:
        api: Provider name — ``'ollama'``, ``'openai'``, ``'deepseek'``
            or ``'tongyi'``.

    Returns:
        list: Model id strings; an empty list for an unrecognised ``api``
        (the original fell through and implicitly returned ``None``,
        breaking print_models).
    """
    if api == 'ollama':
        return get_models()
    if api in ('openai', 'deepseek', 'tongyi'):
        # These providers all speak the OpenAI-compatible /models endpoint;
        # credentials come from per-provider config keys.
        api_key = env.get(f'{api}_api_key')
        base_url = env.get(f'{api}_base_url')
        client = OpenAI(api_key=api_key, base_url=base_url)
        return [model.id for model in client.models.list().data]
    return []

def print_models(api: str) -> None:
    """Print each model name for *api*, one per line, in bold blue.

    Args:
        api: Provider name, forwarded to :func:`list_models`.
    """
    # `or []` guards against a None result (e.g. unknown api / failed
    # request), which would otherwise raise TypeError on iteration.
    models = list_models(api) or []
    for model in models:
        print(f'[bold blue]{model}[/bold blue]')

def model_help() -> None:
    """Print the interactive command help message."""
    # Fixed user-facing typo: 'Aviliable' -> 'Available'.
    print('Available commands:', end='')
    print(model_help_message, end='')
