import ollama
from ollama import Client
import os
import sys

# Ollama model tag used for every translation request (a quantized
# llama-translate build; must already be pulled on the Ollama server).
model: str = "7shi/llama-translate:8b-q4_K_M"

def translation(instruction, input_text):
    """Translate *input_text* via the Ollama chat API using an Alpaca-style prompt.

    Parameters
    ----------
    instruction : str
        Task description placed under the "### Instruction:" header
        (e.g. "Translate English to Chinese.").
    input_text : str
        The text to process, placed under the "### Input:" header.

    Returns
    -------
    str or None
        The model's reply with surrounding whitespace stripped, or
        ``None`` when the request fails with an ``ollama.ResponseError``.
    """
    # Alpaca-style instruction prompt; the model is expected to fill in
    # the text after "### Response:".
    prompt = f"""### Instruction:
{instruction}

### Input:
{input_text}

### Response:
"""
    messages = [{"role": "user", "content": prompt}]
    # Use .get() so a missing OLLAMA_HOST no longer raises KeyError;
    # Client(host=None) falls back to the library's default host
    # (the OLLAMA_HOST env var or localhost).
    client = Client(host=os.environ.get("OLLAMA_HOST"))

    try:
        # Send the chat request and capture the response.
        response = client.chat(model=model, messages=messages)
    except ollama.ResponseError as e:
        # The server reported a failure (e.g. unknown model, HTTP error).
        print("Error:", e.error)
        return None

    # Only the assistant message text is of interest.
    return response["message"]["content"].strip()


if __name__ == "__main__":
    # Guard against a missing argument instead of crashing with IndexError.
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} TEXT_TO_TRANSLATE", file=sys.stderr)
        sys.exit(1)
    text_to_trans = sys.argv[1]
    # Plain string literals: the former f-string prefixes had no placeholders.
    translated_line = translation("Translate English to Chinese.", text_to_trans)
    if translated_line is None:
        # translation() already printed the error; signal failure to the shell
        # instead of printing the literal string "None" and exiting 0.
        sys.exit(1)
    print(translated_line)
