from langchain import hub, LLMChain
import os
from langchain_ollama import ChatOllama

def main() -> None:
    """Interactive prompt-improver REPL.

    Repeatedly asks the user for a task and a draft ("lazy") prompt, sends
    both through the hardkothari/prompt-maker template to a local Ollama
    model, and prints the improved prompt. Typing 'quit' at either question
    exits the loop.
    """
    # Prompt-engineering template pulled from the LangChain Hub
    # (requires network access on first run).
    prompt_maker_template = hub.pull("hardkothari/prompt-maker")

    # Ollama endpoint is overridable via env var; the default preserves the
    # original hard-coded LAN address. The model must already be pulled on
    # that host.
    llm = ChatOllama(
        base_url=os.environ.get("OLLAMA_BASE_URL", "http://192.168.99.142:11434"),
        model="qwen2.5-coder:latest",
    )
    llm_chain = LLMChain(llm=llm, prompt=prompt_maker_template)

    while True:
        task = input("What is your task ? Type quit to leave the chat.\n\n")
        if task == 'quit':
            break

        lazy_prompt = input("What is your current prompt? Type quit to leave the chat.\n\n")
        if lazy_prompt == 'quit':
            break

        print("\n Response:")
        # Chain __call__ is deprecated; invoke() is the supported entry point.
        response = llm_chain.invoke({'lazy_prompt': lazy_prompt, 'task': task})
        # invoke() returns a dict echoing the inputs plus the generated 'text';
        # print just the text, falling back to the full dict if the key is absent.
        print(response.get('text', response))


if __name__ == "__main__":
    main()