from langchain_ollama import ChatOllama

# Define local LLM
local_llm = "llama3.2:3b-instruct-fp16"

# Instantiate two variants of the model: a plain-text chat model and one
# that constrains its output to valid JSON
llm = ChatOllama(model=local_llm, temperature=0)
llm_json_mode = ChatOllama(model=local_llm, temperature=0, format="json")

# Test the plain-text model; invoke() returns an AIMessage, whose text lives
# in .content
response = llm.invoke("What is AI?")
print(response.content)
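
# --- Sketch (not in the original file): exercising the JSON-mode model ---
# Assumption: with format="json", Ollama returns a string of valid JSON in
# the message content, so it can be parsed with json.loads. The prompt
# wording and the keys requested below are illustrative only.
import json

json_response = llm_json_mode.invoke(
    "Return a JSON object with keys 'term' and 'definition' describing AI."
)
parsed = json.loads(json_response.content)
print(parsed)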