0-Parth-D committed on
Commit
a1934d7
·
1 Parent(s): 143bd7b

Fixed API key error

Browse files
Files changed (1) hide show
  1. src/rag_code_assistant/agent.py +7 -11
src/rag_code_assistant/agent.py CHANGED
@@ -34,14 +34,10 @@ def load_vectorstore():
34
  )
35
 
36
  def load_llm():
37
- """
38
- Loads the LLM with fallback logic:
39
- - Tries Ollama (local development with your laptop)
40
- - Falls back to Groq Cloud (production deployment on Hugging Face)
41
- """
42
- ollama_url = os.environ["OLLAMA_BASE_URL"]
43
 
44
- # If OLLAMA_BASE_URL is set, use local Ollama (for demo purposes)
45
  if ollama_url:
46
  print("🔧 Using local Ollama LLM (Development Mode)")
47
  return ChatOllama(
@@ -50,18 +46,18 @@ def load_llm():
50
  base_url=ollama_url,
51
  )
52
 
53
- # Otherwise, use Groq Cloud (for production on Hugging Face)
54
- groq_api_key = os.environ["GROQ_API_KEY"]
55
  if not groq_api_key:
56
  raise ValueError(
57
  "Neither OLLAMA_BASE_URL nor GROQ_API_KEY found! "
58
  "Please set one in your environment variables."
59
  )
60
 
61
- print("☁️ Using Groq Cloud LLM (Production Mode)")
62
  return ChatGroq(
63
  api_key=groq_api_key,
64
- model_name="llama-3.3-70b-versatile", # Fast, smart, and free!
65
  temperature=0.1
66
  )
67
 
 
34
  )
35
 
36
  def load_llm():
37
+ # 1. Use .get() so it doesn't crash if the variable is missing
38
+ ollama_url = os.environ.get("OLLAMA_BASE_URL")
 
 
 
 
39
 
40
+ # 2. If the URL exists (like on your laptop), use Ollama
41
  if ollama_url:
42
  print("🔧 Using local Ollama LLM (Development Mode)")
43
  return ChatOllama(
 
46
  base_url=ollama_url,
47
  )
48
 
49
+ # 3. If it doesn't exist (like on Hugging Face), fall back to Groq
50
+ groq_api_key = os.environ.get("GROQ_API_KEY")
51
  if not groq_api_key:
52
  raise ValueError(
53
  "Neither OLLAMA_BASE_URL nor GROQ_API_KEY found! "
54
  "Please set one in your environment variables."
55
  )
56
 
57
+ print("☁️ Using Groq Cloud LLM (Production Mode)")
58
  return ChatGroq(
59
  api_key=groq_api_key,
60
+ model_name="llama-3.3-70b-versatile",
61
  temperature=0.1
62
  )
63