Navya-Sree committed
Commit 13a9561 · verified · 1 Parent(s): e25ee33

Create src/macg/llm.py

Files changed (1)
  1. src/macg/llm.py +30 -0
src/macg/llm.py ADDED
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+class LLMClient:
+    """
+    Start small: a stub interface. Replace with OpenAI / Ollama / etc later.
+    For now, you can manually paste outputs OR implement one adapter.
+    """
+    def complete(self, system: str, prompt: str) -> str:
+        raise NotImplementedError("Plug in an LLM adapter (OpenAI/Ollama) or start with manual mode.")
+
+
+class ManualLLM(LLMClient):
+    """
+    Manual mode: prints the prompt and asks you to paste the response.
+    Great for learning + debugging the agent loop.
+    """
+    def complete(self, system: str, prompt: str) -> str:
+        print("\n" + "="*80)
+        print("SYSTEM:\n", system)
+        print("-"*80)
+        print("PROMPT:\n", prompt)
+        print("="*80)
+        print("Paste model output below. End with a line containing only: END\n")
+        lines = []
+        while True:
+            line = input()
+            if line.strip() == "END":
+                break
+            lines.append(line)
+        return "\n".join(lines)
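The LLMClient docstring leaves the real adapter for later. As a minimal sketch of one such adapter (not part of this commit), the snippet below assumes the official openai Python SDK (>= 1.0) is installed and OPENAI_API_KEY is set in the environment; the class name OpenAILLM and the default model string are illustrative placeholders.

from openai import OpenAI  # assumes the openai>=1.0 SDK is installed

class OpenAILLM(LLMClient):
    """Illustrative adapter: sends the system and user messages to the Chat Completions API."""
    def __init__(self, model: str = "gpt-4o-mini"):
        self.client = OpenAI()  # reads OPENAI_API_KEY from the environment
        self.model = model      # placeholder model name; use whichever model you have access to

    def complete(self, system: str, prompt: str) -> str:
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": system},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message.content

Either backend plugs into the same call site, so the agent loop does not change:

llm = ManualLLM()  # swap for OpenAILLM() once an adapter is wired up
answer = llm.complete("You are a coding agent.", "List the files you would create first.")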