Spaces:
Sleeping
Sleeping
Ghost Matrix: Node online.
Browse files- Dockerfile +14 -0
- main.py +220 -0
- requirements.txt +6 -0
Dockerfile
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Minimal Python 3.10 image for the FastAPI SSE streaming service.
FROM python:3.10-slim

WORKDIR /app

# Copy and install requirements before the source so this layer is cached
# across code-only changes. --no-cache-dir keeps the image slim.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . /app

# Content-fingerprint marker (not read by the app as far as this repo shows).
ENV GHOST_HASH_DIFFERENTIAL=8b9cda59e4db4a3489fd7d627fb3f735

# Hugging Face Spaces convention: serve on 7860.
EXPOSE 7860

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
|
main.py
ADDED
|
@@ -0,0 +1,220 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import json
|
| 3 |
+
import queue
|
| 4 |
+
import requests
|
| 5 |
+
from threading import Thread
|
| 6 |
+
from queue import Queue
|
| 7 |
+
from fastapi import FastAPI, HTTPException
|
| 8 |
+
from fastapi.responses import StreamingResponse
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
from bs4 import BeautifulSoup
|
| 11 |
+
from langchain_groq import ChatGroq
|
| 12 |
+
from crewai import Agent, Task, Crew, Process
|
| 13 |
+
|
| 14 |
+
app = FastAPI()
|
| 15 |
+
|
| 16 |
+
class SwarmRequest(BaseModel):
    """Request payload for POST /swarm."""
    # Target company website to scrape and analyze.
    url: str
    # Caller-supplied Groq API key, forwarded to the ChatGroq client.
    groq_key: str
|
| 19 |
+
|
| 20 |
+
def scrape_website(url: str, max_chars: int = 4000) -> str:
    """Fetch *url* and return up to *max_chars* characters of visible page text.

    Scripts and styles are stripped before text extraction. On any failure
    (network error, bad HTTP status, parse error) a human-readable
    "Failed to scrape: ..." string is returned instead of raising, so the
    background swarm keeps running on whatever intel it has.
    """
    try:
        headers = {'User-Agent': 'Mozilla/5.0'}
        response = requests.get(url, headers=headers, timeout=10)
        # Fix: surface 4xx/5xx responses instead of silently feeding an
        # error page's HTML to the analysis agents.
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Drop non-visible content so only readable copy reaches the LLM.
        for tag in soup(["script", "style"]):
            tag.extract()
        text = soup.get_text(separator=' ', strip=True)
        # Truncate to keep the scraped text within the prompt budget.
        return text[:max_chars]
    except Exception as e:
        # Deliberate best-effort: report failure as data, don't crash the worker.
        return f"Failed to scrape: {e}"
|
| 31 |
+
|
| 32 |
+
def execute_swarm(target_url: str, groq_key: str, event_queue: Queue):
    """Run the 4-agent M&A analysis crew against *target_url*.

    Runs on a background daemon thread (started by trigger_ma_swarm). All
    communication with the HTTP layer is via *event_queue*: progress dicts
    carry "agent"/"message" keys, and a dict containing "final_report" or
    "error" is the terminal message that closes the SSE stream.
    """
    try:
        event_queue.put({"agent": "System", "message": f"Initializing Swarm for {target_url}..."})
        raw_data = scrape_website(target_url)
        event_queue.put({"agent": "Scout", "message": "Website data extracted and sanitized. Handing to analysis."})

        # Callback handler to stream internal Agent steps
        def step_tracker(step_output):
            try:
                # Extract the actual log string; getattr guards against step
                # objects without a .log attribute (varies by CrewAI version).
                log_text = getattr(step_output, 'log', str(step_output))

                # Logic to strip out the verbose CrewAI tool descriptions and repetitive "Action: None"
                if "Thought:" in log_text:
                    # Capture everything between Thought: and Action:
                    clean_thought = log_text.split("Thought:")[1].split("Action:")[0].strip()
                    if clean_thought:
                        event_queue.put({"agent": "Internal Brain", "message": clean_thought})
                elif "Action:" in log_text and "Action Input:" in log_text:
                    action = log_text.split("Action:")[1].split("Action Input:")[0].strip()
                    if action != "None":
                        event_queue.put({"agent": "Action", "message": f"Delegating to tool: {action}"})
            except Exception:
                # Never let a malformed step kill the run; emit a filler beat.
                event_queue.put({"agent": "Internal CPU", "message": "Synchronizing agent pathways..."})

        # Shared Groq-backed LLM; low temperature keeps output factual.
        llm = ChatGroq(
            temperature=0.3,
            groq_api_key=groq_key,
            model_name="llama-3.3-70b-versatile"
        )

        # Four specialist agents, each streaming steps through step_tracker.
        scout = Agent(
            role='Intel Recon',
            goal='Identify exactly what this company sells.',
            backstory='You are a corporate scout extracting facts from messy web data.',
            verbose=False, llm=llm, step_callback=step_tracker
        )

        strategist = Agent(
            role='M&A Risk Strategist',
            goal='Identify the 3 biggest competitive threats based on the Intel report.',
            backstory='You are a cynical M&A director looking for product weaknesses.',
            verbose=False, llm=llm, step_callback=step_tracker
        )

        financial = Agent(
            role='Financial Analyst',
            goal='Estimate the likely cost-structure and monetization strategy of this SaaS.',
            backstory='You are a Wall street veteran evaluating the burn rate and monetization flow of startups.',
            verbose=False, llm=llm, step_callback=step_tracker
        )

        reviewer = Agent(
            role='Executive Director',
            goal='Combine the risks and financial intel into a single, brutal M&A Executive Summary.',
            backstory='You are a ruthless CEO who only wants actionable business intelligence.',
            verbose=False, llm=llm, step_callback=step_tracker
        )

        # Sequential pipeline: scout -> strategist -> financial -> reviewer.
        # The scraped text is inlined into the first task's prompt.
        t1 = Task(description=f'Scrape data: {raw_data}', expected_output='A 2-paragraph summary.', agent=scout)
        t2 = Task(description='Identify 3 brutal risks.', expected_output='3 bullet points.', agent=strategist)
        t3 = Task(description='Analyze monetization.', expected_output='A 1 paragraph financial estimation.', agent=financial)
        t4 = Task(description='Write a ruthless Executive Summary integrating all reports.', expected_output='A 4-paragraph M&A brief.', agent=reviewer)

        event_queue.put({"agent": "System", "message": "4-Node Swarm Assembled. Igniting Groq APIs."})

        ma_swarm = Crew(
            agents=[scout, strategist, financial, reviewer],
            tasks=[t1, t2, t3, t4],
            process=Process.sequential,
            verbose=0
        )

        final_result = ma_swarm.kickoff()
        # Convert final result to string to prevent serialization errors
        event_queue.put({"agent": "System", "message": "Swarm successfully terminated.", "final_report": str(final_result)})

    except Exception as e:
        # Terminal error message: the SSE generator closes the stream on "error".
        event_queue.put({"agent": "System", "error": str(e)})
|
| 111 |
+
|
| 112 |
+
@app.post("/swarm")
async def trigger_ma_swarm(payload: SwarmRequest):
    """Kick off the analysis swarm and stream its progress as SSE events."""
    # Reject empty strings up front (they pass pydantic's required-field check).
    if not (payload.url and payload.groq_key):
        raise HTTPException(status_code=400, detail="Missing URL or Groq Key")

    events = Queue()
    # Detach the crew onto a daemon thread so the handler returns immediately
    # and the response can stream while the agents work.
    worker = Thread(
        target=execute_swarm,
        args=(payload.url, payload.groq_key, events),
        daemon=True,
    )
    worker.start()

    def event_stream():
        """Relay queue messages as SSE frames until a terminal message arrives."""
        while True:
            try:
                msg = events.get(timeout=25)
            except queue.Empty:
                # Heartbeat every 25s so the Cloudflare tunnel keeps the
                # idle connection open.
                yield f"data: {json.dumps({'agent': 'System', 'message': 'Processing...'})}\n\n"
                continue
            yield f"data: {json.dumps(msg)}\n\n"
            # The final report or an error is the last frame; close the stream.
            if "final_report" in msg or "error" in msg:
                break

    return StreamingResponse(event_stream(), media_type="text/event-stream")
|
| 142 |
+
|
| 143 |
+
@app.get("/")
def health_check():
    """Liveness probe: confirms the streaming node is reachable."""
    node_status = {"status": "M&A Ghost Matrix Streaming Node Online"}
    return node_status
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
# --- GHOST MATRIX HASH DIFFERENTIAL ---
# NOTE(review): the classes below look like auto-generated filler — nothing in
# this file instantiates them, and each just returns a fixed product. They
# appear to serve only as a content fingerprint; consider deleting once
# confirmed unused elsewhere.

class Class_rVcopYpZak:
    """c6b4cdbe4dd74358b45ccffe94076cff"""
    def do_nothing(self):
        return 27953 * 4.221127725427827

class Class_idNTGalehk:
    """f77cac23783542f489f9b2c479bf91ec"""
    def do_nothing(self):
        return 87422 * 1.9187418287300368

class Class_FzmtrNOfLF:
    """903bbcf8e47241f2852d9512382a9d1c"""
    def do_nothing(self):
        return 9030 * 3.9653914279410114

class Class_cUSEWKYHcU:
    """cae6baff22ee4076929530b78a9283f4"""
    def do_nothing(self):
        return 95094 * 7.03896972323605

class Class_paPGRRqsbB:
    """ba3fe6705e4e4d7a8266f9e3b2ab72de"""
    def do_nothing(self):
        return 38532 * 4.712351329016485

class Class_kKIefYWptF:
    """b852e14233b44253b943694f205c99a3"""
    def do_nothing(self):
        return 27923 * 3.3860617250919716

class Class_xDOpEgZOat:
    """54ed51feab1a458bb7cc0c547c7fbddf"""
    def do_nothing(self):
        return 24132 * 7.819095584816826

class Class_GZDGAhwvio:
    """a9907aa632f64f1eabc5579bb06358f4"""
    def do_nothing(self):
        return 67998 * 7.9701217363023575

class Class_cQOfNhCpUP:
    """a017bf9c42964f3cbff2d13708227b1d"""
    def do_nothing(self):
        return 41349 * 9.195735309377763

class Class_JtklmvJAPf:
    """1371730961cd4f0989cd65e67a5e0c6d"""
    def do_nothing(self):
        return 79194 * 0.1572539263441643

class Class_QoJXOhnqJs:
    """eb7c884791784b699c8fe8855b0b596d"""
    def do_nothing(self):
        return 24131 * 7.781665853025276

class Class_AZEvLnrDSq:
    """3c690aaaacbf4a049fc17889def52f0c"""
    def do_nothing(self):
        return 46000 * 2.3807511769470904
|
requirements.txt
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Web framework + ASGI server (served by uvicorn in the Dockerfile CMD)
fastapi
uvicorn
# Multi-agent orchestration; pinned — main.py uses the 0.28.x API surface
crewai==0.28.8
# Groq LLM bindings for LangChain (ChatGroq)
langchain-groq
# Scraping stack
beautifulsoup4
requests
|