# NOTE: removed non-Python extraction artifacts that preceded this file
# (a "File size" header, a row of git commit hashes, and a line-number gutter).
import base64
import os
import re

import gradio as gr
import requests
from langchain_groq import ChatGroq

# SECURITY FIX(review): API keys were hard-coded in source. They are now read
# from the environment first; the literal fallbacks are kept only for
# backward compatibility. Keys that were ever committed must be treated as
# compromised — revoke them and drop the fallbacks.
neura_code = os.environ.get("GROQ_CODE_API_KEY", "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")
neura_chat = os.environ.get("GROQ_CHAT_API_KEY", "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")

# Model that converts extracted Markdown into code.
MODEL_NAME = "deepseek-r1-distill-llama-70b"
# Model that powers the free-form chatbot tab.
CHATBOT_MODEL = "llama-3.3-70b-versatile"

# Lower temperature for code generation (more deterministic output),
# slightly higher for conversational replies.
groq_model = ChatGroq(temperature=0.4, groq_api_key=neura_code, model_name=MODEL_NAME)
chatbot_model = ChatGroq(temperature=0.6, groq_api_key=neura_chat, model_name=CHATBOT_MODEL)
def clean_output(text):
    """Strip any <think>...</think> reasoning sections from *text*.

    Returns the remaining text with leading/trailing whitespace removed.
    """
    without_reasoning = re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL)
    return without_reasoning.strip()
def ocr_gradio(image):
    """
    Extract Markdown from an uploaded image via a vision model, then convert
    that Markdown into executable code.

    Parameters
    ----------
    image : str
        Filesystem path to the uploaded image (the UI uses
        ``gr.Image(type="filepath")``).

    Returns
    -------
    str
        Generated code on success, or an "Error: ..." message on failure.
    """
    # SECURITY NOTE(review): hard-coded Together API key — move to an env var
    # and revoke this one; keys committed to source are compromised.
    api_key = "44b3dc5fd85497b6b0885c0cdc071b1d5804eeb9f32e03917f0d949c4a779a37"
    model = "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo"
    system_prompt = (
        "Convert the provided image into Markdown format. Include all elements like arrows, headers, footers, subtexts, and tables."
    )
    image_data = encode_image(image)
    try:
        response = requests.post(
            "https://api.together.xyz/v1/chat/completions",
            headers={"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"},
            json={
                "model": model,
                "messages": [
                    {"role": "user", "content": [
                        {"type": "text", "text": system_prompt},
                        {"type": "image_url", "image_url": {"url": image_data}},
                    ]}
                ],
            },
            # FIX: without a timeout a stalled connection blocks the UI forever.
            timeout=60,
        )
    except requests.RequestException as exc:
        # FIX: network failures previously propagated as unhandled exceptions
        # instead of surfacing as this function's documented error string.
        return f"Error: request failed - {exc}"
    if response.status_code == 200:
        markdown_text = response.json().get("choices", [{}])[0].get("message", {}).get("content", "")
        return generate_code(markdown_text)
    return f"Error: {response.status_code} - {response.text}"
def encode_image(image_path: str) -> str:
    """Encode a local image file as a base64 ``data:`` URI.

    FIX: the MIME type was previously hard-coded to ``image/jpeg`` even for
    PNG and other uploads, producing incorrect data URIs. It is now guessed
    from the file extension; unknown or non-image extensions fall back to
    ``image/jpeg`` to preserve the original behavior.

    Parameters
    ----------
    image_path : str
        Path to the image file on disk.

    Returns
    -------
    str
        ``data:<mime>;base64,<payload>`` string suitable for vision APIs.
    """
    import mimetypes  # local import keeps this fix self-contained

    mime_type, _ = mimetypes.guess_type(image_path)
    if mime_type is None or not mime_type.startswith("image/"):
        mime_type = "image/jpeg"  # original hard-coded default
    with open(image_path, "rb") as image_file:
        encoded_string = base64.b64encode(image_file.read()).decode("utf-8")
    return f"data:{mime_type};base64,{encoded_string}"
def generate_code(user_input):
    """Ask the Groq code model to translate extracted Markdown into code.

    Returns the model's reply with <think> sections stripped, or an error
    string if the model call fails.
    """
    try:
        request_text = f"Convert the following Markdown into executable code.\n\n{user_input}"
        reply = groq_model.invoke(request_text)
        return clean_output(reply.content)
    except Exception as e:
        return f"Error processing input: {str(e)}"
def chatbot_response(user_message):
    """Forward a user message to the chat model and return its text reply.

    Any failure from the model call is reported as an error string rather
    than raised, so the UI never crashes on a bad request.
    """
    try:
        reply = chatbot_model.invoke(user_message)
        return reply.content
    except Exception as e:
        return f"Chatbot Error: {str(e)}"
# Dark-mode styling applied to both Gradio interfaces below.
custom_css = """
body { background-color: #121212; color: #E0E0E0; font-family: 'Poppins', sans-serif; }
.gradio-container { max-width: 800px; margin: auto; }
input, textarea { background: #222; color: #FFF; border-radius: 8px; }
button { background: #6200EE; color: white; font-weight: bold; border-radius: 8px; padding: 10px; }
button:hover { background: #3700B3; }
"""
# Shared Gradio theme (purple/indigo palette, Poppins font) used by both tabs.
gradio_theme = gr.themes.Base(
    primary_hue="purple",
    secondary_hue="indigo",
    font=["Poppins", "sans-serif"]
)
# Tab 1: image-to-code. Upload an architecture diagram; ocr_gradio extracts
# Markdown via the vision API and converts it to code.
ocr_interface = gr.Blocks()
with ocr_interface:
    gr.Markdown("<h1 style='text-align: center;'>Neura.AI</h1>")
    gr.Interface(
        fn=ocr_gradio,
        # type="filepath" hands ocr_gradio a path string, matching encode_image.
        inputs=gr.Image(type="filepath"),
        outputs="text",
        description="<p style='color:#BBB;'>The future of coding is here. Just upload a diagram of the technical architecture of your project or app to Neura.AI and generate high-quality, executable code!</p>",
        theme=gradio_theme,
        css=custom_css
    )
# Tab 2: plain-text chatbot backed by chatbot_response.
chatbot_interface = gr.Blocks()
with chatbot_interface:
    gr.Markdown("<h1 style='text-align: center;'>NeuraGPT</h1>")
    gr.Interface(
        fn=chatbot_response,
        inputs=gr.Textbox(placeholder="Ask me anything about coding!", lines=2),
        outputs="text",
        description="<p style='color:#BBB;'>Engage with NeuraGPT for AI-powered insights and smart coding advice.</p>",
        theme=gradio_theme,
        css=custom_css
    )
# Combine both interfaces into a tabbed app and start the local server.
demo = gr.TabbedInterface([ocr_interface, chatbot_interface], ["Neura.AI", "NeuraGPT"], theme=gradio_theme)
demo.launch()
# NOTE: removed a stray "|" extraction artifact that trailed the file.