# AI Code Assistant - a Streamlit app powered by Google's Gemini Pro.

import os
import subprocess
import tempfile

import streamlit as st
import google.generativeai as genai
import matplotlib.pyplot as plt
import nltk
import numpy as np
import pandas as pd
import spacy
import sympy as sp
from nltk.sentiment import SentimentIntensityAnalyzer
from tenacity import retry, stop_after_attempt, wait_fixed
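
# Typical local run, assuming this file is saved as app.py and GOOGLE_API_KEY is
# set in .streamlit/secrets.toml:
#
#   streamlit run app.py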

# Download the NLTK resources used for tokenisation and sentiment scoring.
nltk.download('punkt', quiet=True)
nltk.download('vader_lexicon', quiet=True)

# Load the small English spaCy model, fetching it on first use.
try:
    nlp = spacy.load("en_core_web_sm")
except OSError:
    spacy.cli.download("en_core_web_sm")
    nlp = spacy.load("en_core_web_sm")

# Configure the Gemini client with the key stored in Streamlit secrets.
genai.configure(api_key=st.secrets["GOOGLE_API_KEY"])
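
# The key is read from .streamlit/secrets.toml; for local runs an environment
# variable works as an alternative (sketch):
#
#   genai.configure(api_key=os.environ["GOOGLE_API_KEY"])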

generation_config = {
    "temperature": 0.4,
    "top_p": 0.8,
    "top_k": 50,
    "max_output_tokens": 2048,
}

model = genai.GenerativeModel(
    model_name="gemini-1.5-pro-latest",
    generation_config=generation_config,
    system_instruction="You are Ath, an advanced AI coding assistant. Provide secure, efficient code with clear explanations."
)
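
# Streamlit reruns this script on every interaction; wrapping the client in
# st.cache_resource would reuse one instance across reruns (optional sketch):
#
#   @st.cache_resource
#   def get_model():
#       return genai.GenerativeModel(model_name="gemini-1.5-pro-latest",
#                                    generation_config=generation_config)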

st.set_page_config(page_title="AI Code Assistant", page_icon="💻", layout="wide")
st.markdown("""
<style>
.main-container {padding: 2rem; border-radius: 10px; background: #f8f9fa;}
.code-block {background: #1e1e1e; color: #d4d4d4; padding: 1rem; border-radius: 5px;}
</style>
""", unsafe_allow_html=True)

@retry(stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True)
def generate_response(prompt):
    """Ask the Gemini model for a completion, retrying transient API failures."""
    response = model.generate_content(prompt)
    return response.text
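
# If all three attempts fail, the last exception propagates to the caller and is
# surfaced by the button handler's st.error below. Typical use:
#
#   answer = generate_response("Write a Python function that reverses a string.")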

def process_user_input(text):
    """Run lightweight NLP (tokens, entities, sentiment) over the user's prompt."""
    try:
        doc = nlp(text)
        return {
            'tokens': [token.text for token in doc],
            'entities': [(ent.text, ent.label_) for ent in doc.ents],
            'sentiment': SentimentIntensityAnalyzer().polarity_scores(text)
        }
    except Exception as e:
        st.error(f"NLP Error: {str(e)}")
        return text
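
# The returned mapping has three keys: 'tokens' (token strings), 'entities'
# ((text, label) pairs from spaCy NER) and 'sentiment' (VADER scores: 'neg',
# 'neu', 'pos', 'compound').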

def optimize_code(code):
    """Lint the generated code with pylint (which must be installed) and return
    the code together with the lint report."""
    try:
        with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
            f.write(code)
            temp_path = f.name
        try:
            result = subprocess.run(["pylint", temp_path], capture_output=True, text=True)
            return code, result.stdout
        finally:
            os.remove(temp_path)
    except Exception as e:
        return code, f"Optimization Error: {str(e)}"
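
# The model's reply is usually Markdown (prose plus ```python fences), so linting
# it verbatim tends to produce spurious errors. A helper along these lines could
# pull out just the code first (a sketch; the app currently lints the raw reply):
def extract_python_blocks(markdown_text):
    """Return the contents of ```python fenced blocks, or the input unchanged."""
    import re
    blocks = re.findall(r"```(?:python)?\n(.*?)```", markdown_text, flags=re.DOTALL)
    return "\n\n".join(blocks) if blocks else markdown_text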

st.title("💻 AI Code Assistant")
st.markdown("### Generate, Optimize, and Deploy Code")

task_type = st.selectbox("Select Task Type", [
    "Code Generation",
    "Data Analysis",
    "NLP Processing",
    "Math Solving"
])

prompt = st.text_area("Enter your request:", height=150)

if st.button("Generate Solution"):
    if not prompt.strip():
        st.error("Please enter a valid prompt")
    else:
        with st.spinner("Processing..."):
            try:
                # Lightweight NLP analysis of the prompt (shown for NLP tasks).
                processed = process_user_input(prompt)

                # Ask Gemini for the actual solution.
                response = generate_response(prompt)

                with st.expander("Generated Solution", expanded=True):
                    if task_type == "Code Generation":
                        optimized, lint = optimize_code(response)
                        st.code(optimized, language='python')
                        st.write("Code Analysis:")
                        st.text(lint[:1000])

                    elif task_type == "Data Analysis":
                        # Placeholder demo: random data, summary statistics and a KDE plot.
                        df = pd.DataFrame(np.random.randn(50, 4), columns=['A', 'B', 'C', 'D'])
                        st.write("Sample Analysis:")
                        st.dataframe(df.describe())
                        fig, ax = plt.subplots()
                        df.plot.kde(ax=ax)
                        st.pyplot(fig)

                    elif task_type == "NLP Processing":
                        st.json(processed)

                    elif task_type == "Math Solving":
                        # sympy expects a parseable expression, e.g. "x**2 - 4".
                        solution = sp.solve(sp.sympify(prompt))
                        st.latex(r"\text{Solution: } " + sp.latex(solution))

                st.success("Processing complete!")

            except Exception as e:
                st.error(f"Error: {str(e)}")

st.sidebar.header("Tools")
if st.sidebar.button("Clear Cache"):
    st.cache_data.clear()
    st.sidebar.success("Cache cleared!")

st.sidebar.markdown("""
---
**About**
AI Code Assistant v2.0
Powered by Gemini Pro
[GitHub Repo](https://github.com/your-repo)
""")