import streamlit as st
import requests
import os  # To access environment variables
import google.generativeai as genai  # Import Gemini API

# Load API keys from environment variables
HF_API_TOKEN = os.getenv("HF_API_TOKEN")
GEMINI_API_KEY = os.getenv("GOOGLE_API_KEY")
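# Both keys are read from the environment (e.g. configured as Space secrets);
# os.getenv returns None when a variable is not set.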

# Set up Hugging Face Inference API
MODEL_ID = "Salesforce/codet5p-770m"  # CodeT5+ (Recommended)
API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}

# Initialize Gemini API with the key loaded from the environment
genai.configure(api_key=GEMINI_API_KEY)
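
# The Inference API expects a JSON body with "inputs" and optional "parameters" and,
# on success, returns a list of objects that each contain a "generated_text" field.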

def translate_code(code_snippet, source_lang, target_lang):
    """Translate code using the Hugging Face Inference API."""
    prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:\n"
    response = requests.post(API_URL, headers=HEADERS, json={
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 150,
            "temperature": 0.2,
            "top_k": 50
        }
    })

    if response.status_code == 200:
        generated_text = response.json()[0]["generated_text"]
        # Keep only the text that follows the "Translated ... Code:" marker from the prompt
        translated_code = generated_text.split(f"Translated {target_lang} Code:\n")[-1].strip()
        return translated_code
    else:
        return f"Error: {response.status_code}, {response.text}"

def fallback_translate_with_gemini(code_snippet, source_lang, target_lang):
    """Fallback translation using the Gemini API."""
    prompt = f"""You are a code translation expert. Convert the following {source_lang} code to {target_lang}:
{code_snippet}
Ensure the translation is accurate and follows {target_lang} best practices.
Do not give any explanation; only return the translated code.
"""
    try:
        model = genai.GenerativeModel("gemini-1.5-pro")
        response = model.generate_content(prompt)
        return response.text.strip() if response else "Translation failed."
    except Exception as e:
        return f"Gemini API Error: {str(e)}"

# Streamlit UI
st.title("🔄 Programming Language Translator")
st.write("Translate code between different programming languages using AI.")

languages = ["Python", "Java", "C++", "C"]
source_lang = st.selectbox("Select source language", languages)
target_lang = st.selectbox("Select target language", languages)
code_input = st.text_area("Enter your code here:", height=200)
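
# st.session_state persists values across Streamlit reruns, so the attempt counter
# and the last translation survive each button click.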
# Initialize session state
if "translate_attempts" not in st.session_state:
    st.session_state.translate_attempts = 0
    st.session_state.translated_code = ""

if st.button("Translate"):
    if code_input.strip():
        st.session_state.translate_attempts += 1
        with st.spinner("Translating..."):
            if st.session_state.translate_attempts == 1:
                # First attempt uses the pretrained Hugging Face model
                st.session_state.translated_code = translate_code(code_input, source_lang, target_lang)
            else:
                # Subsequent attempts fall back to the Gemini API
                st.session_state.translated_code = fallback_translate_with_gemini(code_input, source_lang, target_lang)

        st.subheader("Translated Code:")
        # Map the display name to a syntax-highlighter identifier (e.g. "C++" -> "cpp")
        highlight_lang = {"C++": "cpp"}.get(target_lang, target_lang.lower())
        st.code(st.session_state.translated_code, language=highlight_lang)
    else:
        st.warning("⚠️ Please enter some code before translating.")
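
# To run locally: streamlit run <path to this file>; on a Hugging Face Space the
# Streamlit app is started automatically.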