MLDeveloper committed
Commit 4dfb3f4 · verified · 1 Parent(s): 7ae7f8d

Update translator.py

Files changed (1)
  1. translator.py +34 -25
translator.py CHANGED
@@ -1,33 +1,42 @@
+import streamlit as st
 import requests
 import os
+from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
-# Your Hugging Face API token (Replace 'your_token_here' with your actual token)
-
-API_TOKEN = os.getenv("HF_API_TOKEN")
-
-# Define model and API endpoint
-MODEL_ID = "bigcode/starcoder"
-API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
-HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}
+# Load CodeT5 model from Hugging Face
+MODEL_NAME = "Salesforce/codet5-large"
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
 
 def translate_code(code_snippet, source_lang, target_lang):
     """
-    Translate code using Hugging Face API (No local download needed).
+    Translate code using CodeT5 model.
     """
-    prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:"
-
-    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
-
-    if response.status_code == 200:
-        return response.json()[0]["generated_text"]
+    prompt = f"Translate this {source_lang} code to {target_lang}:\n\n{code_snippet}"
+
+    # Tokenize and generate translation
+    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=512)
+    outputs = model.generate(**inputs, max_length=512)
+
+    # Decode the output
+    translated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
+    return translated_code
+
+# Streamlit UI
+st.title("🔄 Code Translator (Python, Java, C++, C)")
+st.write("Translate code between Python, Java, C++, and C.")
+
+languages = ["Python", "Java", "C++", "C"]
+
+source_lang = st.selectbox("Select source language", languages)
+target_lang = st.selectbox("Select target language", languages)
+code_input = st.text_area("Enter your code here:", height=200)
+
+if st.button("Translate"):
+    if code_input.strip():
+        with st.spinner("Translating..."):
+            translated_code = translate_code(code_input, source_lang, target_lang)
+        st.subheader("Translated Code:")
+        st.code(translated_code, language=target_lang.lower())
     else:
-        return f"Error: {response.status_code}, {response.text}"
-
-# Example usage
-source_code = """
-def add(a, b):
-    return a + b
-"""
-translated_code = translate_code(source_code, "Python", "Java")
-print("Translated Java Code:\n", translated_code)
-
+        st.warning("⚠️ Please enter some code before translating.")
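
For anyone trying out the updated script, a minimal usage sketch follows. It is an illustration, not part of the commit, and assumes streamlit, transformers, and torch are installed and that translator.py is importable from the working directory. The UI added here is launched with "streamlit run translator.py"; importing the module instead loads the Salesforce/codet5-large weights on first use and executes the top-level st.* calls in Streamlit's bare mode, which should only emit warnings outside a streamlit run session.

# Usage sketch (illustrative only): call the new translate_code() helper
# directly, outside the Streamlit UI.
# Assumes translator.py sits in the current directory; importing it downloads
# Salesforce/codet5-large on first use and runs the top-level Streamlit calls
# in bare mode.
from translator import translate_code

sample = """
def add(a, b):
    return a + b
"""

# Prints whatever the CodeT5 checkpoint decodes for the translation prompt.
print(translate_code(sample, "Python", "Java"))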