MLDeveloper committed (verified)
Commit f62fef5 · 1 Parent(s): 4dfb3f4

Update app.py

Files changed (1): app.py +18 -52
app.py CHANGED
@@ -1,60 +1,26 @@
  import streamlit as st
- import requests
- import os
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

- # Ensure the Hugging Face API Token is available
- API_TOKEN = os.getenv("HF_API_TOKEN")
- if not API_TOKEN:
-     st.error("⚠️ API Token is missing! Please set HF_API_TOKEN as an environment variable.")
-     st.stop()
-
- # Define Hugging Face Model and API Endpoint
- MODEL_ID = "bigcode/starcoder"
- API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
- HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}
+ # Load CodeT5 model from Hugging Face
+ MODEL_NAME = "Salesforce/codet5-large"
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+ model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

  def translate_code(code_snippet, source_lang, target_lang):
-     """Translate code between languages using Hugging Face API."""
-     prompt = f"""You are an expert AI in code translation. Convert the following {source_lang} code to {target_lang}:
-
- {source_lang} Code:
- ```{source_lang.lower()}
- {code_snippet}
- ```
-
- Now, provide the equivalent {target_lang} code:
- """
-
-     try:
-         response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
-
-         # Handle API response
-         if response.status_code == 200:
-             result = response.json()
-             if isinstance(result, list) and result:
-                 generated_text = result[0].get("generated_text", "")
-                 translated_code = generated_text.split(f"```{target_lang.lower()}")[-1].strip("```").strip()
-                 return translated_code
-             else:
-                 return "⚠️ Error: Unexpected API response format."
-
-         elif response.status_code == 400:
-             return "⚠️ Error: Bad request. Please check your input."
-
-         elif response.status_code == 401:
-             return "⚠️ Error: Unauthorized. Check your API token."
-
-         elif response.status_code == 403:
-             return "⚠️ Error: Access Forbidden. You may need special model access."
-
-         elif response.status_code == 503:
-             return "⚠️ Error: Model is loading. Please wait a moment and try again."
-
-         else:
-             return f"⚠️ API Error {response.status_code}: {response.text}"
-
-     except requests.exceptions.RequestException as e:
-         return f"⚠️ Network Error: {str(e)}"
+     """
+     Translate code using CodeT5 model.
+     """
+     prompt = f"""Translate this {source_lang} code to {target_lang}:
+
+ {code_snippet}"""
+
+     # Tokenize and generate translation
+     inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=512)
+     outputs = model.generate(**inputs, max_length=512)
+
+     # Decode the output
+     translated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return translated_code

  # Streamlit UI
  st.title("🔄 AI Code Translator")
@@ -74,4 +40,4 @@ if st.button("Translate"):
      with st.spinner("Translating... ⏳"):
          translated_code = translate_code(code_input, source_lang, target_lang)
      st.subheader(f"Translated {target_lang} Code:")
-     st.code(translated_code, language=target_lang.lower())
+     st.code(translated_code, language=target_lang.lower())
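
The generation path introduced by this commit can also be exercised outside Streamlit. A minimal sketch, assuming the same model name and prompt format as the diff above; the sample snippet and language pair are illustrative only and not part of the commit:

```python
# Illustrative standalone check of the CodeT5 generation step used in the new app.py.
# Note: loading Salesforce/codet5-large downloads the model weights on first run.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

MODEL_NAME = "Salesforce/codet5-large"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

# Hypothetical input snippet, using the same prompt shape as translate_code
code_snippet = "def add(a, b):\n    return a + b"
prompt = f"Translate this Python code to Java:\n\n{code_snippet}"

# Tokenize, generate, and decode exactly as the committed function does
inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
outputs = model.generate(**inputs, max_length=512)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```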