acecalisto3 committed on
Commit 401a5f9
1 Parent(s): b3de3de

Update app.py

Files changed (1)
  1. app.py +14 -7
app.py CHANGED
@@ -1,7 +1,6 @@
 import os
 import subprocess
 import streamlit as st
-from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 import black
 from pylint import lint
 from io import StringIO
@@ -154,16 +153,24 @@ def code_editor_interface(code):
     return formatted_code, lint_message

 def translate_code(code, input_language, output_language):
-    # Use a Hugging Face translation model to handle end-to-end code translation
-    translator = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")  # Example: English to Spanish
-    translated_code = translator(code, target_lang=output_language)[0]['translation_text']
+    try:
+        model = InstructModel()  # Initialize Mixtral Instruct model
+    except EnvironmentError as e:
+        return f"Error loading model: {e}"
+
+    prompt = f"Translate the following {input_language} code to {output_language}:\n\n{code}"
+    translated_code = model.generate_response(prompt)
     st.session_state.current_state['toolbox']['translated_code'] = translated_code
     return translated_code

 def generate_code(code_idea):
-    # Use a Hugging Face code generation model
-    generator = pipeline('text-generation', model='bigcode/starcoder')
-    generated_code = generator(code_idea, max_length=1000, num_return_sequences=1)[0]['generated_text']
+    try:
+        model = InstructModel()  # Initialize Mixtral Instruct model
+    except EnvironmentError as e:
+        return f"Error loading model: {e}"
+
+    prompt = f"Generate code for the following idea:\n\n{code_idea}"
+    generated_code = model.generate_response(prompt)
     st.session_state.current_state['toolbox']['generated_code'] = generated_code
     return generated_code

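The rewritten helpers call an InstructModel class with a generate_response() method that is defined elsewhere in app.py and not shown in this diff. For reference, a minimal sketch of what such a wrapper might look like, assuming it loads mistralai/Mixtral-8x7B-Instruct-v0.1 through transformers; the model ID, chat-template handling, and generation settings below are assumptions, not code from this commit:

from transformers import AutoModelForCausalLM, AutoTokenizer

class InstructModel:
    """Hypothetical wrapper assumed by translate_code() and generate_code()."""

    def __init__(self, model_name="mistralai/Mixtral-8x7B-Instruct-v0.1"):
        # from_pretrained raises an OSError when the weights cannot be found
        # or downloaded, which the callers in the diff catch and report.
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")

    def generate_response(self, prompt: str) -> str:
        # Format the prompt with the model's chat template before generating.
        messages = [{"role": "user", "content": prompt}]
        input_ids = self.tokenizer.apply_chat_template(
            messages, add_generation_prompt=True, return_tensors="pt"
        ).to(self.model.device)
        output = self.model.generate(input_ids, max_new_tokens=1024, do_sample=False)
        # Decode only the newly generated tokens, not the echoed prompt.
        return self.tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)

Catching EnvironmentError in translate_code() and generate_code() lines up with such a wrapper because from_pretrained() reports missing or unreachable weights as OSError, which EnvironmentError aliases in Python 3.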