C2MV committed on
Commit
f5a37da
1 Parent(s): bd0644f

Update interface.py

Files changed (1)
  1. interface.py +21 -12
interface.py CHANGED
@@ -11,7 +11,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
  from sympy import symbols, sympify, lambdify
  import copy
  from config import DEVICE, MODEL_PATH, MAX_LENGTH, TEMPERATURE
- from decorators import spaces  # We import the decorator
+ from decorators import spaces
 
  # Device configuration
  device = DEVICE
@@ -20,12 +20,15 @@ device = DEVICE
  model_path = MODEL_PATH  # Replace with the real path to your model
  tokenizer = AutoTokenizer.from_pretrained(model_path)
  model = AutoModelForCausalLM.from_pretrained(model_path)
- model.to(device)
- model.eval()
+ # We do not move the model to the device here
 
- @spaces.GPU(duration=300)  # We apply the decorator to ensure GPU use
- def generate_analysis(prompt, max_length=MAX_LENGTH):
+ @spaces.GPU(duration=300)
+ def generate_analysis(prompt, max_length=MAX_LENGTH, device=None):
      try:
+         if device is None:
+             device = torch.device('cpu')
+         if next(model.parameters()).device != device:
+             model.to(device)
          input_ids = tokenizer.encode(prompt, return_tensors='pt').to(device)
          max_gen_length = min(max_length + input_ids.size(1), model.config.max_position_embeddings)
 
@@ -75,6 +78,19 @@ def process_and_plot(
      substrate_eq_count,
      product_eq_count
  ):
+     # Check that the required columns are present in the uploaded Excel file
+     df = pd.read_excel(file.name)
+     expected_columns = ['Tiempo', 'Biomasa', 'Sustrato', 'Producto']  # Column names in Spanish
+     for col in expected_columns:
+         if col not in df.columns:
+             raise KeyError(f"The expected column '{col}' was not found in the Excel file.")
+
+     # Assign the data from the Spanish-named columns
+     time = df['Tiempo'].values  # Time column
+     biomass_data = df['Biomasa'].values  # Biomass column
+     substrate_data = df['Sustrato'].values  # Substrate column
+     product_data = df['Producto'].values  # Product column
+
      # Convert the counters to integers
      biomass_eq_count = int(biomass_eq_count)
      substrate_eq_count = int(substrate_eq_count)
@@ -93,13 +109,6 @@ def process_and_plot(
      product_params = [product_param1, product_param2, product_param3][:product_eq_count]
      product_bounds = [product_bound1, product_bound2, product_bound3][:product_eq_count]
 
-     # Read the uploaded Excel file
-     df = pd.read_excel(file.name)
-     time = df['Time'].values
-     biomass_data = df['Biomass'].values
-     substrate_data = df['Substrate'].values
-     product_data = df['Product'].values
-
      biomass_results = []
      substrate_results = []
      product_results = []
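A minimal sketch of the lazy device-placement pattern that the new generate_analysis uses, with a stand-in nn.Linear model instead of the real AutoModelForCausalLM; the stub name, toy model, and tensor shapes are assumptions for illustration only:

import torch
import torch.nn as nn

# Stand-in for the causal LM loaded from MODEL_PATH; a toy module keeps the sketch runnable.
model = nn.Linear(8, 8)

def generate_analysis_stub(inputs, device=None):
    # Default to CPU when no device is supplied, mirroring the new signature.
    if device is None:
        device = torch.device('cpu')
    # Move the model only if it is not already on the requested device,
    # so the transfer happens at most once per device.
    if next(model.parameters()).device != device:
        model.to(device)
    # Inputs must sit on the same device as the model before the forward pass.
    inputs = inputs.to(device)
    with torch.no_grad():
        return model(inputs)

# CPU path shown here; on the Space, a CUDA device would presumably be passed
# in while the @spaces.GPU window is active.
out = generate_analysis_stub(torch.randn(2, 8))
print(out.shape, next(model.parameters()).device)

Deferring model.to(device) until the decorated call runs is presumably the point of the change: nothing touches a GPU at import time.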
 
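A minimal sketch of the column check added to process_and_plot, using an in-memory DataFrame in place of pd.read_excel(file.name); the sample values are made up and the Spanish headers match the ones the diff expects:

import pandas as pd

# Hypothetical data standing in for the uploaded Excel file.
df = pd.DataFrame({
    'Tiempo':   [0, 1, 2],
    'Biomasa':  [0.1, 0.4, 0.9],
    'Sustrato': [10.0, 8.5, 6.2],
    'Producto': [0.0, 0.2, 0.5],
})

expected_columns = ['Tiempo', 'Biomasa', 'Sustrato', 'Producto']
missing = [col for col in expected_columns if col not in df.columns]
if missing:
    # Same failure mode as the diff: a KeyError naming the absent column(s).
    raise KeyError(f"Expected column(s) {missing} not found in the Excel file.")

time = df['Tiempo'].values
biomass_data = df['Biomasa'].values
substrate_data = df['Sustrato'].values
product_data = df['Producto'].values
print(len(time), biomass_data.max())

Failing fast with an explicit KeyError replaces the less informative KeyError pandas would have raised later, when the old English column names ('Time', 'Biomass', ...) were looked up in a Spanish-labelled file.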