MarceloLZR committed on
Commit dada16f
1 Parent(s): b3b0c1d

Upload 5 files

Files changed (5)
  1. Dockerfile +17 -0
  2. app.py +39 -0
  3. model.h5 +3 -0
  4. model.json +1 -0
  5. requirements.txt +0 -0
Dockerfile ADDED
@@ -0,0 +1,17 @@
+ # Use a Python base image
+ FROM python:3.10.8
+
+ # Set the working directory
+ WORKDIR /code
+
+ # Copy the requirements file into the container and install dependencies
+ COPY ./requirements.txt /code/requirements.txt
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ COPY . .
+
+ # Adjust file permissions
+ RUN chmod -R 777 /code
+
+ # Start the API with uvicorn on the port provided by the platform
+ CMD ["sh", "-c", "uvicorn app:app --host 0.0.0.0 --port $PORT"]
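
For local testing outside the container, the CMD line can be mirrored from Python. The snippet below is a minimal sketch, not part of the commit; the fallback port 7860 is an assumption standing in for the $PORT value the hosting platform injects.

import os
import uvicorn

# Local equivalent of the Dockerfile CMD.
# The fallback port 7860 is an assumption; inside the container it comes from $PORT.
if __name__ == "__main__":
    uvicorn.run("app:app", host="0.0.0.0", port=int(os.environ.get("PORT", 7860)))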
app.py ADDED
@@ -0,0 +1,39 @@
+ from fastapi import FastAPI, HTTPException
+ import numpy as np
+ from keras.models import model_from_json
+
+ app = FastAPI()
+
+
+ loaded_model = None
+
+ # Load the model when the application starts
+ def load_model():
+     global loaded_model
+     json_file = open("model.json", 'r')
+     loaded_model_json = json_file.read()
+     json_file.close()
+     loaded_model = model_from_json(loaded_model_json)
+     loaded_model.load_weights("model.h5")
+     print("Model loaded from disk")
+
+ app.add_event_handler("startup", load_model)
+
+ # Home route
+ @app.get("/")
+ async def read_root():
+     return {"message": "Welcome to the prediction API! Visit /docs to see the documentation."}
+
+ # Prediction route
+ @app.get("/predict/{x0}/{x1}/{x2}/{x3}/{x4}")
+ async def predict(x0: float, x1: float, x2: float, x3: float, x4: float):
+     global loaded_model
+     if loaded_model is None:
+         raise HTTPException(status_code=500, detail="The model is not loaded.")
+     try:
+         # Convert the input data to a NumPy array for prediction
+         input_data = np.array([[x0, x1, x2, x3, x4]])
+         prediction = loaded_model.predict(input_data).round()
+         return {"prediction": prediction.tolist()}
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
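
The prediction route encodes the five features directly in the URL path. The usage sketch below assumes the requests library and a server reachable at http://localhost:7860; the base URL and the sample feature values are illustrative, not part of the repository.

import requests

# Hypothetical base URL; adjust to wherever the container is running.
BASE_URL = "http://localhost:7860"

# Five illustrative feature values matching /predict/{x0}/{x1}/{x2}/{x3}/{x4}
features = [0.5, 1.2, 3.4, 0.0, 2.1]
url = f"{BASE_URL}/predict/" + "/".join(str(v) for v in features)

response = requests.get(url)
response.raise_for_status()
print(response.json())  # e.g. {"prediction": [[1.0]]}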
model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:188bfde544e2d865ebc9874bad96a3389d555b4ccba4300e3407deaa127eb5a6
+ size 16648
model.json ADDED
@@ -0,0 +1 @@
+ {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": [null, 5], "dtype": "float32", "sparse": false, "ragged": false, "name": "dense_input"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "batch_input_shape": [null, 5], "units": 16, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 16, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "dtype": "float32", "units": 1, "activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "keras_version": "2.12.0", "backend": "tensorflow"}
requirements.txt ADDED
Binary file (134 Bytes).