# -*- coding: utf-8 -*-
"""Deploy OceanApp demo.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1j0T8gdLIa0X8fzkIgFpXDoU27BF49RUz?usp=sharing

![ ](https://i.pinimg.com/564x/3e/b8/f7/3eb8f7c348dffd7b3dffcafe81fbf2a6.jpg)

# Model

YOLO is a family of compound-scaled object detection models trained on the COCO dataset, and it includes simple functionality for Test Time Augmentation (TTA), model ensembling, hyperparameter evolution, and export to ONNX, CoreML and TFLite.

## Gradio Inference

![](https://i.ibb.co/982NS6m/header.png)

This notebook is optionally accelerated with a GPU runtime.

----------------------------------------------------------------------

YOLOv5 Gradio demo

*Author: Ultralytics LLC and Gradio*

# Code
"""

#!pip install -qr https://raw.githubusercontent.com/ultralytics/yolov5/master/requirements.txt gradio  # install dependencies

import gradio as gr
import pandas as pd
import torch
import logging
import json
import re
from PIL import Image

# Example images
torch.hub.download_url_to_file('https://i.pinimg.com/564x/18/0b/00/180b00e454362ff5caabe87d9a763a6f.jpg', 'ejemplo1.jpg')
torch.hub.download_url_to_file('https://i.pinimg.com/564x/3b/2f/d4/3b2fd4b6881b64429f208c5f32e5e4be.jpg', 'ejemplo2.jpg')

# Model
#model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # force_reload=True to update
#model = torch.hub.load('ultralytics/yolov5', 'custom', path='best.pt')  # local model or Google Colab
model = torch.hub.load('ultralytics/yolov5', 'custom', path='best.pt', force_reload=True, autoshape=True)  # local model or Google Colab
#model = torch.hub.load('path/to/yolov5', 'custom', path='/content/yolov56.pt', source='local')  # local repo


def removeStr(string):
    """Remove every space from a string."""
    return string.replace(" ", "")


def listJSON(a, b, c, d, e, f):
    """Build the JSON summary (image, size and detected species) from fixed slices of the
    printed YOLOv5 results: a=image, b=size, c/e=counts, d/f=class names."""
    x = re.findall("obo mar", d)
    y = re.findall("elica", d)
    z = re.findall("elica", f)
    if x:
        d = 'Lobo marino'
    if y:
        d = 'Pelicano'
    if z:
        f = 'Pelicano'
    if d == 'Lobo marino' or d == 'Pelicano':
        if d == 'Pelicano\nSp' or d == 'Pelicano\nS':  # defensive: trailing characters picked up by the fixed slice
            d = 'Pelicano'
        if f != 'Pelicano':
            strlista = '"detail":[{"quantity":"' + str(removeStr(c)) + '","description":"' + str(d) + '"}]'
        else:
            strlista = ('"detail":[{"quantity":"' + str(removeStr(c)) + '","description":"' + str(d) + '"},'
                        '{"quantity":"' + str(removeStr(e)) + '","description":"' + str(f) + '"}]')
        strlist = '{"image":"' + str(removeStr(a)) + '","size":"' + str(removeStr(b)) + '",' + strlista + '}'
        json_string = json.loads(strlist)
        return json_string


def arrayLista(a, b, c, d):
    """Build the (Cantidad, Especie) DataFrame from fixed slices of the printed YOLOv5 results:
    a/c=counts, b/d=class names."""
    x = re.findall("obo mar", b)
    y = re.findall("elica", b)
    z = re.findall("elica", d)
    if x:
        b = 'Lobo marino'
    if y:
        b = 'Pelicano'
    if z:
        d = 'Pelicano'
    if b == 'Lobo marino' or b == 'Pelicano':
        strlist = []
        strlist2 = []
        strlist.append(removeStr(a))
        strlist.append(b)
        if d == 'Pelicano':
            strlist2.append(removeStr(c))
            strlist2.append(d)
        strlista = [strlist, strlist2] if strlist2 else [strlist]  # skip the empty row when only one species is present
        df = pd.DataFrame(strlista, columns=['Cantidad', 'Especie'])
        return df


def yolo(size, iou, conf, im):
    """Wrapper fn for gradio: resize the image, run inference and format the outputs."""
    try:
        g = (int(size) / max(im.size))  # gain
        im = im.resize(tuple(int(x * g) for x in im.size), Image.LANCZOS)  # resize (ANTIALIAS was renamed LANCZOS in Pillow 10)
        model.iou = iou
        model.conf = conf
        results2 = model(im)  # inference
        results2.render()  # draws boxes and labels onto results2.ims
        results3 = str(results2)  # printed summary, e.g. "image 1/1: 640x480 2 Lobo marinos, 1 Pelicano"
        lista = listJSON(results3[0:9], results3[11:18], results3[19:21], results3[22:32], results3[35:37], results3[37:45])
        lista2 = arrayLista(results3[19:21], results3[22:32], results3[35:37], results3[37:45])
        return Image.fromarray(results2.ims[0]), lista2, lista
    except Exception as e:
        logging.error(e, exc_info=True)
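
# The fixed character slices above (results3[0:9], results3[22:32], ...) depend on the exact
# layout of YOLOv5's printed summary. As a sketch of a sturdier alternative, the Detections
# object returned by the hub model also exposes results.pandas().xyxy[0] (columns: xmin, ymin,
# xmax, ymax, confidence, class, name). The helper below is illustrative and not part of the
# original app.
def species_counts(results):
    """Return {class name: number of detections} for the first image of a YOLOv5 result."""
    df = results.pandas().xyxy[0]
    return df['name'].value_counts().to_dict()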
#------------ Interface-------------

in1 = gr.inputs.Radio(['640', '1280'], label="Tamaño de la imagen", default='640', type='value')
in2 = gr.inputs.Slider(minimum=0, maximum=1, step=0.05, default=0.45, label='NMS IoU threshold')
in3 = gr.inputs.Slider(minimum=0, maximum=1, step=0.05, default=0.50, label='Umbral o threshold')
in4 = gr.inputs.Image(type='pil', label="Original Image")

out2 = gr.outputs.Image(type="pil", label="Identificación con Yolov5")
out3 = gr.outputs.Dataframe(label="Descripción", headers=['Cantidad', 'Especie'])
out4 = gr.outputs.JSON(label="JSON")
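
# A minimal wiring sketch, assuming the same Gradio 2.x-style API used above: it connects the
# `yolo` wrapper to the inputs/outputs defined here and reuses the two downloaded images as
# clickable examples. The variable name `demo_sketch` and the example rows are illustrative.
demo_sketch = gr.Interface(fn=yolo,
                           inputs=[in1, in2, in3, in4],
                           outputs=[out2, out3, out4],
                           examples=[['640', 0.45, 0.50, 'ejemplo1.jpg'],
                                     ['640', 0.45, 0.50, 'ejemplo2.jpg']])
# demo_sketch.launch() would start the web app.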
#-------------- Text-----
title = 'OceanApp'

description = """
Sistema para el reconocimiento de las especies en la pesca acompañante de cerco, utilizando redes neuronales convolucionales, para una empresa del sector pesquero en los puertos del Callao y Paracas.
Nota: Este modelo solo acepta imágenes de Lobos marinos o Pelicanos proporcionadas por empresas peruanas.