# NOTE(review): the three lines below were non-code scrape residue
# ("Spaces:", "Runtime error", "Runtime error") pasted above the script;
# kept as comments so the file parses as Python.
# Spaces:
# Runtime error
# Runtime error
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
from matplotlib import pyplot as plt
from PIL import Image
from torchvision import models, transforms
def get_index_to_class_mapping():
    """Return the index -> class-name mapping for the chest X-ray classifier.

    Keys are the integers 1..15 (1-based, matching the original
    np.arange(1, 16) construction); values are the 15 NIH ChestX-ray14
    finding labels plus 'No Finding', in alphabetical order.

    Returns:
        dict[int, str] mapping class index (1-15) to class name.
    """
    class_names = [
        'Atelectasis', 'Cardiomegaly', 'Consolidation', 'Edema',
        'Effusion', 'Emphysema', 'Fibrosis', 'Hernia', 'Infiltration',
        'Mass', 'No Finding', 'Nodule', 'Pleural Thickening',
        'Pneumonia', 'Pneumothorax',
    ]
    # enumerate(start=1) replaces np.arange+zip: same 1..15 keys,
    # plain ints instead of np.int64 (hash-equal, so lookups behave the same).
    return dict(enumerate(class_names, start=1))
def load_classifier_from_file(ckpt_file, location='cpu'):
    """Load a classifier model from a torch checkpoint file.

    The checkpoint is expected to be a dict holding the full pickled
    model object under the 'model' key.

    SECURITY NOTE: because the checkpoint pickles a whole nn.Module,
    loading requires ``weights_only=False``, which can execute arbitrary
    code embedded in the file — only load checkpoints from trusted
    sources.

    Args:
        ckpt_file: path to the checkpoint file.
        location: ``map_location`` for torch.load (default 'cpu').

    Returns:
        The model object stored under the checkpoint's 'model' key.

    Raises:
        KeyError: if the checkpoint dict has no 'model' entry.
    """
    # weights_only=False is made explicit: torch >= 2.6 defaults to
    # weights_only=True, which would refuse to unpickle the full model.
    checkpoint = torch.load(ckpt_file, map_location=location, weights_only=False)
    return checkpoint['model']
def transform_pil_to_tensor(pil_image, device='cpu'):
    """Convert a PIL image to a normalized float tensor on ``device``.

    Applies standard ImageNet preprocessing: resize (shorter side to
    224 px), convert to a tensor in [0, 1], then normalize per channel
    with the ImageNet mean/std.

    NOTE(review): Resize(224) scales only the *shorter* edge, so the
    result is not guaranteed to be 224x224 — a CenterCrop(224) is
    usually paired with it; confirm the classifier's expected input.
    NOTE(review): Normalize uses 3-channel stats, so a grayscale ('L')
    image would fail — convert upstream with .convert('RGB') if needed.

    Args:
        pil_image: input PIL.Image.
        device: torch device (string or object) for the output tensor.

    Returns:
        Normalized torch.Tensor of shape (C, H, W) on ``device``.
    """
    mean = [0.485, 0.456, 0.406]  # ImageNet channel means
    std = [0.229, 0.224, 0.225]   # ImageNet channel stds
    transform = transforms.Compose([
        transforms.Resize(224),
        transforms.ToTensor(),
        transforms.Normalize(mean, std),
    ])
    return transform(pil_image).to(device)
def noise_process(numpy_image, steps=10, show=True):
    """Produce a list of progressively noisier copies of an image.

    At step k (1-based) zero-mean Gaussian noise with standard deviation
    255 * 0.1 * k is added to the *previous* noisy image, so noise
    accumulates across steps. Index 0 of the returned list is the
    original, untouched image.

    Args:
        numpy_image: numpy array of any shape; the 255 scaling assumes
            pixel values on a 0-255 scale — TODO confirm with callers.
        steps: number of noise-accumulation steps.
        show: when True (default, matching the original behavior), each
            noisy image is displayed with matplotlib as it is produced.

    Returns:
        List of ``steps + 1`` numpy arrays, original first.
    """
    noisy_images = [numpy_image]
    current = numpy_image
    for step in range(steps):
        sigma = (step + 1) * 0.1  # noise grows with each step
        noise = 255 * np.random.normal(0, sigma, numpy_image.size).reshape(numpy_image.shape)
        current = current + noise
        noisy_images.append(current)
        if show:
            plt.imshow(current, cmap='gray')
            plt.show()
    return noisy_images