faceyacc committed
Commit a502dd9
1 Parent(s): b589572

added requirements.txt

Files changed (2)
  1. app.py +30 -4
  2. requirements.txt +5 -0
app.py CHANGED
@@ -1,7 +1,33 @@
+import transformers
 import gradio as gr
+import datasets
+import torch
+from transformers import AutoFeatureExtractor, AutoModelForImageClassification
+from transformers import ViTFeatureExtractor, ViTForImageClassification
 
-def greet(name):
-    return "Hello " + name + "!!"
 
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
-iface.launch()
+dataset = datasets.load_dataset('beans', 'full_size')
+
+extractor = AutoFeatureExtractor.from_pretrained("saved_model_files")
+model = AutoModelForImageClassification.from_pretrained("saved_model_files")
+
+labels = dataset['train'].features['labels'].names
+
+def classify(im):
+    features = extractor(im, return_tensors='pt')
+    logits = model(features["pixel_values"]).logits
+    probability = torch.nn.functional.softmax(logits, dim=-1)
+    probs = probability[0].detach().numpy()
+    confidences = {label: float(probs[i]) for i, label in enumerate(labels)}
+    return confidences
+
+
+description = "Bean leaf health classification with Google's ViT"
+title = "Bean Leaf Health Check"
+examples = [["'angular_leaf_spot': 0.9999030828475952, 'bean_rust': 5.320278796716593e-05, 'healthy': 4.378804806037806e-05"]]
+
+
+gr_interface = gr.Interface(classify, inputs='image', outputs='label', title=title, description=description)
+
+
+gr_interface.launch(debug=True)
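Note that app.py loads the feature extractor and model from a local saved_model_files directory that is not part of this commit. A minimal, hypothetical sketch of how such a directory could be produced, assuming a ViT checkpoint (the google/vit-base-patch16-224-in21k name is an assumption, not stated anywhere in this repo) fine-tuned on the beans dataset:

from datasets import load_dataset
from transformers import ViTFeatureExtractor, ViTForImageClassification

# Assumed starting checkpoint; the commit only shows that some ViT model was saved locally.
checkpoint = "google/vit-base-patch16-224-in21k"

dataset = load_dataset("beans", "full_size")
labels = dataset["train"].features["labels"].names  # angular_leaf_spot, bean_rust, healthy

extractor = ViTFeatureExtractor.from_pretrained(checkpoint)
model = ViTForImageClassification.from_pretrained(
    checkpoint,
    num_labels=len(labels),
    id2label={i: label for i, label in enumerate(labels)},
    label2id={label: i for i, label in enumerate(labels)},
)

# ... fine-tuning on the beans images would happen here (e.g. with transformers.Trainer) ...

# Save both pieces to the directory that app.py reads from.
extractor.save_pretrained("saved_model_files")
model.save_pretrained("saved_model_files")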
requirements.txt ADDED
@@ -0,0 +1,5 @@
+datasets
+transformers
+evaluate
+gradio
+torch
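With requirements.txt added, the app can also be tried outside the Space by installing the dependencies with pip install -r requirements.txt and then running python app.py. The evaluate package is not imported by app.py itself, so it was presumably used when training or evaluating the saved model.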