Andrey committed on
Commit
2c16c94
1 Parent(s): ec55dcd

Add "about" page and more descriptions in readme. Change default thre… (#12)


* Add "about" page and more descriptions in readme. Change default threshold to 0.8.

* Update variable name for deepsource.

Files changed (6)
  1. .flake8 +1 -1
  2. .github/README.md +2 -1
  3. README.md +1 -1
  4. pages/about.py +22 -0
  5. src/utils.py +16 -5
  6. st_app.py +12 -1
.flake8 CHANGED
@@ -1,5 +1,5 @@
 [flake8]
-ignore = I001,I002,I004,I005,I101,I201,C101,C403,C901,F401,F403,S001,D100,D101,D102,D103,D104,D105,D106,D107,D200,D210,D205,D400,T001,W504,D202,E203,W503,B006,T002,T100,P103,C408,F841
+ignore = I001,I002,I004,I005,I101,I201,C101,C403,C901,F401,F403,S001,D100,D101,D102,D103,D104,D105,D106,D107,D200,D210,D205,D400,T001,W504,D202,E203,E501,W503,B006,T002,T100,P103,C408,F841
 max-line-length = 120
 exclude = outputs/*
 max-complexity = 10
.github/README.md CHANGED
@@ -1 +1,2 @@
-This is the main readme file.
+![visitors](https://visitor-badge.glitch.me/badge?page_id=wissamantoun.arabicnlpapp)
+[![DeepSource](https://static.deepsource.io/deepsource-badge-light-mini.svg)](https://deepsource.io/gh/Erlemar/digit-draw-detect/?ref=repository-badge)
README.md CHANGED
@@ -10,4 +10,4 @@ app_file: st_app.py
 pinned: false
 license: mit
 ---
-Checking readme.
+![visitors](https://visitor-badge.glitch.me/badge?page_id=wissamantoun.arabicnlpapp)
pages/about.py ADDED
@@ -0,0 +1,22 @@
+import streamlit as st
+
+text = """
+## A brief history of the project.
+The first idea for the project came in the summer of 2017. I had completed the cs231n course and wanted to put my skills to the test, so I learned Flask and many other tools from scratch and made an [app](https://github.com/Erlemar/digit-draw-recognize) for recognizing handwritten digits. It had two models: a simple feed-forward neural net written in NumPy and a CNN written in TensorFlow. One fun feature of this app was online learning: the model continuously improved its predictions based on new inputs (though this sometimes led to incorrect predictions).
+
+In 2019 I decided to update the [project](https://github.com/Erlemar/digit-draw-predict): I trained a new neural net in PyTorch and used cv2 to detect separate digits (people often drew multiple digits). On top of that, the model had 11 classes - I added a separate class for "junk", as people often drew things for fun: animals, objects, or words.
+
+The first two versions were deployed on Heroku's free plan, but in 2022 those plans were discontinued. Out of nostalgia, I didn't want the project to die, so I developed a new version and deployed it differently. The current version has an object detection model (a YOLOv3 written from scratch) and 12 classes (digits, junk, and **censored**).
+If you want to know what **censored** means, just try to draw something 😉
+
+Initially, I considered deploying the app on Streamlit Cloud, but its computational limits were too low, so now the model is live on HuggingFace Spaces.
+
+### Links with additional information:
+
+* [Project page on my personal website](https://andlukyane.com/project/drawn-digits-prediction)
+* [A dataset with the digits and bounding boxes on Kaggle](https://www.kaggle.com/datasets/artgor/handwritten-digits-and-bounding-boxes)
+* [Training code](https://github.com/Erlemar/pytorch_tempest_pet_)
+
+"""
+
+st.markdown(text, unsafe_allow_html=True)
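Worth noting for readers unfamiliar with the layout: the new file sits in a `pages/` directory, which Streamlit treats as its multipage-app convention (assuming a version with native multipage support, 1.10+), so `about.py` becomes a sidebar page without any routing code. A minimal sketch of the resulting structure (file names from this commit, the page body is a placeholder):

```python
# Layout after this commit (Streamlit-relevant files only):
#
#   st_app.py        <- main page, titled 'Handwritten digit detector'
#   pages/
#       about.py     <- auto-registered by Streamlit as a separate "about" page
#
# Each page is just a standalone script that Streamlit reruns top to bottom.
import streamlit as st

st.markdown("Placeholder page body.", unsafe_allow_html=True)  # illustrative only
```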
src/utils.py CHANGED
@@ -1,5 +1,6 @@
 import datetime
 import json
+import os
 import uuid
 from typing import List

@@ -9,12 +10,22 @@ import matplotlib.patches as patches
 import matplotlib.pyplot as plt
 import numpy.typing as npt
 import streamlit as st
+import tomli

 AWS_ACCESS_KEY_ID = ''
 AWS_SECRET_ACCESS_KEY = ''
-if st.secrets is not None:
-    AWS_ACCESS_KEY_ID = st.secrets['AWS_ACCESS_KEY_ID']
-    AWS_SECRET_ACCESS_KEY = st.secrets['AWS_SECRET_ACCESS_KEY']
+try:
+    if st.secrets is not None:
+        AWS_ACCESS_KEY_ID = st.secrets['AWS_ACCESS_KEY_ID']
+        AWS_SECRET_ACCESS_KEY = st.secrets['AWS_SECRET_ACCESS_KEY']
+except BaseException:
+    pass
+
+if os.path.exists('config.toml'):
+    with open('config.toml', 'rb') as f:
+        config = tomli.load(f)
+    AWS_ACCESS_KEY_ID = config['AWS_ACCESS_KEY_ID']
+    AWS_SECRET_ACCESS_KEY = config['AWS_SECRET_ACCESS_KEY']

 client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)

@@ -84,8 +95,8 @@ def save_image(image: npt.ArrayLike, pred: List[List]) -> str:
     fig.savefig(f'{file_name}.png')

     # dump bboxes in a local file
-    with open(f'{file_name}.json', 'w') as f:
-        json.dump({f'{file_name}.png': pred}, f)
+    with open(f'{file_name}.json', 'w') as j_f:
+        json.dump({f'{file_name}.png': pred}, j_f)

     # upload the image and the bboxes to s3.
     save_object_to_s3(f'{file_name}.png', f'images/{file_name}.png')
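For local runs without Streamlit secrets, the new fallback reads a `config.toml` next to the app with `tomli` (which only accepts binary file handles, hence the `'rb'` mode). A minimal sketch of the expected file, assuming it holds just the two top-level string keys referenced above (the values are placeholders):

```python
import tomli

# Hypothetical config.toml contents with placeholder values.
SAMPLE_CONFIG = """
AWS_ACCESS_KEY_ID = "AKIA-PLACEHOLDER"
AWS_SECRET_ACCESS_KEY = "SECRET-PLACEHOLDER"
"""

config = tomli.loads(SAMPLE_CONFIG)  # tomli.load(f) expects a binary file, loads() a str
print(config['AWS_ACCESS_KEY_ID'])   # -> AKIA-PLACEHOLDER
```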
st_app.py CHANGED
@@ -8,6 +8,7 @@ from streamlit_drawable_canvas import st_canvas
 from src.ml_utils import predict, get_model, transforms
 from src.utils import plot_img_with_rects, save_image

+st.title('Handwritten digit detector')
 logging.info('Starting')

 col1, col2 = st.columns(2)
@@ -38,9 +39,19 @@ with col2:
     logging.info('prediction done')

     file_name = save_image(image.permute(1, 2, 0).numpy(), pred)
-    threshold = st.slider('Bbox probability slider', min_value=0.0, max_value=1.0, value=0.5)
+    threshold = st.slider('Bbox probability slider', min_value=0.0, max_value=1.0, value=0.8)

     fig = plot_img_with_rects(image.permute(1, 2, 0).numpy(), pred, threshold, coef=192)
     fig.savefig(f'{file_name}_temp.png')
     image = Image.open(f'{file_name}_temp.png')
     st.image(image)
+
+text = """
+This is a small app for handwritten digit detection and recognition, developed for fun. It uses a YOLOv3 model written and trained from scratch.
+You can draw a digit (or whatever you want) and the model will try to understand what it is.
+You can use the slider above to show bounding boxes with a probability higher than the threshold.
+If you want to know how the app works in more detail, you are welcome to read the "About" page.
+Enjoy! :)
+"""
+
+st.markdown(text, unsafe_allow_html=True)
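The slider value feeds `plot_img_with_rects` as a confidence cut-off for which boxes get drawn, so raising the default from 0.5 to 0.8 simply hides lower-confidence boxes. A minimal sketch of that filtering step, assuming each prediction is laid out as `[x_min, y_min, x_max, y_max, confidence, class_id]` (the exact format is not shown in this diff):

```python
from typing import List

def filter_by_threshold(pred: List[List[float]], threshold: float = 0.8) -> List[List[float]]:
    """Keep only boxes whose confidence is at or above the slider threshold."""
    return [box for box in pred if box[4] >= threshold]

# With the new default of 0.8 the second (low-confidence) box would be dropped.
boxes = [[10.0, 10.0, 50.0, 50.0, 0.92, 3], [60.0, 12.0, 90.0, 48.0, 0.41, 7]]
print(filter_by_threshold(boxes))  # -> [[10.0, 10.0, 50.0, 50.0, 0.92, 3]]
```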