awacke1 committed
Commit 8b01da1
1 Parent(s): b9c8fe2

Update app.py

Files changed (1)
  1. app.py +2 -150
app.py CHANGED
@@ -2,155 +2,7 @@ import streamlit as st
 
 
 st.markdown("""
+ https://github.com/AaronCWacker/Yggdrasil/tree/main
 
-
- # 2. Streamlit Docker Example
-
- https://huggingface.co/spaces/DockerTemplates/streamlit-docker-example/tree/main
-
- # Dockerfile:
- FROM python:3.8.9
-
- WORKDIR /app
-
- COPY ./requirements.txt /app/requirements.txt
- COPY ./packages.txt /app/packages.txt
-
- RUN apt-get update && xargs -r -a /app/packages.txt apt-get install -y && rm -rf /var/lib/apt/lists/*
- RUN pip3 install --no-cache-dir -r /app/requirements.txt
-
- # User
- RUN useradd -m -u 1000 user
- USER user
- ENV HOME /home/user
- ENV PATH $HOME/.local/bin:$PATH
-
- WORKDIR $HOME
- RUN mkdir app
- WORKDIR $HOME/app
- COPY . $HOME/app
-
- EXPOSE 8501
- CMD streamlit run app.py
-
- # app.py:
-
- import streamlit as st
- import pandas as pd
- import numpy as np
-
- st.title('Uber pickups in NYC')
-
- DATE_COLUMN = 'date/time'
- DATA_URL = ('https://s3-us-west-2.amazonaws.com/'
- 'streamlit-demo-data/uber-raw-data-sep14.csv.gz')
-
- @st.cache
- def load_data(nrows):
- data = pd.read_csv(DATA_URL, nrows=nrows)
- lowercase = lambda x: str(x).lower()
- data.rename(lowercase, axis='columns', inplace=True)
- data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
- return data
-
- data_load_state = st.text('Loading data...')
- data = load_data(10000)
- data_load_state.text("Done! (using st.cache)")
-
- if st.checkbox('Show raw data'):
- st.subheader('Raw data')
- st.write(data)
-
- st.subheader('Number of pickups by hour')
- hist_values = np.histogram(data[DATE_COLUMN].dt.hour, bins=24, range=(0,24))[0]
- st.bar_chart(hist_values)
-
- # Some number in the range 0-23
- hour_to_filter = st.slider('hour', 0, 23, 17)
- filtered_data = data[data[DATE_COLUMN].dt.hour == hour_to_filter]
-
- st.subheader('Map of all pickups at %s:00' % hour_to_filter)
- st.map(filtered_data)
-
- # requirements.txt
- streamlit
- numpy
- pandas
-
-
-
- # 2. Gradio Docker Example
-
- https://huggingface.co/spaces/sayakpaul/demo-docker-gradio/blob/main/Dockerfile
-
- # Dockerfile:
-
- # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
- # you will also find guides on how best to write your Dockerfile
-
- FROM python:3.9
-
- WORKDIR /code
-
- COPY ./requirements.txt /code/requirements.txt
-
- RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
-
- # Set up a new user named "user" with user ID 1000
- RUN useradd -m -u 1000 user
- # Switch to the "user" user
- USER user
- # Set home to the user's home directory
- ENV HOME=/home/user \
- PATH=/home/user/.local/bin:$PATH
-
- # Set the working directory to the user's home directory
- WORKDIR $HOME/app
-
- # Copy the current directory contents into the container at $HOME/app setting the owner to the user
- COPY --chown=user . $HOME/app
-
- CMD ["python", "main.py"]
-
-
- # main.py
-
- import gradio as gr
- import torch
- import requests
- from torchvision import transforms
-
- model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
- response = requests.get("https://git.io/JJkYN")
- labels = response.text.split("\n")
-
-
- def predict(inp):
- inp = transforms.ToTensor()(inp).unsqueeze(0)
- with torch.no_grad():
- prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
- confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
- return confidences
-
-
- def run():
- demo = gr.Interface(
- fn=predict,
- inputs=gr.inputs.Image(type="pil"),
- outputs=gr.outputs.Label(num_top_classes=3),
- )
-
- demo.launch(server_name="0.0.0.0", server_port=7860)
-
-
- if __name__ == "__main__":
- run()
-
- # requirements.txt
-
- gradio
- torch
- torchvision
- requests
-
+ https://github.com/AaronCWacker/Docker-ACA-Examples
  """)