awacke1 commited on
Commit
b9c8fe2
1 Parent(s): 4071758

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +107 -1
README.md CHANGED
@@ -10,4 +10,110 @@ pinned: false
10
  license: mit
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  license: mit
11
  ---
12
 
13
+ import streamlit as st
14
+
15
+
16
+ st.markdown("""
17
+ # 1. Streamlit Docker Example
18
+ https://huggingface.co/spaces/DockerTemplates/streamlit-docker-example/tree/main
19
+ # Dockerfile:
20
+ FROM python:3.8.9
21
+ WORKDIR /app
22
+ COPY ./requirements.txt /app/requirements.txt
23
+ COPY ./packages.txt /app/packages.txt
24
+ RUN apt-get update && xargs -r -a /app/packages.txt apt-get install -y && rm -rf /var/lib/apt/lists/*
25
+ RUN pip3 install --no-cache-dir -r /app/requirements.txt
26
+ # User
27
+ RUN useradd -m -u 1000 user
28
+ USER user
29
+ ENV HOME /home/user
30
+ ENV PATH $HOME/.local/bin:$PATH
31
+ WORKDIR $HOME
32
+ RUN mkdir app
33
+ WORKDIR $HOME/app
34
+ COPY . $HOME/app
35
+ EXPOSE 8501
36
+ CMD streamlit run app.py
37
+ # app.py:
38
+ import streamlit as st
39
+ import pandas as pd
40
+ import numpy as np
41
+ st.title('Uber pickups in NYC')
42
+ DATE_COLUMN = 'date/time'
43
+ DATA_URL = ('https://s3-us-west-2.amazonaws.com/'
44
+ 'streamlit-demo-data/uber-raw-data-sep14.csv.gz')
45
+ @st.cache
46
+ def load_data(nrows):
47
+ data = pd.read_csv(DATA_URL, nrows=nrows)
48
+ lowercase = lambda x: str(x).lower()
49
+ data.rename(lowercase, axis='columns', inplace=True)
50
+ data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
51
+ return data
52
+ data_load_state = st.text('Loading data...')
53
+ data = load_data(10000)
54
+ data_load_state.text("Done! (using st.cache)")
55
+ if st.checkbox('Show raw data'):
56
+ st.subheader('Raw data')
57
+ st.write(data)
58
+ st.subheader('Number of pickups by hour')
59
+ hist_values = np.histogram(data[DATE_COLUMN].dt.hour, bins=24, range=(0,24))[0]
60
+ st.bar_chart(hist_values)
61
+ # Some number in the range 0-23
62
+ hour_to_filter = st.slider('hour', 0, 23, 17)
63
+ filtered_data = data[data[DATE_COLUMN].dt.hour == hour_to_filter]
64
+ st.subheader('Map of all pickups at %s:00' % hour_to_filter)
65
+ st.map(filtered_data)
66
+ # requirements.txt
67
+ streamlit
68
+ numpy
69
+ pandas
70
+ # 2. Gradio Docker Example
71
+ https://huggingface.co/spaces/sayakpaul/demo-docker-gradio/blob/main/Dockerfile
72
+ # Dockerfile:
73
+ # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
74
+ # you will also find guides on how best to write your Dockerfile
75
+ FROM python:3.9
76
+ WORKDIR /code
77
+ COPY ./requirements.txt /code/requirements.txt
78
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
79
+ # Set up a new user named "user" with user ID 1000
80
+ RUN useradd -m -u 1000 user
81
+ # Switch to the "user" user
82
+ USER user
83
+ # Set home to the user's home directory
84
+ ENV HOME=/home/user \
85
+ PATH=/home/user/.local/bin:$PATH
86
+ # Set the working directory to the user's home directory
87
+ WORKDIR $HOME/app
88
+ # Copy the current directory contents into the container at $HOME/app setting the owner to the user
89
+ COPY --chown=user . $HOME/app
90
+ CMD ["python", "main.py"]
91
+ # main.py
92
+ import gradio as gr
93
+ import torch
94
+ import requests
95
+ from torchvision import transforms
96
+ model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
97
+ response = requests.get("https://git.io/JJkYN")
98
+ labels = response.text.split("\n")
99
+ def predict(inp):
100
+ inp = transforms.ToTensor()(inp).unsqueeze(0)
101
+ with torch.no_grad():
102
+ prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
103
+ confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
104
+ return confidences
105
+ def run():
106
+ demo = gr.Interface(
107
+ fn=predict,
108
+ inputs=gr.inputs.Image(type="pil"),
109
+ outputs=gr.outputs.Label(num_top_classes=3),
110
+ )
111
+ demo.launch(server_name="0.0.0.0", server_port=7860)
112
+ if __name__ == "__main__":
113
+ run()
114
+ # requirements.txt
115
+ gradio
116
+ torch
117
+ torchvision
118
+ requests
119
+ """)