reverting to gradio version
- .DS_Store +0 -0
- README.md +6 -2
- app.py +42 -24
- out.txt +2 -1
- requirements.txt +2 -2
- static/dummy.txt +0 -1
- templates/home.html +0 -40
- test.ipynb +4 -3
.DS_Store
CHANGED
Binary files a/.DS_Store and b/.DS_Store differ
README.md
CHANGED
@@ -12,5 +12,9 @@ license: unknown
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
-
-
+Simple example of a Gradio UI that calls anvilserver.py running on any
+machine with an internet connection (it can even be a laptop).
+Just push app.py and requirements.txt to the Gradio Space,
+pip install anvil_uplink on your server machine (e.g. a laptop),
+and run anvilserver.py there. The Gradio UI calls the server_function
+published within anvilserver.py, using Anvil as a router.
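For reference, a minimal sketch of the anvilserver.py side described above (not part of this commit). The uplink key and the body of server_function are placeholder assumptions; only the anvil_uplink calls themselves (connect, callable, wait_forever) are real API.

# anvilserver.py -- run on the server machine (e.g. a laptop) after pip install anvil_uplink
import anvil.server

# Placeholder uplink key; substitute your own from the Anvil app's Uplink settings.
anvil.server.connect("YOUR-ANVIL-UPLINK-KEY")

@anvil.server.callable
def server_function(payload):
    # Do the real work on this machine and return the result to the Gradio UI.
    return f"processed: {payload}"

# Keep the uplink connection open so the Space can keep calling server_function.
anvil.server.wait_forever()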
app.py
CHANGED
@@ -1,32 +1,50 @@
-
+import gradio as gr
+from threading import Thread
+import time
+import anvil.server
+from registration import register,get_register,func_reg
+from library import get_file,get_files
 import os
+anvil.server.connect('55MH4EBKM22EP4E6D5T6CVSL-VGO5X4SM6JEXGJVT')
+register(get_file)
+register(get_files)
 
-
-
-
-
-
-
-
-
-
-
-
-        print('I am in get')
-    elif request.method=='POST':
-        print('I am in post')
-        data=request.get_json()
-        if 'script' in data: script=data['script']
-        if script=='' or script is None: return 'INVALID'
-        os.system(script+' > ./out.txt')
+# with gr.Blocks() as block:
+#     textbox = gr.inputs.Textbox(label='Function Register')
+#     button = gr.Button(value="Show Function Calls")
+#     button.click(get_register,inputs=None,outputs=[textbox])
+
+# block.launch()
+import json
+import ast
+def my_inference_function(name):
+    # print(ast.literal_eval(name)['name'])
+    os.system(name+' > ./out.txt')
     with open('./out.txt','r') as f: output=f.read()
     return output
-
-def home():
-    return render_template('home.html',messageD=MESSAGED)
-
-app.run(host="0.0.0.0", port=7860)
+    # return "Input Data: " + name + ", stay tuned for ML models from this API"
+
+gradio_interface = gr.Interface(
+    fn=my_inference_function,
+    inputs="text",
+    outputs="text",
+    title="REST API with Gradio and Huggingface Spaces",
+    description='''Inputs should be json of test item e.g., as a dictionary;
+    output right now is just returning the input; later label will be returned.
+
+    This is how to call the API from Python:
+
+    import requests
+
+    response = requests.post("https://gmshroff-gmserver.hf.space/run/predict", json={
+      "data": [
+        "\<put some json string here\>",
+    ]}).json()
+
+    data = response["data"]
+''')
+
+gradio_interface.launch()
+# anvil.server.wait_forever()
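As a usage note, the call pattern embedded in the Interface description above can be run as a standalone script. This is a sketch, assuming the Space is reachable at gmshroff-gmserver.hf.space and that the single text input is the shell command string that my_inference_function executes; the command here is only an example.

# call_space.py -- sketch of calling the Space's Gradio REST endpoint
import requests

response = requests.post(
    "https://gmshroff-gmserver.hf.space/run/predict",
    json={"data": ["ls -l"]},
).json()

# Gradio wraps outputs in a "data" list; the first element is the text
# returned by my_inference_function (the captured contents of out.txt).
print(response["data"][0])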
out.txt
CHANGED
@@ -1 +1,2 @@
-
+Filesystem    512-blocks      Used  Available Capacity  iused      ifree %iused  Mounted on
+/dev/disk3s5  1942700360 612016464 1270377600      33% 2983574 6351888000     0% /System/Volumes/Data
requirements.txt
CHANGED
@@ -1,2 +1,2 @@
-
-
+anvil_uplink
+PyPDF2
static/dummy.txt
DELETED
@@ -1 +0,0 @@
-dummy file
templates/home.html
DELETED
@@ -1,40 +0,0 @@
-<html>
-<head>
-<title>gmserver</title>
-<style>
-* {
-  box-sizing: border-box;
-}
-/* Create two equal columns that floats next to each other */
-.columnleft {
-  float: left;
-  padding: 10px;
-  width: 15%;
-}
-.columncenter {
-  float: left;
-  padding: 10px;
-  width: 80%;
-}
-/* Clear floats after the columns */
-.row:after {
-  content: "";
-  display: table;
-  clear: both;
-}
-</style>
-</head>
-<body>
-<div class="row">
-<div class="columnleft">
-<img src="/static/logo.jpeg" width="50px">
-</div>
-<div class="columncenter" align="center">
-<h1>{{messageD['title']}}</h1>
-{% for message in messageD['messageL']:%}
-<h4>{{message}}</h4>
-{% endfor %}
-</div>
-</div>
-</body>
-</html>
test.ipynb
CHANGED
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -14,12 +14,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
     "headers = {'Content-Type': 'application/json'}\n",
-    "url='http://127.0.0.1:5000/run'\n",
+    "# url='http://127.0.0.1:5000/run'\n",
+    "url='https://huggingface.co/spaces/gmshroff/gmserver'\n",
     "body={\"script\":\"python update_valdata.py\"}"
    ]
   },
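The notebook cells in this hunk only set up headers, url, and body; the cell that actually sends the request lies outside the diff. A hypothetical follow-up cell, assuming the old Flask-style /run contract (POST the JSON body, read back the text response), might look like:

# Hypothetical next cell (not shown in this diff): send the request built above.
import requests

response = requests.post(url, headers=headers, json=body)
print(response.status_code)
print(response.text)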