Update app.py
app.py
CHANGED
@@ -12,7 +12,8 @@ import hashlib
 import BlockChain
 
 loop = asyncio.get_event_loop()
-
+
+# init code
 def get_types(cls_set: List[Type], component: str):
     docset = []
     types = []
@@ -43,6 +44,8 @@ name_list['id'] = 'name'
 p2p_list = dict()
 p2p_list['id'] = '11111111'
 
+gpu_add_list = []
+
 def chat(id, prompt):
 
     return "This is an AI response."
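Note: the updated description for the chat endpoint (later in this diff) says a transaction is created internally, but chat() itself is still a stub here. A minimal sketch of how it could use the add_transaction() helper added further down in this commit; the "chat" kind label and the wiring are assumptions, not repository code:

# Hypothetical wiring, not code from this commit: the "chat" kind label and
# the stub reply are assumptions.
def chat(id, prompt):
    reply = "This is an AI response."        # stub reply, as in the current file
    add_transaction(id, "chat", prompt)      # record the request on the chain
    return reply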
@@ -100,6 +103,7 @@ def get_id_from_p2p(i):
 
 # Blockchain code
 
+
 def get_peers(name):
     data = requests.get("https://health.petals.dev/api/v1/state").json()
     out = []
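Note: the hunks only show the first and last lines of get_peers(); the filtering in between is not visible here. A sketch of a plausible body, assuming the health endpoint's JSON exposes a model_reports list whose entries carry a name and server_rows with peer_id fields (only peer_id is confirmed by the visible code):

import requests

def get_peers(name):
    data = requests.get("https://health.petals.dev/api/v1/state").json()
    out = []
    rows = []
    for model in data.get("model_reports", []):   # assumed key in the state JSON
        if model.get("name") == name:             # match the requested model
            rows = model.get("server_rows", [])   # assumed key: one row per serving peer
    for r in rows:
        out.append(r['peer_id'])
    return out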
@@ -109,13 +113,24 @@ def get_peers(name):
         out.append(r['peer_id'])
     return out
 
+blockchain = Blockchain()
+
+def add_transaction(id, kind, data):
+    blockchain.new_transaction(id, kind, data)
+
+def proof(name):
+    peers = get_peers(name)
+    for p in gpu_add_list:
+        if not p in peers:
+            add_transaction(get_id_from_p2p(peer), "out", 0)
+
 with gr.Blocks() as demo:
     count = 0
     aa = gr.Interface(
         fn=chat,
         inputs=["text","text"],
         outputs="text",
-        description="chat, returns the AI response. \n /run/predict",
+        description="chat, returns the AI response. Creates a transaction internally. \n /run/predict",
     )
 
     rr = gr.Interface(
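Note: two things are worth flagging in the added block. The file does `import BlockChain` but constructs `Blockchain()`, so the class must be brought into scope somewhere not shown in these hunks. And proof() iterates over p but passes the undefined name peer to get_id_from_p2p(), which would raise a NameError on the first peer that has dropped out. A sketch of the presumed intent (the p-for-peer fix is an assumption, not part of this commit):

# Presumed intent: record an "out" transaction for every registered GPU that
# is no longer serving the model.
def proof(name):
    peers = get_peers(name)
    for p in gpu_add_list:
        if p not in peers:                        # peer has dropped out of the swarm
            add_transaction(get_id_from_p2p(p), "out", 0)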
@@ -145,40 +160,14 @@ with gr.Blocks() as demo:
         outputs="text",
         description="get_name, returns the nickname for an id (no id if none)\n /run/predict_4",
     )
-
-    nnn = gr.Interface(
-        fn=get_id,
-        inputs=["text"],
-        outputs="text",
-        description="get_name, returns the id for a nickname (no name if none)\n /run/predict_5",
-    )
 
     adp = gr.Interface(
         fn=add_p,
         inputs=["text", "text"],
         outputs="text",
-        description="add_p, adds a p2p id for an id. Returns ok. \n /run/
-    )
-
-    nnp = gr.Interface(
-        fn=get_p,
-        inputs=["text"],
-        outputs="text",
-        description="get_p, returns the p2p id for an id. no id if none. \n /run/predict_7",
+        description="add_p, adds a p2p id for an id. Returns ok. \n /run/predict_5",
     )
 
-    nnp = gr.Interface(
-        fn=get_id_from_p2p,
-        inputs=["text"],
-        outputs="text",
-        description="get_p, returns the regular id for a p2p id. no id if none. \n /run/predict_8",
-    )
 
-    gpeer = gr.Interface(
-        fn=get_peers,
-        inputs=["text"],
-        outputs="text",
-        description="get_peers, returns the p2p id list of peers currently serving this model\n /run/predict_8",
-    )
 
 demo.queue(max_size=32).launch(enable_queue=True)
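Note: the /run/predict_* paths quoted in the descriptions are the REST routes gradio exposes for the Interface components mounted in this Blocks app. A minimal client sketch against the chat route; the Space URL is a placeholder, and with queueing enabled the Space may expect the queue/websocket protocol rather than a plain POST:

import requests

# Placeholder host; replace with the actual Space URL.
SPACE = "https://<space-url>"

resp = requests.post(
    f"{SPACE}/run/predict",                    # the chat endpoint named in its description
    json={"data": ["11111111", "hello"]},      # [id, prompt], matching chat()'s two text inputs
    timeout=30,
)
print(resp.json()["data"][0])                  # the text that chat() returned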