from threading import Thread
import inspect
import asyncio
import datetime
import json
import math
import os
import queue
import re
import time
from pprint import pprint
from typing import List, Type

import requests
import gradio as gr
from gradio import routes
import hivemind
from petals.constants import PUBLIC_INITIAL_PEERS

from health import fetch_health_state

# Connect to the public Petals swarm as a lightweight DHT client (used by get_peers below).
dht = hivemind.DHT(initial_peers=PUBLIC_INITIAL_PEERS, client_mode=True, start=True)
model_name = "quantumaikr/llama-2-70b-fb16-korean"
loop = asyncio.get_event_loop()
# Monkey patch: replace gradio.routes.get_types, which parses component docstrings
# for the auto-generated API docs.
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types

routes.get_types = get_types
# App code
# In-memory demo stores: account_list maps id -> password, name_list maps id -> nickname,
# p2p_list maps id -> p2p peer id. Each is seeded with one dummy entry.
account_list = dict()
account_list['id'] = "pass"
name_list = dict()
name_list['id'] = 'name'
p2p_list = dict()
p2p_list['id'] = '11111111'
def chat(id, prompt):
    # Placeholder reply; see the sketch below for how a real model call could look.
    return "This is the AI response."
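# Hypothetical sketch (not part of the original app): one way chat() could produce a real
# completion is to run the prompt through the Petals swarm. Assumes `transformers` is
# installed and that the tokenizer/model are loaded once at startup, not per request.
#
#   from transformers import AutoTokenizer
#   from petals import AutoDistributedModelForCausalLM
#
#   tokenizer = AutoTokenizer.from_pretrained(model_name)
#   model = AutoDistributedModelForCausalLM.from_pretrained(model_name)
#
#   def chat(id, prompt):
#       input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"]
#       output_ids = model.generate(input_ids, max_new_tokens=64)
#       return tokenizer.decode(output_ids[0], skip_special_tokens=True)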
def register(id, pw):
    if id in account_list:
        return "exist"
    else:
        account_list[id] = pw
        return "ok"

def login(id, pw):
    if id in account_list:
        if account_list[id] == pw:
            return "ok"
        else:
            return "password error"
    else:
        return "no id"

def add_name(id, name):
    name_list[id] = name
    return "ok"

def get_name(id):
    if id in name_list:
        return name_list[id]
    else:
        return "no id"

def get_id(name):
    reverse_dict = dict(map(reversed, name_list.items()))
    if name in reverse_dict:
        return reverse_dict[name]
    else:
        return "no name"

def add_p(id, p_id):
    p2p_list[id] = p_id
    return "ok"

def get_p(id):
    if id in p2p_list:
        return p2p_list[id]
    else:
        return "no id"

def get_id_from_p2p(i):
    reverse_dict = dict(map(reversed, p2p_list.items()))
    if i in reverse_dict:
        return reverse_dict[i]
    else:
        return "no id"
# Blockchain code
def get_peers(name):
    # Query the swarm health monitor and collect the peer ids of servers hosting `name`.
    data = fetch_health_state(dht)
    out = []
    for d in data['model_reports']:
        if d['name'] == name:
            for r in d['server_rows']:
                out.append(r['peer_id'])
    return out
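# Example (hypothetical values): get_peers("quantumaikr/llama-2-70b-fb16-korean") would
# return the libp2p peer ids of the servers currently hosting that model,
# e.g. ['12D3KooW...', '12D3KooW...'].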
with gr.Blocks() as demo:
    count = 0
    aa = gr.Interface(
        fn=chat,
        inputs=["text", "text"],
        outputs="text",
        description="chat, returns the AI response. \n /run/predict",
    )
    rr = gr.Interface(
        fn=register,
        inputs=["text", "text"],
        outputs="text",
        description="register, sign up (returns ok on success, exist if the id already exists)\n /run/predict_1",
    )
    ll = gr.Interface(
        fn=login,
        inputs=["text", "text"],
        outputs="text",
        description="login, log in (ok on success, password error on failure, no id if the id does not exist) \n /run/predict_2",
    )
    ad = gr.Interface(
        fn=add_name,
        inputs=["text", "text"],
        outputs="text",
        description="add_name, add a nickname for an id. Returns ok.\n /run/predict_3",
    )
    nn = gr.Interface(
        fn=get_name,
        inputs=["text"],
        outputs="text",
        description="get_name, return the nickname for an id (no id if missing)\n /run/predict_4",
    )
    nnn = gr.Interface(
        fn=get_id,
        inputs=["text"],
        outputs="text",
        description="get_id, return the id for a nickname (no name if missing)\n /run/predict_5",
    )
    adp = gr.Interface(
        fn=add_p,
        inputs=["text", "text"],
        outputs="text",
        description="add_p, add a p2p id for an id. Returns ok. \n /run/predict_6",
    )
    nnp = gr.Interface(
        fn=get_p,
        inputs=["text"],
        outputs="text",
        description="get_p, return the p2p id for an id. no id if missing. \n /run/predict_7",
    )
    nnpr = gr.Interface(
        fn=get_id_from_p2p,
        inputs=["text"],
        outputs="text",
        description="get_id_from_p2p, return the regular id for a p2p id. no id if missing. \n /run/predict_8",
    )
    gpeer = gr.Interface(
        fn=get_peers,
        inputs=["text"],
        outputs="text",
        description="get_peers, return the list of p2p ids of the peers currently serving the given model\n /run/predict_9",
    )
demo.queue(max_size=32).launch()
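
# Example client call (hypothetical, for illustration): each Interface above is exposed
# through Gradio's HTTP API at the /run/predict_N path given in its description. Note that
# with the queue enabled, some Gradio versions require the queue/websocket protocol instead
# of a plain POST.
#
#   import requests
#   resp = requests.post(
#       "http://localhost:7860/run/predict_2",   # login(id, pw)
#       json={"data": ["id", "pass"]},
#   )
#   print(resp.json()["data"][0])                # "ok" on success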