NeuralInternet Defalt-404 committed on
Commit
7f29be0
0 Parent(s):

Duplicate from Defalt-404/Bittensor_Explore

Browse files

Co-authored-by: Kunj Kansara <Defalt-404@users.noreply.huggingface.co>

Files changed (3) hide show
  1. .gitattributes +35 -0
  2. README.md +13 -0
  3. app.py +133 -0
.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Bittensor Explore
3
+ emoji: ⚡
4
+ colorFrom: blue
5
+ colorTo: red
6
+ sdk: gradio
7
+ sdk_version: 3.42.0
8
+ app_file: app.py
9
+ pinned: false
10
+ duplicated_from: Defalt-404/Bittensor_Explore
11
+ ---
12
+
13
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
 1
+ import http
+ import http.client
+ import json
+ import ssl
+ import warnings
+
+ import gradio as gr
6
+ warnings.filterwarnings("ignore")
7
+
8
def retrieve_api_key(url):
    """Fetch the first API key from the service's admin endpoint.

    Args:
        url: Hostname (no scheme) exposing ``/admin/api-keys/`` over HTTPS.

    Returns:
        The ``api_key`` field of the first key record in the JSON response.

    Raises:
        json.JSONDecodeError: if the endpoint returns non-JSON data.
        KeyError / IndexError: if the payload lacks the expected shape.
    """
    context = ssl.create_default_context()
    context.check_hostname = True  # default for create_default_context; explicit for clarity
    conn = http.client.HTTPSConnection(url, context=context)
    try:
        conn.request("GET", "/admin/api-keys/")
        response = conn.getresponse()
        # Strip literal newlines/tabs the server may embed in the JSON body.
        body = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
    finally:
        # Bug fix: previously the socket leaked if the request or decode raised.
        conn.close()
    api_keys = json.loads(body)
    return api_keys[0]["api_key"]
22
+
23
+
24
def get_benchmark_uids(num_miner):
    """Fetch the UIDs of the top ``num_miner`` miners from the benchmark host.

    Args:
        num_miner: Number of top miner UIDs to request.

    Returns:
        The JSON-decoded list of miner UIDs returned by ``/top_miner_uids``.
    """
    url = "test.neuralinternet.ai"
    api_key = retrieve_api_key(url)

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }

    context = ssl.create_default_context()
    context.check_hostname = True  # default; explicit for clarity
    conn = http.client.HTTPSConnection(url, context=context)
    try:
        conn.request("GET", f"/top_miner_uids?n={num_miner}", headers=headers)
        response = conn.getresponse()
        # Strip literal newlines/tabs before JSON-decoding.
        body = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
    finally:
        # Bug fix: the original never closed this connection at all.
        conn.close()
    return json.loads(body)
46
+
47
+
48
+
49
def retrieve_response(payload):
    """POST a chat payload to the inference host and format miner replies.

    Args:
        payload: Dict with ``messages`` plus either ``top_n`` or ``uids``;
            serialized to JSON and sent to ``/chat``.

    Returns:
        A string of '"uid": "response"' pairs, one per miner choice,
        separated by blank lines.
    """
    # NOTE(review): hard-coded ngrok tunnel host — rotate/configure as needed.
    url = "d509-65-108-32-175.ngrok-free.app"
    api_key = retrieve_api_key(url)
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }
    body = json.dumps(payload)

    context = ssl.create_default_context()
    context.check_hostname = True  # default; explicit for clarity
    conn = http.client.HTTPSConnection(url, context=context)
    try:
        conn.request("POST", "/chat", body, headers)
        response = conn.getresponse()
        raw = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
    finally:
        # Bug fix: the original never closed this connection.
        conn.close()
    data = json.loads(raw)

    responses = {}
    for choice in data["choices"]:
        uid = choice["uid"]
        # Flatten each miner's reply onto one line for display.
        responses[uid] = choice["message"]["content"].replace("\n", "").replace("\t", "")
    return "\n\n".join(f'"{uid}": "{text}"' for uid, text in responses.items())
75
+
76
+
77
+
78
def interface_fn(system_prompt, optn, arg, user_prompt):
    """Gradio callback: route the user's prompt to miners per the chosen mode.

    Args:
        system_prompt: System message; a default is substituted when empty.
        optn: 'TOP' (best n miners), 'BENCHMARK' (benchmark-selected UIDs),
            or anything else — treated as an explicit UID list.
        arg: Miner count for TOP/BENCHMARK (capped at 30), or a
            comma-separated UID string (or single UID) otherwise.
        user_prompt: The user's question.

    Returns:
        The formatted miner responses from ``retrieve_response``.
    """
    if len(system_prompt) == 0:
        # Typo fix in default prompt: "consise" -> "concise".
        system_prompt = "You are an AI Assistant, created by bittensor and powered by NI(Neural Internet). Your task is to provide concise response to user's prompt"

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    payload = {}

    if optn == "TOP":
        # Cap the miner count at 30 to bound request size.
        payload["top_n"] = min(int(arg), 30)
        payload["messages"] = messages
        return retrieve_response(payload)

    if optn == "BENCHMARK":
        payload["uids"] = get_benchmark_uids(min(int(arg), 30))
        payload["messages"] = messages
        return retrieve_response(payload)

    # Explicit UID list. Bug fix: the single-UID path previously sent the raw
    # string ([arg]) while the comma path sent ints — now both convert to int.
    payload["uids"] = [int(x) for x in str(arg).split(",")]
    payload["messages"] = messages
    return retrieve_response(payload)
116
+
117
+
118
# Build the Gradio UI. Uses the gradio 3.x component classes (gr.Textbox,
# gr.Dropdown) instead of the deprecated gr.inputs/gr.outputs namespaces,
# which were removed in later gradio releases.
interface = gr.Interface(
    fn=interface_fn,
    inputs=[
        gr.Textbox(label="System Prompt"),
        gr.Dropdown(["TOP", "BENCHMARK", "UIDs"], label="Select Function"),
        gr.Textbox(label="Argument"),  # typo fix: was "Arguement"
        gr.Textbox(label="Enter your question"),
    ],
    outputs=gr.Textbox(label="Model Responses"),
    title="Explore Bittensor Miners",
    description="Enter parameters as per you want and get response",
    examples=[
        [
            "Your task is to provide concise response of user prompts",
            "TOP",
            5,
            "What is Bittensor?",
        ],
        [
            "Your task is to provide accurate, lengthy response with good lexical flow",
            "BENCHMARK",
            5,
            "What is neural network and how its feeding mechanism works?",
        ],
        [
            "Act like you're in the technology field for 10+ year and give unbiased opinion",
            "UIDs",
            "975,517,906,743,869",
            "What are the potential ethical concerns surrounding artificial intelligence and machine learning in healthcare?",
        ],
    ],
)

# queue() replaces the deprecated launch(enable_queue=True) keyword.
interface.queue().launch()