import gradio as gr
import http.client
import ssl
import json
import warnings

warnings.filterwarnings("ignore")
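
# Gradio front end for querying Bittensor miners through the Neural Internet
# API: miners are selected by rank ("TOP"), by benchmark UIDs fetched from the
# /top_miner_uids endpoint ("BENCHMARK"), or by an explicit list of UIDs ("UIDs").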

def retrieve_api_key(url):
    """Fetch the first API key listed by the host's /admin/api-keys/ endpoint."""
    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection(url, context=context)
    try:
        conn.request("GET", "/admin/api-keys/")
        api_key_response = conn.getresponse()
        api_keys_data = (
            api_key_response.read().decode("utf-8").replace("\n", "").replace("\t", "")
        )
        api_keys_json = json.loads(api_keys_data)
        return api_keys_json[0]["api_key"]
    finally:
        conn.close()
    
    
def get_benchmark_uids(num_miner):
    """Return the UIDs of the top `num_miner` miners from the /top_miner_uids endpoint."""
    url = "test.neuralinternet.ai"
    api_key = retrieve_api_key(url)

    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection(url, context=context)

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }

    try:
        conn.request("GET", f"/top_miner_uids?n={num_miner}", headers=headers)
        miner_response = conn.getresponse()
        miner_data = (
            miner_response.read().decode("utf-8").replace("\n", "").replace("\t", "")
        )
        return json.loads(miner_data)
    finally:
        conn.close()



def retrieve_response(payload):
    """POST the chat payload to the /chat endpoint and format one response per miner UID."""
    url = "d509-65-108-32-175.ngrok-free.app"
    api_key = retrieve_api_key(url)
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }
    body = json.dumps(payload)
    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection(url, context=context)
    try:
        conn.request("POST", "/chat", body, headers)
        init_response = conn.getresponse()
        init_data = init_response.read().decode("utf-8").replace("\n", "").replace("\t", "")
        init_json = json.loads(init_data)
    finally:
        conn.close()

    # Map each miner UID to its (whitespace-stripped) message content.
    response_dict = {}
    for choice in init_json["choices"]:
        uid = choice["uid"]
        resp = choice["message"]["content"].replace("\n", "").replace("\t", "")
        response_dict[uid] = resp
    return "\n\n".join(f'"{uid}": "{resp}"' for uid, resp in response_dict.items())
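
# retrieve_response() expects a payload containing "messages" plus either
# "top_n" (how many top miners to query) or "uids" (an explicit list of miner
# UIDs); interface_fn below builds it, for example:
#   {"top_n": 5, "messages": [{"role": "system", "content": "..."},
#                             {"role": "user", "content": "..."}]}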



def interface_fn(system_prompt, optn, arg, user_prompt):
    """Build the chat payload for the selected mode (TOP, BENCHMARK, or UIDs) and query the miners."""
    if not system_prompt:
        system_prompt = (
            "You are an AI Assistant, created by Bittensor and powered by NI (Neural Internet). "
            "Your task is to provide a concise response to the user's prompt."
        )

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    payload = {}

    if optn == 'TOP':
        # Query the top-n miners; cap the request at 30.
        if int(arg) > 30:
            arg = 30
        payload['top_n'] = int(arg)
        payload['messages'] = messages
        return retrieve_response(payload)

    elif optn == 'BENCHMARK':
        # Look up the benchmark miner UIDs first; cap the request at 30.
        if int(arg) > 30:
            arg = 30
        payload['uids'] = get_benchmark_uids(int(arg))
        payload['messages'] = messages
        return retrieve_response(payload)

    else:
        # Treat the argument as one UID or a comma-separated list of UIDs.
        if ',' in arg:
            uids = [int(x) for x in arg.split(',')]
        else:
            uids = [int(arg)]
        payload['uids'] = uids
        payload['messages'] = messages
        return retrieve_response(payload)


interface = gr.Interface(
    fn=interface_fn,
    inputs=[
        gr.Textbox(label="System Prompt"),
        gr.Dropdown(["TOP", "BENCHMARK", "UIDs"], label="Select Function"),
        gr.Textbox(label="Argument"),
        gr.Textbox(label="Enter your question"),
    ],
    outputs=gr.Textbox(label="Model Responses"),
    title="Explore Bittensor Miners",
    description="Choose a selection mode, enter its argument, and get responses from the selected miners.",
    examples=[
        ["Your task is to provide concise responses to user prompts", "TOP", "5", "What is Bittensor?"],
        ["Your task is to provide accurate, lengthy responses with good lexical flow", "BENCHMARK", "5", "What is a neural network and how does its feeding mechanism work?"],
        ["Act like you have been in the technology field for 10+ years and give an unbiased opinion", "UIDs", "975,517,906,743,869", "What are the potential ethical concerns surrounding artificial intelligence and machine learning in healthcare?"],
    ],
)

interface.queue().launch()
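
# Assuming this file is saved as app.py, run `python app.py` and open the local
# URL Gradio prints (http://127.0.0.1:7860 by default).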