artificialguybr committed on
Commit 1418000
1 Parent(s): f99f5e4

Update app.py

Files changed (1)
  1. app.py +9 -21
app.py CHANGED
@@ -5,24 +5,16 @@ import json
 
 api_key = os.getenv('API_KEY')
 
-def call_mistral_7b_api(content, system_prompt, temperature=0.2, top_p=0.7, max_tokens=1024):
-    print(f"Content: {content}")
-    print(f"System Prompt: {system_prompt}")  # New line to print system prompt
-    print(f"Temperature: {temperature}")
-    print(f"Top P: {top_p}")
-    print(f"Max Tokens: {max_tokens}")
-
-    invoke_url = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/35ec3354-2681-4d0e-a8dd-80325dcf7c63"
+def call_mistral_7b_api(content, system_prompt, temperature, top_p, max_tokens):
+    invoke_url = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/8f4118ba-60a8-4e6b-8574-e38a4067a4a3"
     headers = {
         "Authorization": f"Bearer {api_key}",
         "accept": "text/event-stream",
         "content-type": "application/json",
     }
-
-    # Include the system prompt in the payload
     payload = {
         "messages": [
-            {"role": "system", "content": system_prompt},  # System prompt
+            {"role": "system", "content": system_prompt},
             {"role": "user", "content": content}
         ],
         "temperature": temperature,
@@ -30,7 +22,6 @@ def call_mistral_7b_api(content, system_prompt, temperature=0.2, top_p=0.7, max_tokens=1024):
         "max_tokens": max_tokens,
         "stream": True
     }
-
     response = requests.post(invoke_url, headers=headers, json=payload, stream=True)
     if response.status_code != 200:
         print(f"Request error: {response.status_code}")
@@ -44,11 +35,8 @@ def call_mistral_7b_api(content, system_prompt, temperature=0.2, top_p=0.7, max_tokens=1024):
     for line in response.iter_lines():
         if line:
             decoded_line = line.decode('utf-8').strip()
-
-            # Check whether the line contains valid JSON data
            if decoded_line.startswith('data: {'):
-                json_str = decoded_line[6:]  # Strip the leading 'data: '
-
+                json_str = decoded_line[6:]
                 try:
                     json_line = json.loads(json_str)
                     content_parts = json_line.get("choices", [{}])[0].get("delta", {}).get("content", "")
@@ -58,22 +46,22 @@ def call_mistral_7b_api(content, system_prompt, temperature=0.2, top_p=0.7, max_tokens=1024):
                     print(f"Problematic line: {decoded_line}")
             elif decoded_line == 'data: [DONE]':
                 print("Received completion signal from the API.")
-                break  # Exit the loop once the API signals it has finished sending data
+                break
             else:
                 print(f"Ignored line (not JSON or a completion signal): {decoded_line}")
-
     return response_text
-
+
 content_input = gr.Textbox(lines=2, placeholder="Enter your content here...", label="Content")
+system_prompt_input = gr.Textbox(value="I carefully provide accurate, factual, thoughtful, nuanced answers and am brilliant at reasoning.", label="System Prompt")
 temperature_input = gr.Slider(minimum=0, maximum=1, step=0.01, value=0.2, label="Temperature")
 top_p_input = gr.Slider(minimum=0, maximum=1, step=0.01, value=0.7, label="Top P")
 max_tokens_input = gr.Slider(minimum=1, maximum=1024, step=1, value=1024, label="Max Tokens")
 
 iface = gr.Interface(fn=call_mistral_7b_api,
-                     inputs=[content_input, system_prompt_input, temperature_input, top_p_input, max_tokens_input],  # Include system_prompt_input
+                     inputs=[content_input, system_prompt_input, temperature_input, top_p_input, max_tokens_input],
                      outputs="text",
                      title="Mistral-7B API Explorer",
-                     description="Your existing description here"
+                     description="Explore the capabilities of Mistral-7B Instruct"
                      )
 
 iface.launch()
 
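For reference, the streaming loop above reads the response line by line and extracts text from JSON chunks of the form choices[0].delta.content. Below is a minimal, self-contained sketch of that parsing step; the sample line is hypothetical and stands in for one chunk of the live streamed response from the NVCF endpoint.

import json

# Hypothetical SSE chunk shaped like the ones the handler above expects;
# a real chunk arrives on the streaming response from the NVCF endpoint.
sample_line = 'data: {"choices": [{"delta": {"content": "Hello"}}]}'

response_text = ""
if sample_line.startswith('data: {'):
    json_str = sample_line[6:]  # strip the leading 'data: '
    chunk = json.loads(json_str)
    response_text += chunk.get("choices", [{}])[0].get("delta", {}).get("content", "")
elif sample_line == 'data: [DONE]':
    pass  # completion signal: the API has finished streaming

print(response_text)  # prints: Hello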