Staticaliza committed on
Commit
82c04f6
1 Parent(s): 71b1aac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -17
app.py CHANGED
@@ -5,8 +5,6 @@ import json
5
  import random
6
  import os
7
 
8
- KEY = os.environ.get("KEY")
9
-
10
  base_endpoint = "https://api-inference.huggingface.co/models/"
11
  model_endpoint = f"{base_endpoint}HuggingFaceM4/idefics-80b-instruct"
12
 
@@ -31,12 +29,7 @@ default_pre_text = "(Describe the image) "
31
  def add_file(file):
32
  return file.name, gr.update(label='🖼️ Uploaded!')
33
 
34
- def predict(access_key, token, image, instructions, pre_input, input, seed = 42):
35
-
36
- if (access_key != KEY):
37
- print(">>> MODEL FAILED: Input: " + input + ", Attempted Key: " + access_key)
38
- return ("[UNAUTHORIZED ACCESS]", input, []);
39
-
40
  if not instructions or len(instructions.strip()) <= 0:
41
  instructions = default_instructions
42
 
@@ -58,12 +51,7 @@ def predict(access_key, token, image, instructions, pre_input, input, seed = 42)
58
  )
59
  return response.strip()
60
 
61
- def predict_2(access_key, token, image, instructions, input, seed = 42):
62
-
63
- if (access_key != KEY):
64
- print(">>> MODEL FAILED: Input: " + input + ", Attempted Key: " + access_key)
65
- return ("[UNAUTHORIZED ACCESS]", input, []);
66
-
67
  formatted_input = instructions.replace("█", "<end_of_utterance>").replace("░", input).replace(symbol, f"![](https://statical-stc-itt.hf.space/file={image})")
68
  print(formatted_input)
69
  model = InferenceClient(model_endpoint, headers = { "Authorization": f"Bearer {token}" })
@@ -96,7 +84,6 @@ with gr.Blocks() as demo:
96
  text = gr.Textbox(label="Text Input", placeholder="Message...", lines=2)
97
  seed = gr.Slider( minimum = 0, maximum = 9007199254740991, value = 42, step = 1, interactive = True, label = "Seed" )
98
  token = gr.Textbox(label="Token", placeholder="Token...", lines=1)
99
- access_key = gr.Textbox(label = "Access Key", lines = 1)
100
 
101
  with gr.Column():
102
  output = gr.Textbox(label = "Result", lines = 1)
@@ -104,8 +91,8 @@ with gr.Blocks() as demo:
104
  run2 = gr.Button("Generate Simple")
105
  maintain = gr.Button("☁️")
106
 
107
- run.click(predict, inputs=[access_key, token, image, instructions, pre_text, text, seed], outputs=[output], queue = False)
108
- run2.click(predict_2, inputs=[access_key, token, image, instructions, text, seed], outputs=[output], queue = False)
109
  maintain.click(cloud, inputs = [], outputs = [], queue = False)
110
 
111
  demo.launch()
 
5
  import random
6
  import os
7
 
 
 
8
  base_endpoint = "https://api-inference.huggingface.co/models/"
9
  model_endpoint = f"{base_endpoint}HuggingFaceM4/idefics-80b-instruct"
10
 
 
29
  def add_file(file):
30
  return file.name, gr.update(label='🖼️ Uploaded!')
31
 
32
+ def predict(token, image, instructions, pre_input, input, seed = 42):
 
 
 
 
 
33
  if not instructions or len(instructions.strip()) <= 0:
34
  instructions = default_instructions
35
 
 
51
  )
52
  return response.strip()
53
 
54
+ def predict_2(token, image, instructions, input, seed = 42):
 
 
 
 
 
55
  formatted_input = instructions.replace("█", "<end_of_utterance>").replace("░", input).replace(symbol, f"![](https://statical-stc-itt.hf.space/file={image})")
56
  print(formatted_input)
57
  model = InferenceClient(model_endpoint, headers = { "Authorization": f"Bearer {token}" })
 
84
  text = gr.Textbox(label="Text Input", placeholder="Message...", lines=2)
85
  seed = gr.Slider( minimum = 0, maximum = 9007199254740991, value = 42, step = 1, interactive = True, label = "Seed" )
86
  token = gr.Textbox(label="Token", placeholder="Token...", lines=1)
 
87
 
88
  with gr.Column():
89
  output = gr.Textbox(label = "Result", lines = 1)
 
91
  run2 = gr.Button("Generate Simple")
92
  maintain = gr.Button("☁️")
93
 
94
+ run.click(predict, inputs=[token, image, instructions, pre_text, text, seed], outputs=[output], queue = False)
95
+ run2.click(predict_2, inputs=[token, image, instructions, text, seed], outputs=[output], queue = False)
96
  maintain.click(cloud, inputs = [], outputs = [], queue = False)
97
 
98
  demo.launch()