added rate limit resilience
Julian-Hans committed · c99fd10
Parent(s): 9eba4f6

Files changed:
- blip_image_caption_large.py +4 -1
- musicgen_small.py +9 -7
- phi3_mini_4k_instruct.py +4 -1
blip_image_caption_large.py
CHANGED
@@ -22,5 +22,8 @@ class Blip_Image_Caption_Large:
 
     def caption_image_api(self, image_path):
         client = InferenceClient(config.IMAGE_CAPTION_MODEL, token=config.HF_API_TOKEN)
-        result = client.image_to_text(image_path).generated_text
+        try:
+            result = client.image_to_text(image_path).generated_text
+        except Exception as e:
+            result = f"Error: {e}"
         return result
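The broad except Exception above also swallows rate-limit responses without distinguishing them from other failures. For reference only (not part of this commit), a minimal sketch of narrowing the handler to huggingface_hub's HfHubHTTPError and checking for HTTP 429; the caption_image_checked helper and the config import are assumptions for illustration:

from huggingface_hub import InferenceClient
from huggingface_hub.utils import HfHubHTTPError

import config  # assumed to provide IMAGE_CAPTION_MODEL and HF_API_TOKEN as above


def caption_image_checked(image_path):
    # Same call as in the diff, but only HTTP errors from the Hub are caught,
    # and a 429 (rate limit) is reported distinctly from other failures.
    client = InferenceClient(config.IMAGE_CAPTION_MODEL, token=config.HF_API_TOKEN)
    try:
        return client.image_to_text(image_path).generated_text
    except HfHubHTTPError as e:
        if e.response is not None and e.response.status_code == 429:
            return "Error: rate limited by the Inference API, try again later"
        return f"Error: {e}"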
musicgen_small.py
CHANGED
@@ -36,11 +36,13 @@ class Musicgen_Small:
         # EDITS: changed variables to match the code
 
         # Convert the byte content into an audio array
-        audio_buffer = BytesIO(response.content)
-        # Use scipy to save the audio, assuming it's a WAV format audio stream
-        # If it's raw PCM audio, you would need to decode it first.
-        with open(audio_path, "wb") as f:
-            f.write(audio_buffer.read())
-        # -----ATTRIBUTION-END-----
-
+        try:
+            audio_buffer = BytesIO(response.content)
+            # Use scipy to save the audio, assuming it's a WAV format audio stream
+            # If it's raw PCM audio, you would need to decode it first.
+            with open(audio_path, "wb") as f:
+                f.write(audio_buffer.read())
+            # -----ATTRIBUTION-END-----
+        except Exception as e:
+            print(f"Error: {e}")
 
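The new block writes the raw response bytes straight to audio_path; the surrounding comments talk about decoding the stream with scipy, so for context here is a minimal sketch (an illustration, not code from this repo) of reading a WAV payload into an array, assuming response.content really is WAV data:

from io import BytesIO

import scipy.io.wavfile


def decode_wav_bytes(content: bytes):
    # scipy.io.wavfile.read accepts a file-like object and returns
    # (sampling_rate, numpy array of samples); it raises if the bytes are not WAV.
    sampling_rate, audio_array = scipy.io.wavfile.read(BytesIO(content))
    return sampling_rate, audio_array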
phi3_mini_4k_instruct.py
CHANGED
@@ -38,5 +38,8 @@ class Phi3_Mini_4k_Instruct:
 
     def generate_text_api(self, messages):
         client = InferenceClient(config.LLM_MODEL, token=config.HF_API_TOKEN)
-        result = client.chat_completion(messages, max_tokens=config.LLM_MAX_NEW_TOKENS, temperature=config.LLM_TEMPERATURE, top_p=config.LLM_TOP_P).choices[0].message.content
+        try:
+            result = client.chat_completion(messages, max_tokens=config.LLM_MAX_NEW_TOKENS, temperature=config.LLM_TEMPERATURE, top_p=config.LLM_TOP_P).choices[0].message.content
+        except Exception as e:
+            result = f"Error: {e}"
         return result
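All three handlers fail once and return or print an error string. A common next step for rate-limit resilience, sketched here as an assumption rather than anything in this commit, is to retry the call with exponential backoff when the Hub answers 429; retry_call and its parameters are illustrative names:

import time

from huggingface_hub.utils import HfHubHTTPError


def retry_call(fn, attempts=3, base_delay=2.0):
    # Retries fn only on HTTP 429; other errors (and the final failed attempt)
    # propagate to the caller unchanged.
    for attempt in range(attempts):
        try:
            return fn()
        except HfHubHTTPError as e:
            status = e.response.status_code if e.response is not None else None
            if status != 429 or attempt == attempts - 1:
                raise
            time.sleep(base_delay * (2 ** attempt))  # wait 2s, 4s, 8s, ...

Any of the three API calls above could be wrapped this way, e.g. retry_call(lambda: client.image_to_text(image_path)).generated_text.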