Update app.py
app.py CHANGED
@@ -23,7 +23,7 @@ login(token=HF_HUB_TOKEN)
 # # # Load Hebrew and English text generation models
 # hebrew_generator = pipeline("text-generation", model="onlplab/alephbert-base")
 # english_generator = pipeline("text-generation", model="vicgalle/gpt2-open-instruct-v1")
-
+lang_generator = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct")
 
 
 # # hebrew_generator = pipeline("text-generation", model="Norod78/hebrew-gpt_neo-small")
@@ -72,17 +72,19 @@ def detect_language(user_input):
 def generate_response(text):
     language = detect_language(text)
     print(f"Detected language: {language}, ", f"current time: {current_time_gmt()}")
-
+
     if language == "hebrew":
-        hebrew_generator = pipeline("text-generation", model="onlplab/alephbert-base")
-        output = hebrew_generator(text, max_length=100, truncation=True)
+        #hebrew_generator = pipeline("text-generation", model="onlplab/alephbert-base")
+        # output = hebrew_generator(text, max_length=100, truncation=True)
+        output = lang_generator(text, max_length=100, truncation=True)
         print(f"Hebrew model output: {output}, ", f"current time: {current_time_gmt()}") # Debugging
         return output[0]["generated_text"]
 
     elif language == "english":
         #english_generator = pipeline("text-generation", model="mistralai/Mistral-Nemo-Instruct-2407", max_new_tokens=128)
         # english_generator = pipeline("text-generation", model="distilgpt2")
-        output = english_generator(text, max_length=50, truncation=True)
+        #output = english_generator(text, max_length=50, truncation=True)
+        output = lang_generator(text, max_length=50, truncation=True)
         print(f"English model output: {output}, ", f"current time: {current_time_gmt()}") # Debugging
         return output[0]["generated_text"]
 
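For context, a minimal self-contained sketch of the flow after this change is shown below. Only the module-level lang_generator and the branching in generate_response mirror the patch; the detect_language heuristic and the demo call at the bottom are assumptions added for illustration, since the commit does not show app.py's real language detector.

    from transformers import pipeline

    # Load one shared generator at import time (as in the patched app.py)
    # instead of constructing a pipeline on every call to generate_response.
    lang_generator = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct")

    def detect_language(text):
        # Hypothetical stand-in for app.py's detector (not shown in this diff):
        # treat any character in the Hebrew Unicode block as Hebrew.
        return "hebrew" if any("\u0590" <= ch <= "\u05EA" for ch in text) else "english"

    def generate_response(text):
        language = detect_language(text)
        # Same length budgets as the patch: 100 tokens for Hebrew, 50 for English.
        max_len = 100 if language == "hebrew" else 50
        output = lang_generator(text, max_length=max_len, truncation=True)
        return output[0]["generated_text"]

    if __name__ == "__main__":
        print(generate_response("Hello, how are you today?"))

The practical effect of the commit is that the model is loaded once per process at import time, rather than a new hebrew_generator or english_generator pipeline being built inside generate_response on every request, and both language branches now route through the same Phi-3 instruct model.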