Update maker.py
Browse files
maker.py
CHANGED
@@ -57,7 +57,7 @@ Sure, I'd be happy to help you build a bot! I'm generating a title, system promp
|
|
57 |
|
58 |
"""
|
59 |
|
60 |
-
def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_tokens=
|
61 |
client = Client(tulu)
|
62 |
try:
|
63 |
result = client.predict(
|
@@ -112,7 +112,7 @@ def extract_title_prompt_example(text, title, system_prompt, example_input):
|
|
112 |
pass
|
113 |
return text, title, system_prompt, example_input
|
114 |
|
115 |
-
def make_open_gpt(message, history, current_title, current_system_prompt, current_example_input):
    """Run one turn of the GPT-builder chat and refresh the builder UI state.

    Parameters:
        message: the user's latest chat message.
        history: list of (user, bot) message tuples so far.
        current_title / current_system_prompt / current_example_input:
            the builder state to fall back on when the model reply does not
            supply a new value (handled inside extract_title_prompt_example).

    Returns a 9-tuple feeding the Gradio outputs: cleared textbox, updated
    chat history, title, system prompt, example input, preview chat seed,
    example input again (second component), and two visibility updates.

    BUG FIX: the original assigned `system_prompt` inside the function
    (`response, title, system_prompt, ... = ...`), which makes the name
    function-local for the WHOLE body — so the earlier read
    `predict_beta(message, history, system_prompt)` raised UnboundLocalError.
    Renaming the unpacked locals lets that first read resolve to the
    module-level `system_prompt` default, as intended.
    """
    # First read of system_prompt now resolves to the module-level global.
    response = predict_beta(message, history, system_prompt)
    # Parse the model reply; falls back to the current_* values on failure.
    response, new_title, new_system_prompt, new_example_input = extract_title_prompt_example(
        response, current_title, current_system_prompt, current_example_input
    )
    return "", history + [(message, response)], new_title, new_system_prompt, new_example_input, [(None, welcome_preview_message.format(new_title, new_example_input))], new_example_input, gr.Column(visible=True), gr.Group(visible=True)
|
|
|
57 |
|
58 |
"""
|
59 |
|
60 |
+
def predict_beta(message, chatbot=[], system_prompt=system_prompt, max_new_tokens=650, temperature=0.4, top_p=0.9, repetition_penalty=0.9, advanced=True):
|
61 |
client = Client(tulu)
|
62 |
try:
|
63 |
result = client.predict(
|
|
|
112 |
pass
|
113 |
return text, title, system_prompt, example_input
|
114 |
|
115 |
+
def make_open_gpt(message, history, current_title, current_system_prompt, current_example_input, system_prompt=system_prompt):
    """Handle one GPT-builder chat turn and produce the refreshed UI outputs.

    The module-level `system_prompt` is captured as a default argument so the
    local rebinding below cannot shadow the first use (avoids UnboundLocalError).

    Returns a 9-tuple for the Gradio outputs: empty textbox value, appended
    chat history, title, system prompt, example input, preview-chat seed,
    example input (second slot), and two visibility toggles.
    """
    bot_reply = predict_beta(message, history, system_prompt)
    # Pull any updated builder fields out of the reply, keeping current_* as fallbacks.
    bot_reply, title, system_prompt, example_input = extract_title_prompt_example(
        bot_reply, current_title, current_system_prompt, current_example_input
    )
    chat_log = history + [(message, bot_reply)]
    preview_seed = [(None, welcome_preview_message.format(title, example_input))]
    return (
        "",
        chat_log,
        title,
        system_prompt,
        example_input,
        preview_seed,
        example_input,
        gr.Column(visible=True),
        gr.Group(visible=True),
    )
|