Spaces:
Running
Running
File size: 1,955 Bytes
e519d95 8018b87 1bac931 8018b87 e519d95 1bac931 e519d95 8018b87 e519d95 1bac931 e519d95 1bac931 e519d95 1bac931 e519d95 8018b87 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 |
# Bootstrap: install runtime dependencies at startup (Hugging Face Space
# pattern — the environment is provisioned at launch time, not build time).
import os
# NOTE(review): os.system with a shell string runs pip at import time;
# subprocess.run([...], shell=False) would be the safer idiom — confirm
# before changing, since Space startup relies on this exact behavior.
os.system('pip install ipython')
from IPython.display import clear_output
os.system('pip install python-dotenv pydub ffmpeg-python nltk gradio==3.48.0 OpenAI gradio_client emoji')
# Project-local helpers. The star imports are presumed to provide
# SECRET_TOKEN, ROLES, get_sentence and generate_speech_from_history2
# (all used below) — TODO confirm against utils/tts.py and utils/llm.py.
from utils.tts import *
from utils.llm import *
clear_output()  # hide the noisy pip-install output from the Space log
import gradio as gr
# Define the main function for the API endpoint that takes the input text and chatbot role
def generate_story(secret_token, input_text, chatbot_role):
    """Generate a story from *input_text* and synthesize speech for it.

    Parameters
    ----------
    secret_token : str
        Must match the module-level ``SECRET_TOKEN`` (from the utils star
        imports) or the request is rejected.
    input_text : str
        User prompt that seeds the story.
    chatbot_role : str
        One of the ``ROLES`` choices; selects the narrator persona.

    Returns
    -------
    Whatever ``generate_speech_from_history2`` returns (the speech payload
    for the full story), or an empty list when the sentence generator
    produced no history at all.

    Raises
    ------
    gr.Error
        When ``secret_token`` does not match ``SECRET_TOKEN``.
    """
    if secret_token != SECRET_TOKEN:
        # Fixed message on purpose — never echo the supplied token back.
        # Was a pointless f-string with no placeholders (ruff F541).
        raise gr.Error(
            'Invalid secret token. Secret Token: secret')

    # History is a list of [user, assistant] pairs; seed it with the prompt.
    history = [[input_text, None]]

    # get_sentence yields (sentence, updated_history) pairs as the LLM
    # streams text — presumably a generator; TODO confirm in utils/llm.py.
    story_text = ""      # full story, sentences separated by single spaces
    last_history = None  # final history snapshot after all sentences

    for sentence, updated_history in get_sentence(history, chatbot_role):
        if sentence:  # skip empty/None chunks from the generator
            story_text += sentence.strip() + " "
        last_history = updated_history  # keep the most recent history

    if last_history is None:
        # Generator produced nothing — preserve the empty-list contract.
        return []

    # The speech helper expects (user, assistant) tuples, not lists.
    history_tuples = [tuple(entry) for entry in last_history]
    return generate_speech_from_history2(history_tuples, chatbot_role, story_text)
# Expose generate_story as a JSON API endpoint. The queue serializes
# concurrent requests so story/TTS generation is not run in parallel.
demo = gr.Interface(
    fn=generate_story,
    inputs=[
        gr.Text(label='Secret Token'),
        gr.Textbox(placeholder="Enter your text here"),
        gr.Dropdown(choices=ROLES, label="Select Chatbot Role"),
    ],
    outputs="json",
)
demo.queue()
# Removed the stray trailing "|" (page-extraction artifact) that would have
# been a syntax error in the real file.
demo.launch(debug=True)