Spaces:
Runtime error
Runtime error
duyngtr
committed on
Commit
·
72fee55
0
Parent(s):
init code
Browse files- README.md +37 -0
- app_story.py +379 -0
- client_config.py +52 -0
- config/mj_guided_first.json +825 -0
- config/mj_guided_first_old.json +590 -0
- config/mj_guided_second.json +960 -0
- config/mj_guided_second_old.json +858 -0
- grpc_services/generated/commons_pb2.py +26 -0
- grpc_services/generated/commons_pb2.pyi +32 -0
- grpc_services/generated/commons_pb2_grpc.py +29 -0
- grpc_services/generated/flowbot_service_pb2.py +55 -0
- grpc_services/generated/flowbot_service_pb2.pyi +297 -0
- grpc_services/generated/flowbot_service_pb2_grpc.py +363 -0
- grpc_services/generated/question_gen_service_pb2.py +59 -0
- grpc_services/generated/question_gen_service_pb2.pyi +366 -0
- grpc_services/generated/question_gen_service_pb2_grpc.py +188 -0
- requirements.txt +22 -0
- tmp_files/metadata.csv +1 -0
README.md
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Buddy OS AI Service
|
| 3 |
+
emoji: 🔥
|
| 4 |
+
colorFrom: gray
|
| 5 |
+
colorTo: pink
|
| 6 |
+
sdk: streamlit
|
| 7 |
+
sdk_version: 1.38.0
|
| 8 |
+
app_file: app_story.py
|
| 9 |
+
pinned: false
|
| 10 |
+
---
|
| 11 |
+
|
| 12 |
+
# Flow-based chatbot service repository
|
| 13 |
+
|
| 14 |
+
## Documents:
|
| 15 |
+
- Wiki page: [click here](https://olli-ai.atlassian.net/wiki/x/CACcag)
|
| 16 |
+
|
| 17 |
+
## Env preparation
|
| 18 |
+
Prepare the .env file like this for testing the code on k8s
|
| 19 |
+
|
| 20 |
+
```
|
| 21 |
+
ENV="local"
|
| 22 |
+
HOST="dev.flowbot.aws.iviet.com"
|
| 23 |
+
FLOWBOT_PORT=443
|
| 24 |
+
OPENAI_API_KEY=<YOUR API KEY>
|
| 25 |
+
```
|
| 26 |
+
|
| 27 |
+
## Code for testing
|
| 28 |
+
|
| 29 |
+
```
|
| 30 |
+
python client_chat.py <scenario>
|
| 31 |
+
```
|
| 32 |
+
|
| 33 |
+
Available scenarios:
|
| 34 |
+
|
| 35 |
+
- ai_tutor: To showcase AI Tutor usecase (Tool used: Evaluation)
|
| 36 |
+
|
| 37 |
+
- example: To test implemented tools (intent, entity, evaluation)
|
app_story.py
ADDED
|
@@ -0,0 +1,379 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- Standard library ---
import base64
import io
import json
import os
import time
import uuid

# --- Third-party ---
import grpc  # fix: was imported twice in the original
import mutagen
import requests
import streamlit as st
# exception-group-aware variant of traceback.print_exc (backport package)
from exceptiongroup import print_exc
from dotenv import load_dotenv
from elevenlabs import stream
from elevenlabs.client import ElevenLabs
from grpc_reflection.v1alpha import reflection
from mutagen import m4a, mp3

# --- Generated gRPC services ---
import grpc_services.generated.flowbot_service_pb2_grpc as flowbot_service_pb2_grpc
from grpc_services.generated.flowbot_service_pb2 import (
    BotConfig,
    BotParams,
    ChatRequest,
    ChatParams,
    StartRequest,
    StopRequest,
    Variable,
)
from grpc_services.generated.commons_pb2 import SessionInfo

# --- Local ---
# Client Configuration (Mockup for Demo Purposes)
import client_config as usrcfg  # Ensure you have this module for scenario configs

# from demo_utils.utils import Exporter

# Load environment variables
load_dotenv()
|
| 40 |
+
|
| 41 |
+
# Server Configuration
# HOST/FLOWBOT_PORT come from the .env file (see README); defaults target a
# local flowbot instance.
MONITOR_SERVER_INTERFACE = os.environ.get("HOST", "localhost")
MONITOR_SERVER_PORT = int(os.environ.get("FLOWBOT_PORT", 50001))
CHANNEL_IP = f"{MONITOR_SERVER_INTERFACE}:{MONITOR_SERVER_PORT}"
# Hosts under this suffix sit behind the nginx ingress and require TLS
# (see get_grpc_stub).
NGINX_HOST_POSTFIX = "aws.iviet.com"
# ElevenLabs voice IDs for the two story speakers — no defaults, so these are
# None unless set in the environment (TODO confirm they are required).
NARRATOR_ID = os.environ.get("NARRATOR_ID")
PEPPA_ID = os.environ.get("PEPPA_ID")
ELEVEN_API_KEY = os.environ.get("ELEVEN_API_KEY")


# Chat-bubble avatar per speaker role (Streamlit material icons / emoji).
AVATAR = {
    "Sfx": ":material/music_cast:",
    "Narrator": ":material/menu_book:",
    "Peppa": "🐽",
    "user": ":material/mood:"
}

# Maps a speaker name to its ElevenLabs voice ID for synthesis.
SPEAKER_TO_ID = {
    "Narrator": NARRATOR_ID,
    "Peppa": PEPPA_ID
}


tts_client = ElevenLabs(api_key=ELEVEN_API_KEY)

import pandas as pd

# Local cache of synthesized audio: files live in TMP_DIR, and TMP_METADATA is
# a CSV index (story, speaker, text, filepath) loaded into `df` at startup and
# appended to by render_stub_response.
TMP_DIR = "tmp_files"
TMP_METADATA = "tmp_files/metadata.csv"
# Extra pause (seconds) after an audio clip finishes before continuing.
WAITING_TIME = 1.5

df = pd.read_csv(TMP_METADATA)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class AudioGetter:
    """Generators that yield raw mp3 audio bytes from different sources.

    Each classmethod is a generator yielding byte chunks, consumed by
    get_audio_base64(). Sources: the local file cache (`df` metadata), the
    ElevenLabs TTS API, or a direct download URL.
    """

    @classmethod
    def from_local(cls, text, speaker):
        """Yield cached audio bytes for (speaker, text) from the metadata df.

        Raises:
            FileNotFoundError: no cached entry exists. Note that because this
                is a generator, the exception surfaces on first iteration,
                not at call time — get_audio_base64 iterates inside its
                try-block, so the fallback-to-synthesis path still works.
        """
        sub_df = df[(df['speaker'] == speaker) & (df['text'] == text)]

        if len(sub_df) > 0:
            filepath = sub_df['filepath'].iloc[0]
            with open(filepath, 'rb') as f:
                data = f.read()
            yield data
        else:
            raise FileNotFoundError

    @classmethod
    def from_text(cls, text, speaker):
        """Synthesize `text` with ElevenLabs and yield audio chunks as they stream."""
        # Renamed from `stream` to avoid shadowing the module-level
        # `elevenlabs.stream` import.
        audio_stream = tts_client.text_to_speech.convert_as_stream(
            text=text,
            voice_id=SPEAKER_TO_ID.get(speaker),
            model_id=os.environ.get("MODEL_ID")
        )

        for chunk in audio_stream:
            if isinstance(chunk, bytes):
                yield chunk

    @classmethod
    def from_url(cls, url):
        """Download the audio file at `url` and yield its content.

        Fails fast on HTTP errors: the original yielded the 4xx/5xx error
        page body as if it were audio, which later broke mp3 parsing with a
        confusing message.
        """
        response = requests.get(url)
        response.raise_for_status()

        yield response.content
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def get_audio_base64(audio_getter_fn, *args, **kwargs):
    """Collect audio bytes from a getter generator and base64-encode them.

    Args:
        audio_getter_fn: one of the AudioGetter generators; invoked with
            *args/**kwargs and iterated for mp3 byte chunks.

    Returns:
        tuple: (base64-encoded audio string, duration in seconds), or
        (None, None) when processing fails.

    Raises:
        FileNotFoundError: propagated from AudioGetter.from_local so callers
            can fall back to synthesizing the audio instead.
    """
    try:
        audio_buffer = io.BytesIO()

        # Accumulate every chunk into the in-memory buffer first ...
        for chunk in audio_getter_fn(*args, **kwargs):
            audio_buffer.write(chunk)

        # ... then parse the mp3 header once with mutagen. (The original
        # re-parsed the whole buffer after every chunk — redundant work with
        # the same final result.)
        duration = mp3.MP3(audio_buffer).info.length

        audio_buffer.seek(0)
        # Encode the audio to base64 for embedding / persistence.
        audio_base64 = base64.b64encode(audio_buffer.read()).decode('utf-8')

        # Return both base64 and duration
        return audio_base64, duration

    except FileNotFoundError as e:
        # Cache miss — let the caller decide to synthesize instead.
        raise e

    except Exception as e:
        st.error(f"Error processing audio: {e}")
        # Fix: return a 2-tuple so the callers' unpacking
        # (`b64, dur = get_audio_base64(...)`) does not raise TypeError
        # on the error path; the original returned a bare None.
        return None, None
|
| 148 |
+
|
| 149 |
+
# Initialize gRPC channel
|
| 150 |
+
def get_grpc_stub():
    """Open a gRPC channel to the flowbot service and return its stub.

    Uses a TLS channel when the target host sits behind the nginx ingress
    (NGINX_HOST_POSTFIX), plain TCP otherwise. Message-size limits are raised
    well above the defaults to allow large payloads.
    """
    channel_options = [
        ("grpc.max_receive_message_length", 1166528012),
        ("grpc.max_send_message_length", 1166528012),
    ]

    if NGINX_HOST_POSTFIX in CHANNEL_IP:
        channel = grpc.secure_channel(
            CHANNEL_IP,
            options=channel_options,
            credentials=grpc.ssl_channel_credentials(),
        )
    else:
        channel = grpc.insecure_channel(CHANNEL_IP, options=channel_options)

    # Enable reflection
    return flowbot_service_pb2_grpc.FlowBotServiceStub(channel)
|
| 163 |
+
|
| 164 |
+
# Sidebar Components
|
| 165 |
+
def render_sidebar():
    """Draw the sidebar controls.

    Returns:
        tuple: (selected scenario name, True when "Start conversation" was
        pressed this rerun). Also stores the "Play audio" toggle state in
        st.session_state.play_audio.
    """
    st.sidebar.markdown("`HINT`: If audio does not play automatically, kindly please press the PLAY button.")
    st.sidebar.title("Settings")

    # Only the Mighty Jaxx guided stories are exposed in this demo.
    jaxx_scenarios = [
        name for name in list(usrcfg.script_and_config.keys()) if "Jaxx" in name
    ]
    scenario = st.sidebar.selectbox(
        "Select Scenario",
        jaxx_scenarios,
    )
    st.session_state.play_audio = st.sidebar.toggle("Play audio", value=True)

    start_button = st.sidebar.button("Start conversation", use_container_width=True)

    return scenario, start_button
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def render_stub_response(stub_response):
    """Render a stream of flowbot responses as chat bubbles with audio.

    Iterates the gRPC response stream, displays each metadata response as a
    chat message, and (when "Play audio" is enabled) plays matching audio,
    sourced in priority order: the URL in the response, the local cache, or
    ElevenLabs synthesis (whose result is then cached to disk and appended to
    the metadata CSV). Audio failures are logged but never fatal.
    """
    global df

    for response in stub_response:
        url = None

        # Quick fix to adapt new proto: only "metadata" responses are shown.
        if response.WhichOneof("response") == "metadata":
            response = response.metadata
        else:
            continue

        response_type = response.response_type

        text_col, audio_col = st.columns([0.8, 0.2])

        if response_type == "text_read_along":

            content = json.loads(response.content)
            text = content['text']
            url = content['audio_url']

            additional_info = json.loads(response.additional_info or "{}")
            speaker = additional_info.get('speaker', "Narrator")

            st.session_state.messages.append({"role": speaker, "content": text})

            with text_col:
                with st.chat_message(speaker, avatar=AVATAR[speaker]):
                    st.markdown(text)

            if st.session_state.get("play_audio", False):
                audio_b64, duration = None, None

                try:
                    if url is not None:
                        print(f"Fetch {url}")
                        audio_b64, duration = get_audio_base64(AudioGetter.from_url, url)

                    # No URL supplied: try the local cache, then synthesize.
                    else:
                        try:
                            audio_b64, duration = get_audio_base64(AudioGetter.from_local, text, speaker)
                            print(f"Fetch local: {text}")

                        except FileNotFoundError:
                            print(f"Synthesize: {text}")
                            audio_b64, duration = get_audio_base64(AudioGetter.from_text, text, speaker)

                            # Use audio_b64 to save the synthesized clip ...
                            audio_filename = os.path.join(TMP_DIR, speaker + '-' + text[:20].replace(" ", "_") + ".mp3")

                            with open(audio_filename, 'wb') as f:
                                f.write(base64.b64decode(audio_b64))

                            # ... and register it in the metadata CSV.
                            new_df = pd.DataFrame({
                                'story': ["[OLLI ft. Mighty Jaxx] Guided Story 1"],
                                'speaker': [speaker],
                                'text': [text],
                                "filepath": [audio_filename]
                            })
                            df = pd.concat([df, new_df])

                            # Fix: append only the NEW row to the CSV. The
                            # original appended the whole `df` with mode='a',
                            # re-writing every existing row on each synthesis
                            # and duplicating the file endlessly.
                            new_df.to_csv(TMP_METADATA, mode='a', header=False, index=False)

                    has_audio = duration is not None and duration > 0.0

                    if has_audio:
                        with audio_col:
                            st.audio(base64.b64decode(audio_b64), autoplay=True)

                        # Block until playback (plus a small buffer) finishes
                        # so consecutive clips don't talk over each other.
                        time.sleep(duration + WAITING_TIME)

                except Exception:
                    # Best effort: audio problems must not kill the chat UI,
                    # but log them instead of swallowing silently.
                    print_exc()

        elif response_type == "audio":
            # Sound-effect responses get a placeholder bubble.
            text = "♫♩♬♪♩"
            speaker = "Sfx"

            st.session_state.messages.append({"role": speaker, "content": text})

            with text_col:
                with st.chat_message(speaker, avatar=AVATAR[speaker]):
                    st.markdown(text)

            if st.session_state.get("play_audio", False):
                url = json.loads(response.content)['audio_url']
                try:
                    if url is not None:
                        print(f"Fetch {url}")
                        audio_b64, duration = get_audio_base64(AudioGetter.from_url, url)

                        has_audio = duration is not None and duration > 0.0

                        if has_audio:
                            with audio_col:
                                st.audio(base64.b64decode(audio_b64), autoplay=True)
                            time.sleep(duration)

                except Exception:
                    print_exc()
|
| 289 |
+
|
| 290 |
+
# Initialize Bot
|
| 291 |
+
def init_bot(stub, scenario):
    """Create and start a flowbot session for `scenario`.

    Loads the scenario's bot script and variable bindings from client_config,
    creates the bot, initializes the chat, starts the conversation, and
    renders the opening responses.

    Returns:
        SessionInfo: the freshly created session's identity, to be reused by
        subsequent Chat / StopConversation calls.
    """
    session_info = SessionInfo(session_id=str(uuid.uuid4()))

    # Load scenario config: script path plus optional variable bindings.
    scenario_cfg = usrcfg.script_and_config[scenario]
    with open(scenario_cfg["path"], "r") as script_file:
        botscript = script_file.read()
    scenario_vars = scenario_cfg.get("var", {})

    # Create the bot from the script.
    stub.CreateBot(BotConfig(session_info=session_info, bot_config=botscript))

    # Initialize the chat with the scenario's variables.
    variable_list = [
        {"name": name, "value": value} for name, value in scenario_vars.items()
    ]
    stub.InitBot(
        ChatParams(
            session_info=session_info,
            bot_params=BotParams(variables=variable_list),
        )
    )

    # Start the conversation and render the opening responses.
    opening_responses = stub.StartConversation(
        StartRequest(session_info=session_info)
    )
    with st.spinner("Voice Box is generating..."):
        render_stub_response(opening_responses)

    return session_info
|
| 321 |
+
|
| 322 |
+
def restart_session():
    """Reset per-session UI state: clear the chat transcript and audio list."""
    st.session_state.messages = []
    st.session_state.audios = []
|
| 325 |
+
|
| 326 |
+
# Main App
|
| 327 |
+
def main():
    """Streamlit entry point: sidebar, chat input, and the chat loop."""
    st.title("Guided Story Telling")

    # Sidebar: scenario picker + start button.
    scenario, start_button = render_sidebar()

    st.markdown("---")

    # Main layout — note st.chat_input returns the user's message for THIS
    # rerun, or None when nothing was typed.
    user_input = st.chat_input("Type your message here...")

    # Initialize gRPC stub (a new channel on every rerun — Streamlit reruns
    # the whole script per interaction).
    stub = get_grpc_stub()

    if start_button:
        # Fresh session: clear history, then create/start the bot.
        restart_session()
        st.session_state.started = True
        st.session_state.session_info = init_bot(stub, scenario)

    # Chat Interface
    if st.session_state.get("started", False):
        # chat_loop
        if user_input:
            # Replay chat message history so earlier turns stay visible.
            for message in st.session_state.messages:
                speaker = message['role']
                with st.chat_message(speaker, avatar=AVATAR[speaker]):
                    st.markdown(message["content"])

            # Append new user input
            st.session_state.messages.append({"role": "user", "content": user_input})
            with st.chat_message("user", avatar=AVATAR['user']):
                st.markdown(user_input)

            # Send to gRPC and display response
            with st.spinner("AI is generating..."):
                stub_response = stub.Chat(ChatRequest(session_info=st.session_state.session_info, message=user_input))

                render_stub_response(stub_response)

        # Stop conversation — indentation reconstructed from a mangled paste:
        # assumed to sit at the "started" level (sibling of the user_input
        # branch), so the button is available even without new input.
        # TODO confirm against the original file.
        if st.sidebar.button("Stop conversation", use_container_width=True):
            restart_session()
            chat_info = stub.StopConversation(StopRequest(session_info=st.session_state.session_info))
            st.write("Conversation stopped.")
            st.session_state.started = False

    else:
        st.write("Please select a scenario and press 'Start'.")

if __name__ == "__main__":
    main()
|
client_config.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Maps a scenario name (shown in the app's scenario selector) to its bot
# script path and optional variable bindings passed to the bot at init.
script_and_config = {
    "ai_tutor": {
        "path": "config/AI_Tutor_modified.json",
        "var": {
            # Hardcoded expected answers / audio tracks for the AI Tutor demo.
            "answer_2_1": "His mum",
            "answer_2_2": "He signed a recording contract",
            "answer_2_3": "He is a record producer, actor, DJ and a businessman",
            "answer_2_4": "Science and Technology",
            "answer_3_1": "Because he is very talented. He also uses his own money to help young people get a better education",
            "answer_3_2": "She plays her favourite Black Eyed Peas song",
            "file_track_1": "track_1.mp3",
            "file_track_2": "track_2.mp3",
            "file_track_3": "track_3.mp3",
        }
    },

    # Shortened variant of ai_tutor for quick testing; same answers, but the
    # audio tracks use full data/ paths.
    "ai_tutor_test": {
        "path": "config/AI_Tutor_short.json",
        "var": {
            # Hardcoded
            "answer_2_1": "His mum",
            "answer_2_2": "He signed a recording contract",
            "answer_2_3": "He is a record producer, actor, DJ and a businessman",
            "answer_2_4": "Science and Technology",
            "answer_3_1": "Because he is very talented. He also uses his own money to help young people get a better education",
            "answer_3_2": "She plays her favourite Black Eyed Peas song",
            "file_track_1": "data/ai_tutor/track_1.mp3",
            "file_track_2": "data/ai_tutor/track_2.mp3",
            "file_track_3": "data/ai_tutor/track_3.mp3",
        }
    },

    # Minimal scenario exercising the implemented tools (see README).
    "example": {
        "path": "config/example.json",
        "var": {
            "answer_2_1": "His mum",
        }
    },

    # Guided-story scenarios; app_story.py only lists names containing "Jaxx".
    # No "var" key: init_bot falls back to an empty variable dict.
    "[OLLI ft. Mighty Jaxx] Story 1 -- Peppa prepares Lunch": {
        "path": "config/mj_guided_first.json",
    },

    "[OLLI ft. Mighty Jaxx] Story 2 -- Peppa at the Market": {
        "path": "config/mj_guided_second.json",
    }
}


# CSV with AI Tutor content (consumed elsewhere; unused by app_story.py).
ai_tutor = "config/AI_Tutor.csv"
|
config/mj_guided_first.json
ADDED
|
@@ -0,0 +1,825 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"intent_config": {
|
| 3 |
+
"rule_based": true,
|
| 4 |
+
"vector": false,
|
| 5 |
+
"llm": true,
|
| 6 |
+
"intent": [
|
| 7 |
+
{
|
| 8 |
+
"name": "TOMATOES",
|
| 9 |
+
"samples": [
|
| 10 |
+
"tomatoes",
|
| 11 |
+
"tomato"
|
| 12 |
+
]
|
| 13 |
+
},
|
| 14 |
+
{
|
| 15 |
+
"name": "CUCUMBERS",
|
| 16 |
+
"samples": [
|
| 17 |
+
"cucumbers",
|
| 18 |
+
"cucumber"
|
| 19 |
+
]
|
| 20 |
+
},
|
| 21 |
+
{
|
| 22 |
+
"name": "CARROTS",
|
| 23 |
+
"samples": [
|
| 24 |
+
"carrots",
|
| 25 |
+
"carrot"
|
| 26 |
+
]
|
| 27 |
+
},
|
| 28 |
+
{
|
| 29 |
+
"name": "PUT_IN_SINK",
|
| 30 |
+
"samples": [
|
| 31 |
+
"sink",
|
| 32 |
+
"put in sink",
|
| 33 |
+
"wash"
|
| 34 |
+
]
|
| 35 |
+
},
|
| 36 |
+
{
|
| 37 |
+
"name": "ASK_NEW_SPOON",
|
| 38 |
+
"samples": [
|
| 39 |
+
"new spoon",
|
| 40 |
+
"ask mummy",
|
| 41 |
+
"get another",
|
| 42 |
+
"ask new",
|
| 43 |
+
"ask for new"
|
| 44 |
+
]
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"name": "MIX",
|
| 48 |
+
"samples": [
|
| 49 |
+
"mix",
|
| 50 |
+
"mix up",
|
| 51 |
+
"mixing",
|
| 52 |
+
"mix it good"
|
| 53 |
+
]
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"name": "TRY_FIRST",
|
| 57 |
+
"samples": [
|
| 58 |
+
"try first",
|
| 59 |
+
"taste",
|
| 60 |
+
"me first"
|
| 61 |
+
]
|
| 62 |
+
},
|
| 63 |
+
{
|
| 64 |
+
"name": "SHARE_WITH_GEORGE",
|
| 65 |
+
"samples": [
|
| 66 |
+
"share",
|
| 67 |
+
"george",
|
| 68 |
+
"give to george",
|
| 69 |
+
"let george"
|
| 70 |
+
]
|
| 71 |
+
},
|
| 72 |
+
{
|
| 73 |
+
"name": "COOKIES",
|
| 74 |
+
"samples": [
|
| 75 |
+
"cookies",
|
| 76 |
+
"bake cookies"
|
| 77 |
+
]
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"name": "SANDWICHES",
|
| 81 |
+
"samples": [
|
| 82 |
+
"sandwiches",
|
| 83 |
+
"make sandwiches"
|
| 84 |
+
]
|
| 85 |
+
}
|
| 86 |
+
],
|
| 87 |
+
"global_intent": [
|
| 88 |
+
{
|
| 89 |
+
"name": "STOP",
|
| 90 |
+
"samples": [
|
| 91 |
+
"stop",
|
| 92 |
+
"let's end here"
|
| 93 |
+
]
|
| 94 |
+
},
|
| 95 |
+
{
|
| 96 |
+
"name": "other",
|
| 97 |
+
"samples": [
|
| 98 |
+
"I don't know"
|
| 99 |
+
]
|
| 100 |
+
}
|
| 101 |
+
]
|
| 102 |
+
},
|
| 103 |
+
"global_route": {
|
| 104 |
+
"stop": [
|
| 105 |
+
{
|
| 106 |
+
"type": "Intent",
|
| 107 |
+
"node_name": "stop_intent",
|
| 108 |
+
"content": [
|
| 109 |
+
"stop"
|
| 110 |
+
],
|
| 111 |
+
"next_node": {
|
| 112 |
+
"stop": "stop_message"
|
| 113 |
+
},
|
| 114 |
+
"on_fail": {
|
| 115 |
+
"route": "{caller}"
|
| 116 |
+
}
|
| 117 |
+
},
|
| 118 |
+
{
|
| 119 |
+
"type": "Agent",
|
| 120 |
+
"node_name": "stop_message",
|
| 121 |
+
"text_candidates": [
|
| 122 |
+
"stop"
|
| 123 |
+
],
|
| 124 |
+
"next_node": {
|
| 125 |
+
"stop": "end"
|
| 126 |
+
}
|
| 127 |
+
}
|
| 128 |
+
],
|
| 129 |
+
"other": [
|
| 130 |
+
{
|
| 131 |
+
"type": "Agent",
|
| 132 |
+
"node_name": "other_intent_message",
|
| 133 |
+
"text_candidates": "Sorry, please try again",
|
| 134 |
+
"next_node": "{sys.caller}"
|
| 135 |
+
}
|
| 136 |
+
]
|
| 137 |
+
},
|
| 138 |
+
"workflow": [
|
| 139 |
+
{
|
| 140 |
+
"type": "Play",
|
| 141 |
+
"node_name": "opening_background_music",
|
| 142 |
+
"audio_url_candidates": [
|
| 143 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/local/from-admin/1734534796395.mp3"
|
| 144 |
+
],
|
| 145 |
+
"next_node": "scene_setting_narration",
|
| 146 |
+
"requires_response": false
|
| 147 |
+
},
|
| 148 |
+
{
|
| 149 |
+
"type": "text_read_along",
|
| 150 |
+
"node_name": "scene_setting_narration",
|
| 151 |
+
"text_candidates": [
|
| 152 |
+
"It's lunchtime at Peppa's house! Peppa is excited to prepare lunch all by herself today. Let's help Peppa make something delicious!"
|
| 153 |
+
],
|
| 154 |
+
"next_node": "peppa_invitation",
|
| 155 |
+
"additional_info": {
|
| 156 |
+
"speaker": "Narrator"
|
| 157 |
+
},
|
| 158 |
+
"requires_response": false,
|
| 159 |
+
"audio_url_candidates": [
|
| 160 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-1-narrator-1734597070860.mp3"
|
| 161 |
+
]
|
| 162 |
+
},
|
| 163 |
+
{
|
| 164 |
+
"type": "text_read_along",
|
| 165 |
+
"node_name": "peppa_invitation",
|
| 166 |
+
"text_candidates": [
|
| 167 |
+
"Mummy says I can make lunch today! But I'll need some help. Will you join me in the kitchen?"
|
| 168 |
+
],
|
| 169 |
+
"next_node": "vegetable_selection",
|
| 170 |
+
"additional_info": {
|
| 171 |
+
"speaker": "Peppa"
|
| 172 |
+
},
|
| 173 |
+
"requires_response": false,
|
| 174 |
+
"audio_url_candidates": [
|
| 175 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-2-peppa-1734597070861.mp3"
|
| 176 |
+
]
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"type": "text_read_along",
|
| 180 |
+
"node_name": "vegetable_selection",
|
| 181 |
+
"text_candidates": [
|
| 182 |
+
"Peppa needs to choose some vegetables for her salad. Should she use tomatoes, cucumbers, or carrots? Help her decide!"
|
| 183 |
+
],
|
| 184 |
+
"next_node": "vegetable_menu",
|
| 185 |
+
"additional_info": {
|
| 186 |
+
"speaker": "Narrator"
|
| 187 |
+
},
|
| 188 |
+
"requires_response": false,
|
| 189 |
+
"audio_url_candidates": [
|
| 190 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-1-question-narrator-1734597071880.mp3"
|
| 191 |
+
]
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"type": "Text_Choice",
|
| 195 |
+
"node_name": "vegetable_menu",
|
| 196 |
+
"text_choices": [
|
| 197 |
+
"Tomatoes",
|
| 198 |
+
"Cucumbers",
|
| 199 |
+
"Carrots"
|
| 200 |
+
],
|
| 201 |
+
"next_node": "vegetable_intent"
|
| 202 |
+
},
|
| 203 |
+
{
|
| 204 |
+
"type": "Intent",
|
| 205 |
+
"node_name": "vegetable_intent",
|
| 206 |
+
"content": [
|
| 207 |
+
"TOMATOES",
|
| 208 |
+
"CUCUMBERS",
|
| 209 |
+
"CARROTS"
|
| 210 |
+
],
|
| 211 |
+
"next_node": {
|
| 212 |
+
"TOMATOES": "peppa_tomato_comment",
|
| 213 |
+
"CUCUMBERS": "peppa_cucumber_comment",
|
| 214 |
+
"CARROTS": "peppa_carrot_comment",
|
| 215 |
+
"STOP": "closing_narration_premature",
|
| 216 |
+
"other": "other_0"
|
| 217 |
+
}
|
| 218 |
+
},
|
| 219 |
+
{
|
| 220 |
+
"type": "text_read_along",
|
| 221 |
+
"node_name": "peppa_tomato_comment",
|
| 222 |
+
"text_candidates": [
|
| 223 |
+
"Tomatoes are so juicy! Let's chop them up carefully! (\n*Chopping sounds*\n)"
|
| 224 |
+
],
|
| 225 |
+
"next_node": "spoon_drop_incident",
|
| 226 |
+
"additional_info": {
|
| 227 |
+
"speaker": "Peppa"
|
| 228 |
+
},
|
| 229 |
+
"requires_response": false,
|
| 230 |
+
"audio_url_candidates": [
|
| 231 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-5-answer-peppa-1734597071972.mp3"
|
| 232 |
+
]
|
| 233 |
+
},
|
| 234 |
+
{
|
| 235 |
+
"type": "text_read_along",
|
| 236 |
+
"node_name": "peppa_cucumber_comment",
|
| 237 |
+
"text_candidates": [
|
| 238 |
+
"Cucumbers are so crunchy! Yum! (\n*Slicing sound*\n)"
|
| 239 |
+
],
|
| 240 |
+
"next_node": "spoon_drop_incident",
|
| 241 |
+
"additional_info": {
|
| 242 |
+
"speaker": "Peppa"
|
| 243 |
+
},
|
| 244 |
+
"requires_response": false,
|
| 245 |
+
"audio_url_candidates": [
|
| 246 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-6-answer-peppa-1734597071972.mp3"
|
| 247 |
+
]
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"type": "text_read_along",
|
| 251 |
+
"node_name": "peppa_carrot_comment",
|
| 252 |
+
"text_candidates": [
|
| 253 |
+
"Carrots are so sweet! Let's grate some for the salad. (\n*Grating sound*\n)"
|
| 254 |
+
],
|
| 255 |
+
"next_node": "spoon_drop_incident",
|
| 256 |
+
"additional_info": {
|
| 257 |
+
"speaker": "Peppa"
|
| 258 |
+
},
|
| 259 |
+
"requires_response": false,
|
| 260 |
+
"audio_url_candidates": [
|
| 261 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-answer-peppa-1734597071972.mp3"
|
| 262 |
+
]
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"type": "text_read_along",
|
| 266 |
+
"node_name": "spoon_drop_incident",
|
| 267 |
+
"text_candidates": [
|
| 268 |
+
"(\n*a clatter of a dropped utensil*\n) Oops! I accidentally dropped my spoon on the floor. Mummy says we should always keep things clean while cooking!"
|
| 269 |
+
],
|
| 270 |
+
"next_node": "spoon_cleanup",
|
| 271 |
+
"additional_info": {
|
| 272 |
+
"speaker": "Peppa"
|
| 273 |
+
},
|
| 274 |
+
"requires_response": false,
|
| 275 |
+
"audio_url_candidates": [
|
| 276 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/5-4-peppa-1734597070861.mp3"
|
| 277 |
+
]
|
| 278 |
+
},
|
| 279 |
+
{
|
| 280 |
+
"type": "text_read_along",
|
| 281 |
+
"node_name": "spoon_cleanup",
|
| 282 |
+
"text_candidates": [
|
| 283 |
+
"Peppa needs to clean up! Should she pick up the spoon and put it in the sink or ask Mummy Pig for a new one?"
|
| 284 |
+
],
|
| 285 |
+
"next_node": "spoon_cleanup_menu",
|
| 286 |
+
"additional_info": {
|
| 287 |
+
"speaker": "Narrator"
|
| 288 |
+
},
|
| 289 |
+
"requires_response": false,
|
| 290 |
+
"audio_url_candidates": [
|
| 291 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-4-question-narrator-1734597071880.mp3"
|
| 292 |
+
]
|
| 293 |
+
},
|
| 294 |
+
{
|
| 295 |
+
"type": "Text_Choice",
|
| 296 |
+
"node_name": "spoon_cleanup_menu",
|
| 297 |
+
"text_choices": [
|
| 298 |
+
"Put in Sink",
|
| 299 |
+
"Ask for New Spoon"
|
| 300 |
+
],
|
| 301 |
+
"next_node": "spoon_cleanup_intent"
|
| 302 |
+
},
|
| 303 |
+
{
|
| 304 |
+
"type": "Intent",
|
| 305 |
+
"node_name": "spoon_cleanup_intent",
|
| 306 |
+
"content": [
|
| 307 |
+
"PUT_IN_SINK",
|
| 308 |
+
"ASK_NEW_SPOON"
|
| 309 |
+
],
|
| 310 |
+
"next_node": {
|
| 311 |
+
"PUT_IN_SINK": "put_in_sink_response",
|
| 312 |
+
"ASK_NEW_SPOON": "ask_new_spoon_response",
|
| 313 |
+
"STOP": "closing_narration_premature",
|
| 314 |
+
"other": "other_1"
|
| 315 |
+
}
|
| 316 |
+
},
|
| 317 |
+
{
|
| 318 |
+
"type": "text_read_along",
|
| 319 |
+
"node_name": "put_in_sink_response",
|
| 320 |
+
"text_candidates": [
|
| 321 |
+
"Good idea! I'll wash this spoon later. (\n*running water*\n)"
|
| 322 |
+
],
|
| 323 |
+
"next_node": "mix_salad_prompt",
|
| 324 |
+
"additional_info": {
|
| 325 |
+
"speaker": "Peppa"
|
| 326 |
+
},
|
| 327 |
+
"requires_response": false,
|
| 328 |
+
"audio_url_candidates": [
|
| 329 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-12-answer-peppa-1734597071972.mp3"
|
| 330 |
+
]
|
| 331 |
+
},
|
| 332 |
+
{
|
| 333 |
+
"type": "text_read_along",
|
| 334 |
+
"node_name": "ask_new_spoon_response",
|
| 335 |
+
"text_candidates": [
|
| 336 |
+
"Mummy, can I have another spoon? Thank you!"
|
| 337 |
+
],
|
| 338 |
+
"next_node": "mix_salad_prompt",
|
| 339 |
+
"additional_info": {
|
| 340 |
+
"speaker": "Peppa"
|
| 341 |
+
},
|
| 342 |
+
"requires_response": false,
|
| 343 |
+
"audio_url_candidates": [
|
| 344 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-11-answer-peppa-1734597071972.mp3"
|
| 345 |
+
]
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"type": "text_read_along",
|
| 349 |
+
"node_name": "mix_salad_prompt",
|
| 350 |
+
"text_candidates": [
|
| 351 |
+
"Keeping the kitchen clean is so important. Now let's mix the salad together!"
|
| 352 |
+
],
|
| 353 |
+
"next_node": "mix_instruction",
|
| 354 |
+
"additional_info": {
|
| 355 |
+
"speaker": "Peppa"
|
| 356 |
+
},
|
| 357 |
+
"requires_response": false,
|
| 358 |
+
"audio_url_candidates": [
|
| 359 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/5-14-peppa-1734597070861.mp3"
|
| 360 |
+
]
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"type": "text_read_along",
|
| 364 |
+
"node_name": "mix_instruction",
|
| 365 |
+
"text_candidates": [
|
| 366 |
+
"Say 'mix' three times to help Peppa finish the salad!"
|
| 367 |
+
],
|
| 368 |
+
"next_node": "mix_intent_first",
|
| 369 |
+
"additional_info": {
|
| 370 |
+
"speaker": "Narrator"
|
| 371 |
+
},
|
| 372 |
+
"audio_url_candidates": [
|
| 373 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-5-question-narrator-1734597071880.mp3"
|
| 374 |
+
]
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"type": "Intent",
|
| 378 |
+
"node_name": "mix_intent_first",
|
| 379 |
+
"content": [
|
| 380 |
+
"MIX"
|
| 381 |
+
],
|
| 382 |
+
"next_node": {
|
| 383 |
+
"MIX": "first_mix_response",
|
| 384 |
+
"other": "other_2"
|
| 385 |
+
}
|
| 386 |
+
},
|
| 387 |
+
{
|
| 388 |
+
"type": "text_read_along",
|
| 389 |
+
"node_name": "first_mix_response",
|
| 390 |
+
"text_candidates": [
|
| 391 |
+
"Great! Keep mixing! (\n*bowl mixing*\n)"
|
| 392 |
+
],
|
| 393 |
+
"next_node": "mix_intent_second",
|
| 394 |
+
"additional_info": {
|
| 395 |
+
"speaker": "Peppa"
|
| 396 |
+
},
|
| 397 |
+
"requires_response": true,
|
| 398 |
+
"audio_url_candidates": [
|
| 399 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-8-answer-peppa-1734597071972.mp3"
|
| 400 |
+
]
|
| 401 |
+
},
|
| 402 |
+
{
|
| 403 |
+
"type": "Intent",
|
| 404 |
+
"node_name": "mix_intent_second",
|
| 405 |
+
"content": [
|
| 406 |
+
"MIX"
|
| 407 |
+
],
|
| 408 |
+
"next_node": {
|
| 409 |
+
"MIX": "second_mix_response",
|
| 410 |
+
"other": "other_3"
|
| 411 |
+
}
|
| 412 |
+
},
|
| 413 |
+
{
|
| 414 |
+
"type": "text_read_along",
|
| 415 |
+
"node_name": "second_mix_response",
|
| 416 |
+
"text_candidates": [
|
| 417 |
+
"Great! Keep mixing! (\n*bowl mixing*\n)"
|
| 418 |
+
],
|
| 419 |
+
"next_node": "mix_intent_third",
|
| 420 |
+
"additional_info": {
|
| 421 |
+
"speaker": "Peppa"
|
| 422 |
+
},
|
| 423 |
+
"requires_response": true,
|
| 424 |
+
"audio_url_candidates": [
|
| 425 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-10-answer-peppa-1734597071972.mp3"
|
| 426 |
+
]
|
| 427 |
+
},
|
| 428 |
+
{
|
| 429 |
+
"type": "Intent",
|
| 430 |
+
"node_name": "mix_intent_third",
|
| 431 |
+
"content": [
|
| 432 |
+
"MIX"
|
| 433 |
+
],
|
| 434 |
+
"next_node": {
|
| 435 |
+
"MIX": "third_mix_response",
|
| 436 |
+
"other": "other_4"
|
| 437 |
+
}
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"type": "text_read_along",
|
| 441 |
+
"node_name": "third_mix_response",
|
| 442 |
+
"text_candidates": [
|
| 443 |
+
"Yay! The salad looks delicious!"
|
| 444 |
+
],
|
| 445 |
+
"next_node": "tasting_prompt",
|
| 446 |
+
"additional_info": {
|
| 447 |
+
"speaker": "Peppa"
|
| 448 |
+
},
|
| 449 |
+
"requires_response": false,
|
| 450 |
+
"audio_url_candidates": [
|
| 451 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-9-answer-peppa-1734597071972.mp3"
|
| 452 |
+
]
|
| 453 |
+
},
|
| 454 |
+
{
|
| 455 |
+
"type": "text_read_along",
|
| 456 |
+
"node_name": "tasting_prompt",
|
| 457 |
+
"text_candidates": [
|
| 458 |
+
"The salad looks delicious! Time to taste it. Should I try it first or let George have a taste?"
|
| 459 |
+
],
|
| 460 |
+
"next_node": "tasting_menu",
|
| 461 |
+
"additional_info": {
|
| 462 |
+
"speaker": "Peppa"
|
| 463 |
+
},
|
| 464 |
+
"requires_response": false,
|
| 465 |
+
"audio_url_candidates": [
|
| 466 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/4-3-peppa-1734597070861.mp3"
|
| 467 |
+
]
|
| 468 |
+
},
|
| 469 |
+
{
|
| 470 |
+
"type": "Text_Choice",
|
| 471 |
+
"node_name": "tasting_menu",
|
| 472 |
+
"text_choices": [
|
| 473 |
+
"Try First",
|
| 474 |
+
"Share with George"
|
| 475 |
+
],
|
| 476 |
+
"next_node": "tasting_intent"
|
| 477 |
+
},
|
| 478 |
+
{
|
| 479 |
+
"type": "Intent",
|
| 480 |
+
"node_name": "tasting_intent",
|
| 481 |
+
"content": [
|
| 482 |
+
"TRY_FIRST",
|
| 483 |
+
"SHARE_WITH_GEORGE"
|
| 484 |
+
],
|
| 485 |
+
"next_node": {
|
| 486 |
+
"TRY_FIRST": "try_first_response",
|
| 487 |
+
"SHARE_WITH_GEORGE": "share_with_george_response",
|
| 488 |
+
"other": "other_5"
|
| 489 |
+
}
|
| 490 |
+
},
|
| 491 |
+
{
|
| 492 |
+
"type": "text_read_along",
|
| 493 |
+
"node_name": "try_first_response",
|
| 494 |
+
"text_candidates": [
|
| 495 |
+
"(\n*munching sounds*\n) Mmm, this is so yummy! George, you're going to love it too!"
|
| 496 |
+
],
|
| 497 |
+
"next_node": "mmm_response",
|
| 498 |
+
"additional_info": {
|
| 499 |
+
"speaker": "Peppa"
|
| 500 |
+
},
|
| 501 |
+
"requires_response": false,
|
| 502 |
+
"audio_url_candidates": [
|
| 503 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-1-answer-peppa-1734597071972.mp3"
|
| 504 |
+
]
|
| 505 |
+
},
|
| 506 |
+
{
|
| 507 |
+
"type": "text_read_along",
|
| 508 |
+
"node_name": "share_with_george_response",
|
| 509 |
+
"text_candidates": [
|
| 510 |
+
"(\n*Fork clinking*\n) Here you go, George! But don't eat it all!"
|
| 511 |
+
],
|
| 512 |
+
"next_node": "mmm_response",
|
| 513 |
+
"additional_info": {
|
| 514 |
+
"speaker": "Peppa"
|
| 515 |
+
},
|
| 516 |
+
"requires_response": false,
|
| 517 |
+
"audio_url_candidates": [
|
| 518 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-2-answer-peppa-1734597071972.mp3"
|
| 519 |
+
]
|
| 520 |
+
},
|
| 521 |
+
{
|
| 522 |
+
"type": "text_read_along",
|
| 523 |
+
"node_name": "mmm_response",
|
| 524 |
+
"text_candidates": [
|
| 525 |
+
"Mmm! This is the best salad I've ever had! More, please! (\n*Peppa's giggle*\n)"
|
| 526 |
+
],
|
| 527 |
+
"next_node": "agent_yummy_salad",
|
| 528 |
+
"additional_info": {
|
| 529 |
+
"speaker": "Peppa"
|
| 530 |
+
},
|
| 531 |
+
"requires_response": false,
|
| 532 |
+
"audio_url_candidates": [
|
| 533 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/4-11-peppa-1734597070861.mp3"
|
| 534 |
+
]
|
| 535 |
+
},
|
| 536 |
+
{
|
| 537 |
+
"type": "text_read_along",
|
| 538 |
+
"node_name": "agent_yummy_salad",
|
| 539 |
+
"text_candidates": [
|
| 540 |
+
"That was such a yummy salad! Thank you for helping me make it. Cooking is so much fun when we do it together!"
|
| 541 |
+
],
|
| 542 |
+
"next_node": "next_cooking_prompt",
|
| 543 |
+
"additional_info": {
|
| 544 |
+
"speaker": "Peppa"
|
| 545 |
+
},
|
| 546 |
+
"requires_response": false,
|
| 547 |
+
"audio_url_candidates": [
|
| 548 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-8-peppa-1734597070861.mp3"
|
| 549 |
+
]
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"type": "text_read_along",
|
| 553 |
+
"node_name": "next_cooking_prompt",
|
| 554 |
+
"text_candidates": [
|
| 555 |
+
"What should Peppa make next time? Should she bake cookies or make sandwiches?"
|
| 556 |
+
],
|
| 557 |
+
"next_node": "next_cooking_menu",
|
| 558 |
+
"additional_info": {
|
| 559 |
+
"speaker": "Narrator"
|
| 560 |
+
},
|
| 561 |
+
"requires_response": false,
|
| 562 |
+
"audio_url_candidates": [
|
| 563 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-3-question-narrator-1734597071881.mp3"
|
| 564 |
+
]
|
| 565 |
+
},
|
| 566 |
+
{
|
| 567 |
+
"type": "Text_Choice",
|
| 568 |
+
"node_name": "next_cooking_menu",
|
| 569 |
+
"text_choices": [
|
| 570 |
+
"Cookies",
|
| 571 |
+
"Sandwiches"
|
| 572 |
+
],
|
| 573 |
+
"next_node": "next_cooking_intent"
|
| 574 |
+
},
|
| 575 |
+
{
|
| 576 |
+
"type": "Intent",
|
| 577 |
+
"node_name": "next_cooking_intent",
|
| 578 |
+
"content": [
|
| 579 |
+
"COOKIES",
|
| 580 |
+
"SANDWICHES"
|
| 581 |
+
],
|
| 582 |
+
"next_node": {
|
| 583 |
+
"COOKIES": "cookies_response",
|
| 584 |
+
"SANDWICHES": "sandwiches_response",
|
| 585 |
+
"other": "other_6"
|
| 586 |
+
}
|
| 587 |
+
},
|
| 588 |
+
{
|
| 589 |
+
"type": "text_read_along",
|
| 590 |
+
"node_name": "cookies_response",
|
| 591 |
+
"text_candidates": [
|
| 592 |
+
"(\n*Fork clinking*\n) Mmm, cookies sound sweet and yummy!"
|
| 593 |
+
],
|
| 594 |
+
"next_node": "closing_narration",
|
| 595 |
+
"additional_info": {
|
| 596 |
+
"speaker": "Peppa"
|
| 597 |
+
},
|
| 598 |
+
"requires_response": false,
|
| 599 |
+
"audio_url_candidates": [
|
| 600 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-3-answer-peppa-1734597071972.mp3"
|
| 601 |
+
]
|
| 602 |
+
},
|
| 603 |
+
{
|
| 604 |
+
"type": "text_read_along",
|
| 605 |
+
"node_name": "sandwiches_response",
|
| 606 |
+
"text_candidates": [
|
| 607 |
+
"(\n*Peppa's giggle*\n) Sandwiches are perfect for lunch! I can't wait!"
|
| 608 |
+
],
|
| 609 |
+
"next_node": "closing_narration",
|
| 610 |
+
"additional_info": {
|
| 611 |
+
"speaker": "Peppa"
|
| 612 |
+
},
|
| 613 |
+
"requires_response": false,
|
| 614 |
+
"audio_url_candidates": [
|
| 615 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-4-answer-peppa-1734597071972.mp3"
|
| 616 |
+
]
|
| 617 |
+
},
|
| 618 |
+
{
|
| 619 |
+
"type": "text_read_along",
|
| 620 |
+
"node_name": "closing_narration",
|
| 621 |
+
"text_candidates": [
|
| 622 |
+
"Peppa had a great time preparing lunch. Thanks to your help, her salad was a big success!"
|
| 623 |
+
],
|
| 624 |
+
"next_node": "final_goodbye",
|
| 625 |
+
"additional_info": {
|
| 626 |
+
"speaker": "Narrator"
|
| 627 |
+
},
|
| 628 |
+
"requires_response": false,
|
| 629 |
+
"audio_url_candidates": [
|
| 630 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-6-narrator-1734597070861.mp3"
|
| 631 |
+
]
|
| 632 |
+
},
|
| 633 |
+
{
|
| 634 |
+
"type": "text_read_along",
|
| 635 |
+
"node_name": "final_goodbye",
|
| 636 |
+
"text_candidates": [
|
| 637 |
+
"(\n*Peppa's giggle*\n) Let's make something else together next time! Bye-bye! (\n*a cheerful chime*\n)"
|
| 638 |
+
],
|
| 639 |
+
"next_node": "end",
|
| 640 |
+
"additional_info": {
|
| 641 |
+
"speaker": "Peppa"
|
| 642 |
+
},
|
| 643 |
+
"requires_response": false,
|
| 644 |
+
"audio_url_candidates": [
|
| 645 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-peppa-1734597070861.mp3"
|
| 646 |
+
]
|
| 647 |
+
},
|
| 648 |
+
{
|
| 649 |
+
"type": "text_read_along",
|
| 650 |
+
"node_name": "closing_narration_premature",
|
| 651 |
+
"text_candidates": [
|
| 652 |
+
"Let's make something else together next time! Bye-bye!"
|
| 653 |
+
],
|
| 654 |
+
"next_node": "end",
|
| 655 |
+
"additional_info": {
|
| 656 |
+
"speaker": "Peppa"
|
| 657 |
+
},
|
| 658 |
+
"requires_response": false,
|
| 659 |
+
"audio_url_candidates": [
|
| 660 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-peppa-1734597070861.mp3"
|
| 661 |
+
]
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"type": "text_read_along",
|
| 665 |
+
"node_name": "other_0",
|
| 666 |
+
"text_candidates": [
|
| 667 |
+
"Not quite! Try again!",
|
| 668 |
+
"Give it another go!",
|
| 669 |
+
"Let's try that one more time!",
|
| 670 |
+
"Try again. I know you can do it!",
|
| 671 |
+
"Let's give it another try!"
|
| 672 |
+
],
|
| 673 |
+
"next_node": "vegetable_selection",
|
| 674 |
+
"requires_response": false,
|
| 675 |
+
"additional_info": {
|
| 676 |
+
"speaker": "Narrator"
|
| 677 |
+
},
|
| 678 |
+
"audio_url_candidates": [
|
| 679 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 680 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 681 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 682 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 683 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 684 |
+
]
|
| 685 |
+
},
|
| 686 |
+
{
|
| 687 |
+
"type": "text_read_along",
|
| 688 |
+
"node_name": "other_1",
|
| 689 |
+
"text_candidates": [
|
| 690 |
+
"Not quite! Try again!",
|
| 691 |
+
"Give it another go!",
|
| 692 |
+
"Let's try that one more time!",
|
| 693 |
+
"Try again. I know you can do it!",
|
| 694 |
+
"Let's give it another try!"
|
| 695 |
+
],
|
| 696 |
+
"next_node": "spoon_cleanup",
|
| 697 |
+
"requires_response": false,
|
| 698 |
+
"additional_info": {
|
| 699 |
+
"speaker": "Narrator"
|
| 700 |
+
},
|
| 701 |
+
"audio_url_candidates": [
|
| 702 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 703 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 704 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 705 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 706 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 707 |
+
]
|
| 708 |
+
},
|
| 709 |
+
{
|
| 710 |
+
"type": "text_read_along",
|
| 711 |
+
"node_name": "other_2",
|
| 712 |
+
"text_candidates": [
|
| 713 |
+
"Not quite! Try again!",
|
| 714 |
+
"Give it another go!",
|
| 715 |
+
"Let's try that one more time!",
|
| 716 |
+
"Try again. I know you can do it!",
|
| 717 |
+
"Let's give it another try!"
|
| 718 |
+
],
|
| 719 |
+
"next_node": "mix_intent_first",
|
| 720 |
+
"requires_response": true,
|
| 721 |
+
"additional_info": {
|
| 722 |
+
"speaker": "Narrator"
|
| 723 |
+
},
|
| 724 |
+
"audio_url_candidates": [
|
| 725 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 726 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 727 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 728 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 729 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 730 |
+
]
|
| 731 |
+
},
|
| 732 |
+
{
|
| 733 |
+
"type": "text_read_along",
|
| 734 |
+
"node_name": "other_3",
|
| 735 |
+
"text_candidates": [
|
| 736 |
+
"Not quite! Try again!",
|
| 737 |
+
"Give it another go!",
|
| 738 |
+
"Let's try that one more time!",
|
| 739 |
+
"Try again. I know you can do it!",
|
| 740 |
+
"Let's give it another try!"
|
| 741 |
+
],
|
| 742 |
+
"next_node": "mix_intent_second",
|
| 743 |
+
"requires_response": true,
|
| 744 |
+
"additional_info": {
|
| 745 |
+
"speaker": "Narrator"
|
| 746 |
+
},
|
| 747 |
+
"audio_url_candidates": [
|
| 748 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 749 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 750 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 751 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 752 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 753 |
+
]
|
| 754 |
+
},
|
| 755 |
+
{
|
| 756 |
+
"type": "text_read_along",
|
| 757 |
+
"node_name": "other_4",
|
| 758 |
+
"text_candidates": [
|
| 759 |
+
"Not quite! Try again!",
|
| 760 |
+
"Give it another go!",
|
| 761 |
+
"Let's try that one more time!",
|
| 762 |
+
"Try again. I know you can do it!",
|
| 763 |
+
"Let's give it another try!"
|
| 764 |
+
],
|
| 765 |
+
"next_node": "mix_intent_third",
|
| 766 |
+
"requires_response": true,
|
| 767 |
+
"additional_info": {
|
| 768 |
+
"speaker": "Narrator"
|
| 769 |
+
},
|
| 770 |
+
"audio_url_candidates": [
|
| 771 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 772 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 773 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 774 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 775 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 776 |
+
]
|
| 777 |
+
},
|
| 778 |
+
{
|
| 779 |
+
"type": "text_read_along",
|
| 780 |
+
"node_name": "other_5",
|
| 781 |
+
"text_candidates": [
|
| 782 |
+
"Not quite! Try again!",
|
| 783 |
+
"Give it another go!",
|
| 784 |
+
"Let's try that one more time!",
|
| 785 |
+
"Try again. I know you can do it!",
|
| 786 |
+
"Let's give it another try!"
|
| 787 |
+
],
|
| 788 |
+
"next_node": "tasting_prompt",
|
| 789 |
+
"requires_response": false,
|
| 790 |
+
"additional_info": {
|
| 791 |
+
"speaker": "Narrator"
|
| 792 |
+
},
|
| 793 |
+
"audio_url_candidates": [
|
| 794 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 795 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 796 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 797 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 798 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 799 |
+
]
|
| 800 |
+
},
|
| 801 |
+
{
|
| 802 |
+
"type": "text_read_along",
|
| 803 |
+
"node_name": "other_6",
|
| 804 |
+
"text_candidates": [
|
| 805 |
+
"Not quite! Try again!",
|
| 806 |
+
"Give it another go!",
|
| 807 |
+
"Let's try that one more time!",
|
| 808 |
+
"Try again. I know you can do it!",
|
| 809 |
+
"Let's give it another try!"
|
| 810 |
+
],
|
| 811 |
+
"next_node": "next_cooking_prompt",
|
| 812 |
+
"requires_response": false,
|
| 813 |
+
"additional_info": {
|
| 814 |
+
"speaker": "Narrator"
|
| 815 |
+
},
|
| 816 |
+
"audio_url_candidates": [
|
| 817 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 818 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 819 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 820 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 821 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 822 |
+
]
|
| 823 |
+
}
|
| 824 |
+
]
|
| 825 |
+
}
|
config/mj_guided_first_old.json
ADDED
|
@@ -0,0 +1,590 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"intent_config": {
|
| 3 |
+
"rule_based": true,
|
| 4 |
+
"vector": false,
|
| 5 |
+
"llm": true,
|
| 6 |
+
"intent": [
|
| 7 |
+
{
|
| 8 |
+
"name": "TOMATOES",
|
| 9 |
+
"samples": ["tomatoes", "tomato"]
|
| 10 |
+
},
|
| 11 |
+
{
|
| 12 |
+
"name": "CUCUMBERS",
|
| 13 |
+
"samples": ["cucumbers", "cucumber"]
|
| 14 |
+
},
|
| 15 |
+
{
|
| 16 |
+
"name": "CARROTS",
|
| 17 |
+
"samples": ["carrots", "carrot"]
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"name": "PUT_IN_SINK",
|
| 21 |
+
"samples": ["sink", "put in sink", "wash"]
|
| 22 |
+
},
|
| 23 |
+
{
|
| 24 |
+
"name": "ASK_NEW_SPOON",
|
| 25 |
+
"samples": ["new spoon", "ask mummy", "get another", "ask new", "ask for new"]
|
| 26 |
+
},
|
| 27 |
+
{
|
| 28 |
+
"name": "MIX",
|
| 29 |
+
"samples": ["mix", "mix up", "mixing", "mix it good"]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"name": "TRY_FIRST",
|
| 33 |
+
"samples": ["try first", "taste", "me first"]
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"name": "SHARE_WITH_GEORGE",
|
| 37 |
+
"samples": ["share", "george", "give to george", "let george"]
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"name": "COOKIES",
|
| 41 |
+
"samples": ["cookies", "bake cookies"]
|
| 42 |
+
},
|
| 43 |
+
{
|
| 44 |
+
"name": "SANDWICHES",
|
| 45 |
+
"samples": ["sandwiches", "make sandwiches"]
|
| 46 |
+
}
|
| 47 |
+
],
|
| 48 |
+
"global_intent":[
|
| 49 |
+
{
|
| 50 |
+
"name": "STOP",
|
| 51 |
+
"samples": ["stop", "let's end here"]
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"name": "other",
|
| 55 |
+
"samples": ["I don't know"]
|
| 56 |
+
}
|
| 57 |
+
]
|
| 58 |
+
},
|
| 59 |
+
"global_route": {
|
| 60 |
+
"stop": [
|
| 61 |
+
{
|
| 62 |
+
"type": "Intent",
|
| 63 |
+
"node_name": "stop_intent",
|
| 64 |
+
"content": ["stop"],
|
| 65 |
+
"next_node": {
|
| 66 |
+
"stop": "stop_message"
|
| 67 |
+
},
|
| 68 |
+
"on_fail": {
|
| 69 |
+
"route": "{caller}"
|
| 70 |
+
}
|
| 71 |
+
},
|
| 72 |
+
{
|
| 73 |
+
"type": "Agent",
|
| 74 |
+
"node_name": "stop_message",
|
| 75 |
+
"text_candidates": ["stop"],
|
| 76 |
+
"next_node": {
|
| 77 |
+
"stop": "end"
|
| 78 |
+
}
|
| 79 |
+
}
|
| 80 |
+
],
|
| 81 |
+
"other": [
|
| 82 |
+
{
|
| 83 |
+
"type": "Agent",
|
| 84 |
+
"node_name": "other_intent_message",
|
| 85 |
+
"text_candidates": "Sorry, please try again",
|
| 86 |
+
"next_node": "{sys.caller}"
|
| 87 |
+
}
|
| 88 |
+
]
|
| 89 |
+
},
|
| 90 |
+
"workflow": [
|
| 91 |
+
{
|
| 92 |
+
"type": "Play",
|
| 93 |
+
"node_name": "opening_background_music",
|
| 94 |
+
"audio_url_candidates": ["https://storytellings.s3.ap-southeast-1.amazonaws.com/local/from-admin/1734534796395.mp3"],
|
| 95 |
+
"next_node": "scene_setting_narration",
|
| 96 |
+
"requires_response": false
|
| 97 |
+
},
|
| 98 |
+
{
|
| 99 |
+
"type": "Agent",
|
| 100 |
+
"node_name": "scene_setting_narration",
|
| 101 |
+
"text_candidates": ["It's lunchtime at Peppa's house! Peppa is excited to prepare lunch all by herself today. Let's help Peppa make something delicious!"],
|
| 102 |
+
"next_node": "peppa_invitation",
|
| 103 |
+
"additional_info": {
|
| 104 |
+
"speaker": "Narrator",
|
| 105 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-1-narrator-1734597070860.mp3"
|
| 106 |
+
},
|
| 107 |
+
"requires_response": false
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"type": "Agent",
|
| 111 |
+
"node_name": "peppa_invitation",
|
| 112 |
+
"text_candidates": ["Mummy says I can make lunch today! But I'll need some help. Will you join me in the kitchen?"],
|
| 113 |
+
"next_node": "vegetable_selection",
|
| 114 |
+
"additional_info": {
|
| 115 |
+
"speaker": "Peppa",
|
| 116 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-2-peppa-1734597070861.mp3"
|
| 117 |
+
},
|
| 118 |
+
"requires_response": false
|
| 119 |
+
},
|
| 120 |
+
{
|
| 121 |
+
"type": "Agent",
|
| 122 |
+
"node_name": "vegetable_selection",
|
| 123 |
+
"text_candidates": ["Peppa needs to choose some vegetables for her salad. Should she use tomatoes, cucumbers, or carrots? Help her decide!"],
|
| 124 |
+
"next_node": "vegetable_menu",
|
| 125 |
+
"additional_info": {
|
| 126 |
+
"speaker": "Narrator",
|
| 127 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-1-question-narrator-1734597071880.mp3"
|
| 128 |
+
},
|
| 129 |
+
"requires_response": false
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"type": "Text_Choice",
|
| 133 |
+
"node_name": "vegetable_menu",
|
| 134 |
+
"text_choices": ["Tomatoes", "Cucumbers", "Carrots"],
|
| 135 |
+
"next_node": "vegetable_intent"
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"type": "Intent",
|
| 139 |
+
"node_name": "vegetable_intent",
|
| 140 |
+
"content": ["TOMATOES", "CUCUMBERS", "CARROTS"],
|
| 141 |
+
"next_node": {
|
| 142 |
+
"TOMATOES": "peppa_tomato_comment",
|
| 143 |
+
"CUCUMBERS": "peppa_cucumber_comment",
|
| 144 |
+
"CARROTS": "peppa_carrot_comment",
|
| 145 |
+
"STOP": "closing_narration_premature",
|
| 146 |
+
"other": "other_0"
|
| 147 |
+
}
|
| 148 |
+
},
|
| 149 |
+
{
|
| 150 |
+
"type": "Agent",
|
| 151 |
+
"node_name": "peppa_tomato_comment",
|
| 152 |
+
"text_candidates": ["Tomatoes are so juicy! Let's chop them up carefully! (\n*Chopping sounds*\n)"],
|
| 153 |
+
"next_node": "spoon_drop_incident",
|
| 154 |
+
"additional_info": {
|
| 155 |
+
"speaker": "Peppa",
|
| 156 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-5-answer-peppa-1734597071972.mp3"
|
| 157 |
+
},
|
| 158 |
+
"requires_response": false
|
| 159 |
+
},
|
| 160 |
+
{
|
| 161 |
+
"type": "Agent",
|
| 162 |
+
"node_name": "peppa_cucumber_comment",
|
| 163 |
+
"text_candidates": ["Cucumbers are so crunchy! Yum! (\n*Slicing sound*\n)"],
|
| 164 |
+
"next_node": "spoon_drop_incident",
|
| 165 |
+
"additional_info": {
|
| 166 |
+
"speaker": "Peppa",
|
| 167 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-6-answer-peppa-1734597071972.mp3"
|
| 168 |
+
},
|
| 169 |
+
"requires_response": false
|
| 170 |
+
},
|
| 171 |
+
{
|
| 172 |
+
"type": "Agent",
|
| 173 |
+
"node_name": "peppa_carrot_comment",
|
| 174 |
+
"text_candidates": ["Carrots are so sweet! Let's grate some for the salad. (\n*Grating sound*\n)"],
|
| 175 |
+
"next_node": "spoon_drop_incident",
|
| 176 |
+
"additional_info": {
|
| 177 |
+
"speaker": "Peppa",
|
| 178 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-answer-peppa-1734597071972.mp3"
|
| 179 |
+
},
|
| 180 |
+
"requires_response": false
|
| 181 |
+
},
|
| 182 |
+
{
|
| 183 |
+
"type": "Agent",
|
| 184 |
+
"node_name": "spoon_drop_incident",
|
| 185 |
+
"text_candidates": ["(\n*a clatter of a dropped utensil*\n) Oops! I accidentally dropped my spoon on the floor. Mummy says we should always keep things clean while cooking!"],
|
| 186 |
+
"next_node": "spoon_cleanup",
|
| 187 |
+
"additional_info": {
|
| 188 |
+
"speaker": "Peppa",
|
| 189 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/5-4-peppa-1734597070861.mp3"
|
| 190 |
+
},
|
| 191 |
+
"requires_response": false
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"type": "Agent",
|
| 195 |
+
"node_name": "spoon_cleanup",
|
| 196 |
+
"text_candidates": ["Peppa needs to clean up! Should she pick up the spoon and put it in the sink or ask Mummy Pig for a new one?"],
|
| 197 |
+
"next_node": "spoon_cleanup_menu",
|
| 198 |
+
"additional_info": {
|
| 199 |
+
"speaker": "Narrator",
|
| 200 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-4-question-narrator-1734597071880.mp3"
|
| 201 |
+
},
|
| 202 |
+
"requires_response": false
|
| 203 |
+
},
|
| 204 |
+
{
|
| 205 |
+
"type": "Text_Choice",
|
| 206 |
+
"node_name": "spoon_cleanup_menu",
|
| 207 |
+
"text_choices": ["Put in Sink", "Ask for New Spoon"],
|
| 208 |
+
"next_node": "spoon_cleanup_intent"
|
| 209 |
+
},
|
| 210 |
+
{
|
| 211 |
+
"type": "Intent",
|
| 212 |
+
"node_name": "spoon_cleanup_intent",
|
| 213 |
+
"content": ["PUT_IN_SINK", "ASK_NEW_SPOON"],
|
| 214 |
+
"next_node": {
|
| 215 |
+
"PUT_IN_SINK": "put_in_sink_response",
|
| 216 |
+
"ASK_NEW_SPOON": "ask_new_spoon_response",
|
| 217 |
+
"STOP": "closing_narration_premature",
|
| 218 |
+
"other": "other_1"
|
| 219 |
+
}
|
| 220 |
+
},
|
| 221 |
+
{
|
| 222 |
+
"type": "Agent",
|
| 223 |
+
"node_name": "put_in_sink_response",
|
| 224 |
+
"text_candidates": ["Good idea! I'll wash this spoon later. (\n*running water*\n)"],
|
| 225 |
+
"next_node": "mix_salad_prompt",
|
| 226 |
+
"additional_info": {
|
| 227 |
+
"speaker": "Peppa",
|
| 228 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-12-answer-peppa-1734597071972.mp3"
|
| 229 |
+
},
|
| 230 |
+
"requires_response": false
|
| 231 |
+
},
|
| 232 |
+
{
|
| 233 |
+
"type": "Agent",
|
| 234 |
+
"node_name": "ask_new_spoon_response",
|
| 235 |
+
"text_candidates": ["Mummy, can I have another spoon? Thank you!"],
|
| 236 |
+
"next_node": "mix_salad_prompt",
|
| 237 |
+
"additional_info": {
|
| 238 |
+
"speaker": "Peppa",
|
| 239 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-11-answer-peppa-1734597071972.mp3"
|
| 240 |
+
},
|
| 241 |
+
"requires_response": false
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"type": "Agent",
|
| 245 |
+
"node_name": "mix_salad_prompt",
|
| 246 |
+
"text_candidates": ["Keeping the kitchen clean is so important. Now let's mix the salad together!"],
|
| 247 |
+
"next_node": "mix_instruction",
|
| 248 |
+
"additional_info": {
|
| 249 |
+
"speaker": "Peppa",
|
| 250 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/5-14-peppa-1734597070861.mp3"
|
| 251 |
+
},
|
| 252 |
+
"requires_response": false
|
| 253 |
+
},
|
| 254 |
+
{
|
| 255 |
+
"type": "Agent",
|
| 256 |
+
"node_name": "mix_instruction",
|
| 257 |
+
"text_candidates": ["Say 'mix' three times to help Peppa finish the salad!"],
|
| 258 |
+
"next_node": "mix_intent_first",
|
| 259 |
+
"additional_info": {
|
| 260 |
+
"speaker": "Narrator",
|
| 261 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-5-question-narrator-1734597071880.mp3"
|
| 262 |
+
}
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"type": "Intent",
|
| 266 |
+
"node_name": "mix_intent_first",
|
| 267 |
+
"content": ["MIX"],
|
| 268 |
+
"next_node": {
|
| 269 |
+
"MIX": "first_mix_response",
|
| 270 |
+
"other": "other_2"
|
| 271 |
+
}
|
| 272 |
+
},
|
| 273 |
+
{
|
| 274 |
+
"type": "Agent",
|
| 275 |
+
"node_name": "first_mix_response",
|
| 276 |
+
"text_candidates": ["Great! Keep mixing! (\n*bowl mixing*\n)"],
|
| 277 |
+
"next_node": "mix_intent_second",
|
| 278 |
+
"additional_info": {
|
| 279 |
+
"speaker": "Peppa",
|
| 280 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-8-answer-peppa-1734597071972.mp3"
|
| 281 |
+
},
|
| 282 |
+
"requires_response": true
|
| 283 |
+
},
|
| 284 |
+
{
|
| 285 |
+
"type": "Intent",
|
| 286 |
+
"node_name": "mix_intent_second",
|
| 287 |
+
"content": ["MIX"],
|
| 288 |
+
"next_node": {
|
| 289 |
+
"MIX": "second_mix_response",
|
| 290 |
+
"other": "other_3"
|
| 291 |
+
}
|
| 292 |
+
},
|
| 293 |
+
{
|
| 294 |
+
"type": "Agent",
|
| 295 |
+
"node_name": "second_mix_response",
|
| 296 |
+
"text_candidates": ["Great! Keep mixing! (\n*bowl mixing*\n)"],
|
| 297 |
+
"next_node": "mix_intent_third",
|
| 298 |
+
"additional_info": {
|
| 299 |
+
"speaker": "Peppa",
|
| 300 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-10-answer-peppa-1734597071972.mp3"
|
| 301 |
+
},
|
| 302 |
+
"requires_response": true
|
| 303 |
+
},
|
| 304 |
+
{
|
| 305 |
+
"type": "Intent",
|
| 306 |
+
"node_name": "mix_intent_third",
|
| 307 |
+
"content": ["MIX"],
|
| 308 |
+
"next_node": {
|
| 309 |
+
"MIX": "third_mix_response",
|
| 310 |
+
"other": "other_4"
|
| 311 |
+
}
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"type": "Agent",
|
| 315 |
+
"node_name": "third_mix_response",
|
| 316 |
+
"text_candidates": ["Yay! The salad looks delicious!"],
|
| 317 |
+
"next_node": "tasting_prompt",
|
| 318 |
+
"additional_info": {
|
| 319 |
+
"speaker": "Peppa",
|
| 320 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-9-answer-peppa-1734597071972.mp3"
|
| 321 |
+
},
|
| 322 |
+
"requires_response": false
|
| 323 |
+
},
|
| 324 |
+
{
|
| 325 |
+
"type": "Agent",
|
| 326 |
+
"node_name": "tasting_prompt",
|
| 327 |
+
"text_candidates": ["The salad looks delicious! Time to taste it. Should I try it first or let George have a taste?"],
|
| 328 |
+
"next_node": "tasting_menu",
|
| 329 |
+
"additional_info": {
|
| 330 |
+
"speaker": "Peppa",
|
| 331 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/4-3-peppa-1734597070861.mp3"
|
| 332 |
+
},
|
| 333 |
+
"requires_response": false
|
| 334 |
+
},
|
| 335 |
+
{
|
| 336 |
+
"type": "Text_Choice",
|
| 337 |
+
"node_name": "tasting_menu",
|
| 338 |
+
"text_choices": ["Try First", "Share with George"],
|
| 339 |
+
"next_node": "tasting_intent"
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"type": "Intent",
|
| 343 |
+
"node_name": "tasting_intent",
|
| 344 |
+
"content": ["TRY_FIRST", "SHARE_WITH_GEORGE"],
|
| 345 |
+
"next_node": {
|
| 346 |
+
"TRY_FIRST": "try_first_response",
|
| 347 |
+
"SHARE_WITH_GEORGE": "share_with_george_response",
|
| 348 |
+
"other": "other_5"
|
| 349 |
+
}
|
| 350 |
+
},
|
| 351 |
+
{
|
| 352 |
+
"type": "Agent",
|
| 353 |
+
"node_name": "try_first_response",
|
| 354 |
+
"text_candidates": ["(\n*munching sounds*\n) Mmm, this is so yummy! George, you're going to love it too!"],
|
| 355 |
+
"next_node": "mmm_response",
|
| 356 |
+
"additional_info": {
|
| 357 |
+
"speaker": "Peppa",
|
| 358 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-1-answer-peppa-1734597071972.mp3"
|
| 359 |
+
},
|
| 360 |
+
"requires_response": false
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"type": "Agent",
|
| 364 |
+
"node_name": "share_with_george_response",
|
| 365 |
+
"text_candidates": ["(\n*Fork clinking*\n) Here you go, George! But don't eat it all!"],
|
| 366 |
+
"next_node": "mmm_response",
|
| 367 |
+
"additional_info": {
|
| 368 |
+
"speaker": "Peppa",
|
| 369 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-2-answer-peppa-1734597071972.mp3"
|
| 370 |
+
},
|
| 371 |
+
"requires_response": false
|
| 372 |
+
},
|
| 373 |
+
{
|
| 374 |
+
"type": "Agent",
|
| 375 |
+
"node_name": "mmm_response",
|
| 376 |
+
"text_candidates": ["Mmm! This is the best salad I've ever had! More, please! (\n*Peppa's giggle]*\n)"],
|
| 377 |
+
"next_node": "agent_yummy_salad",
|
| 378 |
+
"additional_info": {
|
| 379 |
+
"speaker": "Peppa",
|
| 380 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/4-11-peppa-1734597070861.mp3"
|
| 381 |
+
},
|
| 382 |
+
"requires_response": false
|
| 383 |
+
},
|
| 384 |
+
{
|
| 385 |
+
"type": "Agent",
|
| 386 |
+
"node_name": "agent_yummy_salad",
|
| 387 |
+
"text_candidates": ["That was such a yummy salad! Thank you for helping me make it. Cooking is so much fun when we do it together!"],
|
| 388 |
+
"next_node": "next_cooking_prompt",
|
| 389 |
+
"additional_info": {
|
| 390 |
+
"speaker": "Peppa",
|
| 391 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/3-8-peppa-1734597070861.mp3"
|
| 392 |
+
},
|
| 393 |
+
"requires_response": false
|
| 394 |
+
},
|
| 395 |
+
{
|
| 396 |
+
"type": "Agent",
|
| 397 |
+
"node_name": "next_cooking_prompt",
|
| 398 |
+
"text_candidates": ["What should Peppa make next time? Should she bake cookies or make sandwiches?"],
|
| 399 |
+
"next_node": "next_cooking_menu",
|
| 400 |
+
"additional_info": {
|
| 401 |
+
"speaker": "Narrator",
|
| 402 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-3-question-narrator-1734597071881.mp3"
|
| 403 |
+
},
|
| 404 |
+
"requires_response": false
|
| 405 |
+
},
|
| 406 |
+
{
|
| 407 |
+
"type": "Text_Choice",
|
| 408 |
+
"node_name": "next_cooking_menu",
|
| 409 |
+
"text_choices": ["Cookies", "Sandwiches"],
|
| 410 |
+
"next_node": "next_cooking_intent"
|
| 411 |
+
},
|
| 412 |
+
{
|
| 413 |
+
"type": "Intent",
|
| 414 |
+
"node_name": "next_cooking_intent",
|
| 415 |
+
"content": ["COOKIES", "SANDWICHES"],
|
| 416 |
+
"next_node": {
|
| 417 |
+
"COOKIES": "cookies_response",
|
| 418 |
+
"SANDWICHES": "sandwiches_response",
|
| 419 |
+
"other": "other_6"
|
| 420 |
+
}
|
| 421 |
+
},
|
| 422 |
+
{
|
| 423 |
+
"type": "Agent",
|
| 424 |
+
"node_name": "cookies_response",
|
| 425 |
+
"text_candidates": ["(\n*Fork clinking*\n) Mmm, cookies sound sweet and yummy!"],
|
| 426 |
+
"next_node": "closing_narration",
|
| 427 |
+
"additional_info": {
|
| 428 |
+
"speaker": "Peppa",
|
| 429 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/0-3-answer-peppa-1734597071972.mp3"
|
| 430 |
+
},
|
| 431 |
+
"requires_response": false
|
| 432 |
+
},
|
| 433 |
+
{
|
| 434 |
+
"type": "Agent",
|
| 435 |
+
"node_name": "sandwiches_response",
|
| 436 |
+
"text_candidates": ["(\n*Peppa's giggle]*\n) Sandwiches are perfect for lunch! I can't wait!"],
|
| 437 |
+
"next_node": "closing_narration",
|
| 438 |
+
"additional_info": {
|
| 439 |
+
"speaker": "Peppa",
|
| 440 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/1-4-answer-peppa-1734597071972.mp3"
|
| 441 |
+
},
|
| 442 |
+
"requires_response": false
|
| 443 |
+
},
|
| 444 |
+
{
|
| 445 |
+
"type": "Agent",
|
| 446 |
+
"node_name": "closing_narration",
|
| 447 |
+
"text_candidates": ["Peppa had a great time preparing lunch. Thanks to your help, her salad was a big success!"],
|
| 448 |
+
"next_node": "final_goodbye",
|
| 449 |
+
"additional_info": {
|
| 450 |
+
"speaker": "Narrator",
|
| 451 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-6-narrator-1734597070861.mp3"
|
| 452 |
+
},
|
| 453 |
+
"requires_response": false
|
| 454 |
+
},
|
| 455 |
+
{
|
| 456 |
+
"type": "Agent",
|
| 457 |
+
"node_name": "final_goodbye",
|
| 458 |
+
"text_candidates": ["(\n*Peppa's giggle*\n) Let's make something else together next time! Bye-bye! (\n*a cheerful chime*\n)"],
|
| 459 |
+
"next_node": "end",
|
| 460 |
+
"additional_info": {
|
| 461 |
+
"speaker": "Peppa",
|
| 462 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-peppa-1734597070861.mp3"
|
| 463 |
+
},
|
| 464 |
+
"requires_response": false
|
| 465 |
+
},
|
| 466 |
+
{
|
| 467 |
+
"type": "Agent",
|
| 468 |
+
"node_name": "closing_narration_premature",
|
| 469 |
+
"text_candidates": ["Let's make something else together next time! Bye-bye!"],
|
| 470 |
+
"next_node": "end",
|
| 471 |
+
"additional_info": {
|
| 472 |
+
"speaker": "Peppa",
|
| 473 |
+
"audio_url": "https://storytellings.s3.ap-southeast-1.amazonaws.com/dev/guided/2-7-peppa-1734597070861.mp3"
|
| 474 |
+
},
|
| 475 |
+
"requires_response": false
|
| 476 |
+
},
|
| 477 |
+
{
|
| 478 |
+
"type": "Agent",
|
| 479 |
+
"node_name": "other_0",
|
| 480 |
+
"text_candidates": [
|
| 481 |
+
"Not quite! Try again!",
|
| 482 |
+
"Give it another go!",
|
| 483 |
+
"Let’s try that one more time!",
|
| 484 |
+
"Try again—I know you can do it!",
|
| 485 |
+
"Let’s give it another try!"
|
| 486 |
+
],
|
| 487 |
+
"next_node": "vegetable_selection",
|
| 488 |
+
"requires_response": false,
|
| 489 |
+
"additional_info": {
|
| 490 |
+
"speaker": "Narrator"
|
| 491 |
+
}
|
| 492 |
+
},
|
| 493 |
+
{
|
| 494 |
+
"type": "Agent",
|
| 495 |
+
"node_name": "other_1",
|
| 496 |
+
"text_candidates": [
|
| 497 |
+
"Not quite! Try again!",
|
| 498 |
+
"Give it another go!",
|
| 499 |
+
"Let’s try that one more time!",
|
| 500 |
+
"Try again—I know you can do it!",
|
| 501 |
+
"Let’s give it another try!"
|
| 502 |
+
],
|
| 503 |
+
"next_node": "spoon_cleanup",
|
| 504 |
+
"requires_response": false,
|
| 505 |
+
"additional_info": {
|
| 506 |
+
"speaker": "Narrator"
|
| 507 |
+
}
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"type": "Agent",
|
| 511 |
+
"node_name": "other_2",
|
| 512 |
+
"text_candidates": [
|
| 513 |
+
"Not quite! Try again!",
|
| 514 |
+
"Give it another go!",
|
| 515 |
+
"Let’s try that one more time!",
|
| 516 |
+
"Try again—I know you can do it!",
|
| 517 |
+
"Let’s give it another try!"
|
| 518 |
+
],
|
| 519 |
+
"next_node": "mix_intent_first",
|
| 520 |
+
"requires_response": true,
|
| 521 |
+
"additional_info": {
|
| 522 |
+
"speaker": "Narrator"
|
| 523 |
+
}
|
| 524 |
+
},
|
| 525 |
+
{
|
| 526 |
+
"type": "Agent",
|
| 527 |
+
"node_name": "other_3",
|
| 528 |
+
"text_candidates": [
|
| 529 |
+
"Not quite! Try again!",
|
| 530 |
+
"Give it another go!",
|
| 531 |
+
"Let’s try that one more time!",
|
| 532 |
+
"Try again—I know you can do it!",
|
| 533 |
+
"Let’s give it another try!"
|
| 534 |
+
],
|
| 535 |
+
"next_node": "mix_intent_second",
|
| 536 |
+
"requires_response": true,
|
| 537 |
+
"additional_info": {
|
| 538 |
+
"speaker": "Narrator"
|
| 539 |
+
}
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"type": "Agent",
|
| 543 |
+
"node_name": "other_4",
|
| 544 |
+
"text_candidates": [
|
| 545 |
+
"Not quite! Try again!",
|
| 546 |
+
"Give it another go!",
|
| 547 |
+
"Let’s try that one more time!",
|
| 548 |
+
"Try again—I know you can do it!",
|
| 549 |
+
"Let’s give it another try!"
|
| 550 |
+
],
|
| 551 |
+
"next_node": "mix_intent_third",
|
| 552 |
+
"requires_response": true,
|
| 553 |
+
"additional_info": {
|
| 554 |
+
"speaker": "Narrator"
|
| 555 |
+
}
|
| 556 |
+
},
|
| 557 |
+
{
|
| 558 |
+
"type": "Agent",
|
| 559 |
+
"node_name": "other_5",
|
| 560 |
+
"text_candidates": [
|
| 561 |
+
"Not quite! Try again!",
|
| 562 |
+
"Give it another go!",
|
| 563 |
+
"Let’s try that one more time!",
|
| 564 |
+
"Try again—I know you can do it!",
|
| 565 |
+
"Let’s give it another try!"
|
| 566 |
+
],
|
| 567 |
+
"next_node": "tasting_prompt",
|
| 568 |
+
"requires_response": false,
|
| 569 |
+
"additional_info": {
|
| 570 |
+
"speaker": "Narrator"
|
| 571 |
+
}
|
| 572 |
+
},
|
| 573 |
+
{
|
| 574 |
+
"type": "Agent",
|
| 575 |
+
"node_name": "other_6",
|
| 576 |
+
"text_candidates": [
|
| 577 |
+
"Not quite! Try again!",
|
| 578 |
+
"Give it another go!",
|
| 579 |
+
"Let’s try that one more time!",
|
| 580 |
+
"Try again—I know you can do it!",
|
| 581 |
+
"Let’s give it another try!"
|
| 582 |
+
],
|
| 583 |
+
"next_node": "next_cooking_prompt",
|
| 584 |
+
"requires_response": false,
|
| 585 |
+
"additional_info": {
|
| 586 |
+
"speaker": "Narrator"
|
| 587 |
+
}
|
| 588 |
+
}
|
| 589 |
+
]
|
| 590 |
+
}
|
config/mj_guided_second.json
ADDED
|
@@ -0,0 +1,960 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"intent_config": {
|
| 3 |
+
"rule_based": true,
|
| 4 |
+
"vector": false,
|
| 5 |
+
"llm": true,
|
| 6 |
+
"intent": [
|
| 7 |
+
{
|
| 8 |
+
"name": "CHEESE_STALL",
|
| 9 |
+
"samples": [
|
| 10 |
+
"cheese",
|
| 11 |
+
"cheese stall",
|
| 12 |
+
"visit cheese stall"
|
| 13 |
+
]
|
| 14 |
+
},
|
| 15 |
+
{
|
| 16 |
+
"name": "FRUIT_STALL",
|
| 17 |
+
"samples": [
|
| 18 |
+
"fruit",
|
| 19 |
+
"fruit stall",
|
| 20 |
+
"visit fruit stall"
|
| 21 |
+
]
|
| 22 |
+
},
|
| 23 |
+
{
|
| 24 |
+
"name": "TOY_STALL",
|
| 25 |
+
"samples": [
|
| 26 |
+
"toy",
|
| 27 |
+
"toy stall",
|
| 28 |
+
"visit toy stall"
|
| 29 |
+
]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"name": "SMELLY_CHEESE",
|
| 33 |
+
"samples": [
|
| 34 |
+
"smelly cheese",
|
| 35 |
+
"strong cheese",
|
| 36 |
+
"funny smelling cheese"
|
| 37 |
+
]
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"name": "CREAMY_CHEESE",
|
| 41 |
+
"samples": [
|
| 42 |
+
"creamy cheese",
|
| 43 |
+
"smooth cheese",
|
| 44 |
+
"soft cheese"
|
| 45 |
+
]
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"name": "APPLES",
|
| 49 |
+
"samples": [
|
| 50 |
+
"apples",
|
| 51 |
+
"apple",
|
| 52 |
+
"get apples"
|
| 53 |
+
]
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"name": "BANANAS",
|
| 57 |
+
"samples": [
|
| 58 |
+
"bananas",
|
| 59 |
+
"banana",
|
| 60 |
+
"get bananas"
|
| 61 |
+
]
|
| 62 |
+
},
|
| 63 |
+
{
|
| 64 |
+
"name": "TEDDY_BEAR",
|
| 65 |
+
"samples": [
|
| 66 |
+
"teddy",
|
| 67 |
+
"teddy bear",
|
| 68 |
+
"bear",
|
| 69 |
+
"mr snuggles"
|
| 70 |
+
]
|
| 71 |
+
},
|
| 72 |
+
{
|
| 73 |
+
"name": "SINGING_FISH",
|
| 74 |
+
"samples": [
|
| 75 |
+
"fish",
|
| 76 |
+
"singing fish",
|
| 77 |
+
"silly fish"
|
| 78 |
+
]
|
| 79 |
+
},
|
| 80 |
+
{
|
| 81 |
+
"name": "FOOD",
|
| 82 |
+
"samples": [
|
| 83 |
+
"food",
|
| 84 |
+
"food pile",
|
| 85 |
+
"sort food"
|
| 86 |
+
]
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
"name": "TOYS",
|
| 90 |
+
"samples": [
|
| 91 |
+
"toys",
|
| 92 |
+
"toy pile",
|
| 93 |
+
"sort toys"
|
| 94 |
+
]
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"name": "ICE_CREAM",
|
| 98 |
+
"samples": [
|
| 99 |
+
"ice cream",
|
| 100 |
+
"ice-cream",
|
| 101 |
+
"get ice cream"
|
| 102 |
+
]
|
| 103 |
+
},
|
| 104 |
+
{
|
| 105 |
+
"name": "LOLLIPOP",
|
| 106 |
+
"samples": [
|
| 107 |
+
"lollipop",
|
| 108 |
+
"lolly",
|
| 109 |
+
"get lollipop"
|
| 110 |
+
]
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"name": "STOP",
|
| 114 |
+
"samples": [
|
| 115 |
+
"stop",
|
| 116 |
+
"let's end here"
|
| 117 |
+
]
|
| 118 |
+
},
|
| 119 |
+
{
|
| 120 |
+
"name": "other",
|
| 121 |
+
"samples": [
|
| 122 |
+
"I don't know"
|
| 123 |
+
]
|
| 124 |
+
}
|
| 125 |
+
],
|
| 126 |
+
"global_intent": [
|
| 127 |
+
{
|
| 128 |
+
"name": "STOP",
|
| 129 |
+
"samples": [
|
| 130 |
+
"stop",
|
| 131 |
+
"let's end here"
|
| 132 |
+
]
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"name": "other",
|
| 136 |
+
"samples": [
|
| 137 |
+
"I don't know"
|
| 138 |
+
]
|
| 139 |
+
}
|
| 140 |
+
]
|
| 141 |
+
},
|
| 142 |
+
"workflow": [
|
| 143 |
+
{
|
| 144 |
+
"type": "Play",
|
| 145 |
+
"node_name": "opening_background_music",
|
| 146 |
+
"audio_url_candidates": [
|
| 147 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/local/from-admin/1734534796395.mp3"
|
| 148 |
+
],
|
| 149 |
+
"next_node": "market_ambiance",
|
| 150 |
+
"requires_response": false
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"type": "Play",
|
| 154 |
+
"node_name": "market_ambiance",
|
| 155 |
+
"audio_url_candidates": [
|
| 156 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Market ambiance.mp3"
|
| 157 |
+
],
|
| 158 |
+
"next_node": "opening_narration",
|
| 159 |
+
"requires_response": false
|
| 160 |
+
},
|
| 161 |
+
{
|
| 162 |
+
"type": "text_read_along",
|
| 163 |
+
"node_name": "opening_narration",
|
| 164 |
+
"text_candidates": [
|
| 165 |
+
"Today, Peppa and her family are visiting the market! There are so many stalls to explore. Let's help Peppa find some yummy things to buy!"
|
| 166 |
+
],
|
| 167 |
+
"next_node": "peppa_greeting",
|
| 168 |
+
"additional_info": {
|
| 169 |
+
"speaker": "Narrator"
|
| 170 |
+
},
|
| 171 |
+
"requires_response": false,
|
| 172 |
+
"audio_url_candidates": [
|
| 173 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/opening_narration.mp3"
|
| 174 |
+
]
|
| 175 |
+
},
|
| 176 |
+
{
|
| 177 |
+
"type": "text_read_along",
|
| 178 |
+
"node_name": "peppa_greeting",
|
| 179 |
+
"text_candidates": [
|
| 180 |
+
"Look at all the stalls! I want to pick something fun. Can you help me?"
|
| 181 |
+
],
|
| 182 |
+
"next_node": "stall_selection",
|
| 183 |
+
"additional_info": {
|
| 184 |
+
"speaker": "Peppa"
|
| 185 |
+
},
|
| 186 |
+
"requires_response": false,
|
| 187 |
+
"audio_url_candidates": [
|
| 188 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/peppa_greeting.mp3"
|
| 189 |
+
]
|
| 190 |
+
},
|
| 191 |
+
{
|
| 192 |
+
"type": "text_read_along",
|
| 193 |
+
"node_name": "stall_selection",
|
| 194 |
+
"text_candidates": [
|
| 195 |
+
"Which stall should Peppa visit first? The cheese stall, the fruit stall, or the toy stall?"
|
| 196 |
+
],
|
| 197 |
+
"next_node": "stall_menu",
|
| 198 |
+
"additional_info": {
|
| 199 |
+
"speaker": "Narrator"
|
| 200 |
+
},
|
| 201 |
+
"requires_response": false,
|
| 202 |
+
"audio_url_candidates": [
|
| 203 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/stall_selection.mp3"
|
| 204 |
+
]
|
| 205 |
+
},
|
| 206 |
+
{
|
| 207 |
+
"type": "Text_Choice",
|
| 208 |
+
"node_name": "stall_menu",
|
| 209 |
+
"text_choices": [
|
| 210 |
+
"Cheese",
|
| 211 |
+
"Fruit",
|
| 212 |
+
"Toy"
|
| 213 |
+
],
|
| 214 |
+
"next_node": "stall_intent"
|
| 215 |
+
},
|
| 216 |
+
{
|
| 217 |
+
"type": "Intent",
|
| 218 |
+
"node_name": "stall_intent",
|
| 219 |
+
"content": [
|
| 220 |
+
"CHEESE_STALL",
|
| 221 |
+
"FRUIT_STALL",
|
| 222 |
+
"TOY_STALL",
|
| 223 |
+
"STOP"
|
| 224 |
+
],
|
| 225 |
+
"next_node": {
|
| 226 |
+
"CHEESE_STALL": "cheese_stall_response",
|
| 227 |
+
"FRUIT_STALL": "fruit_stall_response",
|
| 228 |
+
"TOY_STALL": "toy_stall_response",
|
| 229 |
+
"STOP": "closing_narration_premature",
|
| 230 |
+
"other": "other_0"
|
| 231 |
+
}
|
| 232 |
+
},
|
| 233 |
+
{
|
| 234 |
+
"type": "text_read_along",
|
| 235 |
+
"node_name": "cheese_stall_response",
|
| 236 |
+
"text_candidates": [
|
| 237 |
+
"Mmm, I love cheese! Let's find one for Mummy Pig."
|
| 238 |
+
],
|
| 239 |
+
"next_node": "vendor_sounds_cheese",
|
| 240 |
+
"additional_info": {
|
| 241 |
+
"speaker": "Peppa"
|
| 242 |
+
},
|
| 243 |
+
"requires_response": false,
|
| 244 |
+
"audio_url_candidates": [
|
| 245 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_stall_response.mp3"
|
| 246 |
+
]
|
| 247 |
+
},
|
| 248 |
+
{
|
| 249 |
+
"type": "Play",
|
| 250 |
+
"node_name": "vendor_sounds_cheese",
|
| 251 |
+
"audio_url_candidates": [
|
| 252 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 253 |
+
],
|
| 254 |
+
"next_node": "peppa_cheese_selection",
|
| 255 |
+
"requires_response": false
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"type": "text_read_along",
|
| 259 |
+
"node_name": "peppa_cheese_selection",
|
| 260 |
+
"text_candidates": [
|
| 261 |
+
"Let\u2019s pick something from this stall!"
|
| 262 |
+
],
|
| 263 |
+
"next_node": "cheese_selection",
|
| 264 |
+
"additional_info": {
|
| 265 |
+
"speaker": "Peppa"
|
| 266 |
+
},
|
| 267 |
+
"requires_response": false,
|
| 268 |
+
"audio_url_candidates": [
|
| 269 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_selection.mp3"
|
| 270 |
+
]
|
| 271 |
+
},
|
| 272 |
+
{
|
| 273 |
+
"type": "text_read_along",
|
| 274 |
+
"node_name": "cheese_selection",
|
| 275 |
+
"text_candidates": [
|
| 276 |
+
"What should Peppa choose, smelly cheese or creamy cheese? Help her pick one item!"
|
| 277 |
+
],
|
| 278 |
+
"next_node": "cheese_menu",
|
| 279 |
+
"additional_info": {
|
| 280 |
+
"speaker": "Narrator"
|
| 281 |
+
},
|
| 282 |
+
"requires_response": false,
|
| 283 |
+
"audio_url_candidates": [
|
| 284 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_selection.mp3"
|
| 285 |
+
]
|
| 286 |
+
},
|
| 287 |
+
{
|
| 288 |
+
"type": "Text_Choice",
|
| 289 |
+
"node_name": "cheese_menu",
|
| 290 |
+
"text_choices": [
|
| 291 |
+
"Smelly Cheese",
|
| 292 |
+
"Creamy Cheese"
|
| 293 |
+
],
|
| 294 |
+
"next_node": "cheese_choice_intent"
|
| 295 |
+
},
|
| 296 |
+
{
|
| 297 |
+
"type": "Intent",
|
| 298 |
+
"node_name": "cheese_choice_intent",
|
| 299 |
+
"content": [
|
| 300 |
+
"SMELLY_CHEESE",
|
| 301 |
+
"CREAMY_CHEESE",
|
| 302 |
+
"STOP"
|
| 303 |
+
],
|
| 304 |
+
"next_node": {
|
| 305 |
+
"SMELLY_CHEESE": "smelly_cheese_response",
|
| 306 |
+
"CREAMY_CHEESE": "creamy_cheese_response",
|
| 307 |
+
"STOP": "closing_narration_premature",
|
| 308 |
+
"other": "other_1"
|
| 309 |
+
}
|
| 310 |
+
},
|
| 311 |
+
{
|
| 312 |
+
"type": "text_read_along",
|
| 313 |
+
"node_name": "fruit_stall_response",
|
| 314 |
+
"text_candidates": [
|
| 315 |
+
"Yummy, I love fruit! Let's find something juicy."
|
| 316 |
+
],
|
| 317 |
+
"next_node": "vendor_sounds_fruit",
|
| 318 |
+
"additional_info": {
|
| 319 |
+
"speaker": "Peppa"
|
| 320 |
+
},
|
| 321 |
+
"requires_response": false,
|
| 322 |
+
"audio_url_candidates": [
|
| 323 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fruit_stall_response.mp3"
|
| 324 |
+
]
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"type": "Play",
|
| 328 |
+
"node_name": "vendor_sounds_fruit",
|
| 329 |
+
"audio_url_candidates": [
|
| 330 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 331 |
+
],
|
| 332 |
+
"next_node": "peppa_fruit_selection",
|
| 333 |
+
"requires_response": false
|
| 334 |
+
},
|
| 335 |
+
{
|
| 336 |
+
"type": "text_read_along",
|
| 337 |
+
"node_name": "peppa_fruit_selection",
|
| 338 |
+
"text_candidates": [
|
| 339 |
+
"Let\u2019s pick something from this stall!"
|
| 340 |
+
],
|
| 341 |
+
"next_node": "fruit_selection",
|
| 342 |
+
"additional_info": {
|
| 343 |
+
"speaker": "Peppa"
|
| 344 |
+
},
|
| 345 |
+
"requires_response": false,
|
| 346 |
+
"audio_url_candidates": [
|
| 347 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/narrator_toy_selection.mp3"
|
| 348 |
+
]
|
| 349 |
+
},
|
| 350 |
+
{
|
| 351 |
+
"type": "text_read_along",
|
| 352 |
+
"node_name": "fruit_selection",
|
| 353 |
+
"text_candidates": [
|
| 354 |
+
"What should Peppa choose, apples or bananas? Help her pick one item!"
|
| 355 |
+
],
|
| 356 |
+
"next_node": "fruit_menu",
|
| 357 |
+
"additional_info": {
|
| 358 |
+
"speaker": "Narrator"
|
| 359 |
+
},
|
| 360 |
+
"requires_response": false,
|
| 361 |
+
"audio_url_candidates": [
|
| 362 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fruit_selection.mp3"
|
| 363 |
+
]
|
| 364 |
+
},
|
| 365 |
+
{
|
| 366 |
+
"type": "Text_Choice",
|
| 367 |
+
"node_name": "fruit_menu",
|
| 368 |
+
"text_choices": [
|
| 369 |
+
"Apples",
|
| 370 |
+
"Bananas"
|
| 371 |
+
],
|
| 372 |
+
"next_node": "fruit_choice_intent"
|
| 373 |
+
},
|
| 374 |
+
{
|
| 375 |
+
"type": "Intent",
|
| 376 |
+
"node_name": "fruit_choice_intent",
|
| 377 |
+
"content": [
|
| 378 |
+
"APPLES",
|
| 379 |
+
"BANANAS",
|
| 380 |
+
"STOP"
|
| 381 |
+
],
|
| 382 |
+
"next_node": {
|
| 383 |
+
"APPLES": "apples_response",
|
| 384 |
+
"BANANAS": "bananas_response",
|
| 385 |
+
"STOP": "closing_narration_premature",
|
| 386 |
+
"other": "other_2"
|
| 387 |
+
}
|
| 388 |
+
},
|
| 389 |
+
{
|
| 390 |
+
"type": "text_read_along",
|
| 391 |
+
"node_name": "toy_stall_response",
|
| 392 |
+
"text_candidates": [
|
| 393 |
+
"Oh look, a toy stall! What fun things can we find here?"
|
| 394 |
+
],
|
| 395 |
+
"next_node": "vendor_sounds_toy",
|
| 396 |
+
"additional_info": {
|
| 397 |
+
"speaker": "Peppa"
|
| 398 |
+
},
|
| 399 |
+
"requires_response": false,
|
| 400 |
+
"audio_url_candidates": [
|
| 401 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toy_stall_response.mp3"
|
| 402 |
+
]
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"type": "Play",
|
| 406 |
+
"node_name": "vendor_sounds_toy",
|
| 407 |
+
"audio_url_candidates": [
|
| 408 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 409 |
+
],
|
| 410 |
+
"next_node": "peppa_toy_selection",
|
| 411 |
+
"requires_response": false
|
| 412 |
+
},
|
| 413 |
+
{
|
| 414 |
+
"type": "text_read_along",
|
| 415 |
+
"node_name": "peppa_toy_selection",
|
| 416 |
+
"text_candidates": [
|
| 417 |
+
"Let\u2019s pick something from this stall!"
|
| 418 |
+
],
|
| 419 |
+
"next_node": "toy_selection",
|
| 420 |
+
"additional_info": {
|
| 421 |
+
"speaker": "Peppa"
|
| 422 |
+
},
|
| 423 |
+
"requires_response": false,
|
| 424 |
+
"audio_url_candidates": [
|
| 425 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/narrator_toy_selection.mp3"
|
| 426 |
+
]
|
| 427 |
+
},
|
| 428 |
+
{
|
| 429 |
+
"type": "text_read_along",
|
| 430 |
+
"node_name": "toy_selection",
|
| 431 |
+
"text_candidates": [
|
| 432 |
+
"What should Peppa choose, a Teddy Bear of a singing fish toy? Help her pick one item!"
|
| 433 |
+
],
|
| 434 |
+
"next_node": "toy_menu",
|
| 435 |
+
"additional_info": {
|
| 436 |
+
"speaker": "Narrator"
|
| 437 |
+
},
|
| 438 |
+
"requires_response": false,
|
| 439 |
+
"audio_url_candidates": [
|
| 440 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toy_selection.mp3"
|
| 441 |
+
]
|
| 442 |
+
},
|
| 443 |
+
{
|
| 444 |
+
"type": "Text_Choice",
|
| 445 |
+
"node_name": "toy_menu",
|
| 446 |
+
"text_choices": [
|
| 447 |
+
"Teddy Bear",
|
| 448 |
+
"Singing Fish"
|
| 449 |
+
],
|
| 450 |
+
"next_node": "toy_choice_intent"
|
| 451 |
+
},
|
| 452 |
+
{
|
| 453 |
+
"type": "Intent",
|
| 454 |
+
"node_name": "toy_choice_intent",
|
| 455 |
+
"content": [
|
| 456 |
+
"TEDDY_BEAR",
|
| 457 |
+
"SINGING_FISH",
|
| 458 |
+
"STOP"
|
| 459 |
+
],
|
| 460 |
+
"next_node": {
|
| 461 |
+
"TEDDY_BEAR": "teddy_response",
|
| 462 |
+
"SINGING_FISH": "fish_response",
|
| 463 |
+
"STOP": "closing_narration_premature",
|
| 464 |
+
"other": "other_3"
|
| 465 |
+
}
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"type": "text_read_along",
|
| 469 |
+
"node_name": "smelly_cheese_response",
|
| 470 |
+
"text_candidates": [
|
| 471 |
+
"This one smells funny! Mummy Pig will love it!"
|
| 472 |
+
],
|
| 473 |
+
"next_node": "sound_basket",
|
| 474 |
+
"additional_info": {
|
| 475 |
+
"speaker": "Peppa"
|
| 476 |
+
},
|
| 477 |
+
"requires_response": false,
|
| 478 |
+
"audio_url_candidates": [
|
| 479 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/smelly_cheese_response.mp3"
|
| 480 |
+
]
|
| 481 |
+
},
|
| 482 |
+
{
|
| 483 |
+
"type": "text_read_along",
|
| 484 |
+
"node_name": "creamy_cheese_response",
|
| 485 |
+
"text_candidates": [
|
| 486 |
+
"So smooth! I think Daddy Pig will like this one."
|
| 487 |
+
],
|
| 488 |
+
"next_node": "sound_basket",
|
| 489 |
+
"additional_info": {
|
| 490 |
+
"speaker": "Peppa"
|
| 491 |
+
},
|
| 492 |
+
"requires_response": false,
|
| 493 |
+
"audio_url_candidates": [
|
| 494 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/creamy_cheese_response.mp3"
|
| 495 |
+
]
|
| 496 |
+
},
|
| 497 |
+
{
|
| 498 |
+
"type": "text_read_along",
|
| 499 |
+
"node_name": "apples_response",
|
| 500 |
+
"text_candidates": [
|
| 501 |
+
"Crunchy and sweet! Apples are my favorite!"
|
| 502 |
+
],
|
| 503 |
+
"next_node": "sound_basket",
|
| 504 |
+
"additional_info": {
|
| 505 |
+
"speaker": "Peppa"
|
| 506 |
+
},
|
| 507 |
+
"requires_response": false,
|
| 508 |
+
"audio_url_candidates": [
|
| 509 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/apples_response.mp3"
|
| 510 |
+
]
|
| 511 |
+
},
|
| 512 |
+
{
|
| 513 |
+
"type": "text_read_along",
|
| 514 |
+
"node_name": "bananas_response",
|
| 515 |
+
"text_candidates": [
|
| 516 |
+
"Bananas are so yummy! George will love these!"
|
| 517 |
+
],
|
| 518 |
+
"next_node": "sound_basket",
|
| 519 |
+
"additional_info": {
|
| 520 |
+
"speaker": "Peppa"
|
| 521 |
+
},
|
| 522 |
+
"requires_response": false,
|
| 523 |
+
"audio_url_candidates": [
|
| 524 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/bananas_response.mp3"
|
| 525 |
+
]
|
| 526 |
+
},
|
| 527 |
+
{
|
| 528 |
+
"type": "text_read_along",
|
| 529 |
+
"node_name": "teddy_response",
|
| 530 |
+
"text_candidates": [
|
| 531 |
+
"It's so cuddly! I'll name it Mr. Snuggles."
|
| 532 |
+
],
|
| 533 |
+
"next_node": "sound_basket",
|
| 534 |
+
"additional_info": {
|
| 535 |
+
"speaker": "Peppa"
|
| 536 |
+
},
|
| 537 |
+
"requires_response": false,
|
| 538 |
+
"audio_url_candidates": [
|
| 539 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/teddy_response.mp3"
|
| 540 |
+
]
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"type": "text_read_along",
|
| 544 |
+
"node_name": "fish_response",
|
| 545 |
+
"text_candidates": [
|
| 546 |
+
"Hehe, this fish is so silly! It sings a funny song!"
|
| 547 |
+
],
|
| 548 |
+
"next_node": "sound_basket",
|
| 549 |
+
"additional_info": {
|
| 550 |
+
"speaker": "Peppa"
|
| 551 |
+
},
|
| 552 |
+
"requires_response": false,
|
| 553 |
+
"audio_url_candidates": [
|
| 554 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fish_response.mp3"
|
| 555 |
+
]
|
| 556 |
+
},
|
| 557 |
+
{
|
| 558 |
+
"type": "Play",
|
| 559 |
+
"node_name": "sound_basket",
|
| 560 |
+
"audio_url_candidates": [
|
| 561 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Rustling bags, clinking items.mp3"
|
| 562 |
+
],
|
| 563 |
+
"next_node": "basket_prompt",
|
| 564 |
+
"requires_response": false
|
| 565 |
+
},
|
| 566 |
+
{
|
| 567 |
+
"type": "text_read_along",
|
| 568 |
+
"node_name": "basket_prompt",
|
| 569 |
+
"text_candidates": [
|
| 570 |
+
"Oh no! My basket is getting so heavy, and everything is mixed up! Let's tidy it up so I can carry it better!"
|
| 571 |
+
],
|
| 572 |
+
"next_node": "sorting_prompt",
|
| 573 |
+
"additional_info": {
|
| 574 |
+
"speaker": "Peppa"
|
| 575 |
+
},
|
| 576 |
+
"requires_response": false,
|
| 577 |
+
"audio_url_candidates": [
|
| 578 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/basket_prompt.mp3"
|
| 579 |
+
]
|
| 580 |
+
},
|
| 581 |
+
{
|
| 582 |
+
"type": "text_read_along",
|
| 583 |
+
"node_name": "sorting_prompt",
|
| 584 |
+
"text_candidates": [
|
| 585 |
+
"Help Peppa sort her items! Put the food in one pile and the toys in another. Which should go first?"
|
| 586 |
+
],
|
| 587 |
+
"next_node": "sorting_menu",
|
| 588 |
+
"additional_info": {
|
| 589 |
+
"speaker": "Narrator"
|
| 590 |
+
},
|
| 591 |
+
"requires_response": false,
|
| 592 |
+
"audio_url_candidates": [
|
| 593 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/sorting_prompt.mp3"
|
| 594 |
+
]
|
| 595 |
+
},
|
| 596 |
+
{
|
| 597 |
+
"type": "Text_Choice",
|
| 598 |
+
"node_name": "sorting_menu",
|
| 599 |
+
"text_choices": [
|
| 600 |
+
"Food",
|
| 601 |
+
"Toys"
|
| 602 |
+
],
|
| 603 |
+
"next_node": "sorting_intent"
|
| 604 |
+
},
|
| 605 |
+
{
|
| 606 |
+
"type": "Intent",
|
| 607 |
+
"node_name": "sorting_intent",
|
| 608 |
+
"content": [
|
| 609 |
+
"FOOD",
|
| 610 |
+
"TOYS",
|
| 611 |
+
"STOP"
|
| 612 |
+
],
|
| 613 |
+
"next_node": {
|
| 614 |
+
"FOOD": "food_sorting_response",
|
| 615 |
+
"TOYS": "toys_sorting_response",
|
| 616 |
+
"STOP": "closing_narration_premature",
|
| 617 |
+
"other": "other_4"
|
| 618 |
+
}
|
| 619 |
+
},
|
| 620 |
+
{
|
| 621 |
+
"type": "text_read_along",
|
| 622 |
+
"node_name": "food_sorting_response",
|
| 623 |
+
"text_candidates": [
|
| 624 |
+
"Great choice! Let's put the apples and cheese together. All neat and tidy!"
|
| 625 |
+
],
|
| 626 |
+
"next_node": "organization_thanks",
|
| 627 |
+
"additional_info": {
|
| 628 |
+
"speaker": "Peppa"
|
| 629 |
+
},
|
| 630 |
+
"requires_response": false,
|
| 631 |
+
"audio_url_candidates": [
|
| 632 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/food_sorting_response.mp3"
|
| 633 |
+
]
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"type": "text_read_along",
|
| 637 |
+
"node_name": "toys_sorting_response",
|
| 638 |
+
"text_candidates": [
|
| 639 |
+
"Good idea! Let's put the teddy bear and singing fish in their own spot!"
|
| 640 |
+
],
|
| 641 |
+
"next_node": "organization_thanks",
|
| 642 |
+
"additional_info": {
|
| 643 |
+
"speaker": "Peppa"
|
| 644 |
+
},
|
| 645 |
+
"requires_response": false,
|
| 646 |
+
"audio_url_candidates": [
|
| 647 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toys_sorting_response.mp3"
|
| 648 |
+
]
|
| 649 |
+
},
|
| 650 |
+
{
|
| 651 |
+
"type": "text_read_along",
|
| 652 |
+
"node_name": "organization_thanks",
|
| 653 |
+
"text_candidates": [
|
| 654 |
+
"Thank you for helping me organize my basket! Now it's so much easier to carry."
|
| 655 |
+
],
|
| 656 |
+
"next_node": "treat_sounds",
|
| 657 |
+
"additional_info": {
|
| 658 |
+
"speaker": "Peppa"
|
| 659 |
+
},
|
| 660 |
+
"requires_response": false,
|
| 661 |
+
"audio_url_candidates": [
|
| 662 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/organization_thanks.mp3"
|
| 663 |
+
]
|
| 664 |
+
},
|
| 665 |
+
{
|
| 666 |
+
"type": "Play",
|
| 667 |
+
"node_name": "treat_sounds",
|
| 668 |
+
"audio_url_candidates": [
|
| 669 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Rustling bags, cheerful chatter.mp3"
|
| 670 |
+
],
|
| 671 |
+
"next_node": "treat_prompt",
|
| 672 |
+
"requires_response": false
|
| 673 |
+
},
|
| 674 |
+
{
|
| 675 |
+
"type": "text_read_along",
|
| 676 |
+
"node_name": "treat_prompt",
|
| 677 |
+
"text_candidates": [
|
| 678 |
+
"Before we leave, I want to get a treat! What should I choose?"
|
| 679 |
+
],
|
| 680 |
+
"next_node": "treat_selection_prompt",
|
| 681 |
+
"additional_info": {
|
| 682 |
+
"speaker": "Peppa"
|
| 683 |
+
},
|
| 684 |
+
"requires_response": false,
|
| 685 |
+
"audio_url_candidates": [
|
| 686 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/treat_prompt.mp3"
|
| 687 |
+
]
|
| 688 |
+
},
|
| 689 |
+
{
|
| 690 |
+
"type": "text_read_along",
|
| 691 |
+
"node_name": "treat_selection_prompt",
|
| 692 |
+
"text_candidates": [
|
| 693 |
+
"Help Peppa pick a treat. Should she get an ice cream or a lollipop?"
|
| 694 |
+
],
|
| 695 |
+
"next_node": "treat_menu",
|
| 696 |
+
"additional_info": {
|
| 697 |
+
"speaker": "Narrator"
|
| 698 |
+
},
|
| 699 |
+
"requires_response": false,
|
| 700 |
+
"audio_url_candidates": [
|
| 701 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/treat_selection_prompt.mp3"
|
| 702 |
+
]
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"type": "Text_Choice",
|
| 706 |
+
"node_name": "treat_menu",
|
| 707 |
+
"text_choices": [
|
| 708 |
+
"Ice Cream",
|
| 709 |
+
"Lollipop"
|
| 710 |
+
],
|
| 711 |
+
"next_node": "treat_intent"
|
| 712 |
+
},
|
| 713 |
+
{
|
| 714 |
+
"type": "Intent",
|
| 715 |
+
"node_name": "treat_intent",
|
| 716 |
+
"content": [
|
| 717 |
+
"ICE_CREAM",
|
| 718 |
+
"LOLLIPOP",
|
| 719 |
+
"STOP"
|
| 720 |
+
],
|
| 721 |
+
"next_node": {
|
| 722 |
+
"ICE_CREAM": "ice_cream_response",
|
| 723 |
+
"LOLLIPOP": "lollipop_response",
|
| 724 |
+
"STOP": "closing_narration_premature",
|
| 725 |
+
"other": "other_5"
|
| 726 |
+
}
|
| 727 |
+
},
|
| 728 |
+
{
|
| 729 |
+
"type": "text_read_along",
|
| 730 |
+
"node_name": "ice_cream_response",
|
| 731 |
+
"text_candidates": [
|
| 732 |
+
"Yummy! Ice cream is the best on a sunny day!"
|
| 733 |
+
],
|
| 734 |
+
"next_node": "closing_narration",
|
| 735 |
+
"additional_info": {
|
| 736 |
+
"speaker": "Peppa"
|
| 737 |
+
},
|
| 738 |
+
"requires_response": false,
|
| 739 |
+
"audio_url_candidates": [
|
| 740 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/ice_cream_response.mp3"
|
| 741 |
+
]
|
| 742 |
+
},
|
| 743 |
+
{
|
| 744 |
+
"type": "text_read_along",
|
| 745 |
+
"node_name": "lollipop_response",
|
| 746 |
+
"text_candidates": [
|
| 747 |
+
"This lollipop is so colorful! I'll save it for later."
|
| 748 |
+
],
|
| 749 |
+
"next_node": "closing_narration",
|
| 750 |
+
"additional_info": {
|
| 751 |
+
"speaker": "Peppa"
|
| 752 |
+
},
|
| 753 |
+
"requires_response": false,
|
| 754 |
+
"audio_url_candidates": [
|
| 755 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/lollipop_response.mp3"
|
| 756 |
+
]
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"type": "text_read_along",
|
| 760 |
+
"node_name": "closing_narration",
|
| 761 |
+
"text_candidates": [
|
| 762 |
+
"Peppa had a wonderful time at the market! Thanks to your help, she found some great things to take home."
|
| 763 |
+
],
|
| 764 |
+
"next_node": "final_goodbye",
|
| 765 |
+
"additional_info": {
|
| 766 |
+
"speaker": "Narrator"
|
| 767 |
+
},
|
| 768 |
+
"requires_response": false,
|
| 769 |
+
"audio_url_candidates": [
|
| 770 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/closing_narration.mp3"
|
| 771 |
+
]
|
| 772 |
+
},
|
| 773 |
+
{
|
| 774 |
+
"type": "text_read_along",
|
| 775 |
+
"node_name": "final_goodbye",
|
| 776 |
+
"text_candidates": [
|
| 777 |
+
"Markets are so much fun! Let's go shopping again soon!"
|
| 778 |
+
],
|
| 779 |
+
"next_node": "peppa_giggle",
|
| 780 |
+
"additional_info": {
|
| 781 |
+
"speaker": "Peppa"
|
| 782 |
+
},
|
| 783 |
+
"requires_response": false,
|
| 784 |
+
"audio_url_candidates": [
|
| 785 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/final_goodbye.mp3"
|
| 786 |
+
]
|
| 787 |
+
},
|
| 788 |
+
{
|
| 789 |
+
"type": "Play",
|
| 790 |
+
"node_name": "peppa_giggle",
|
| 791 |
+
"audio_url_candidates": [
|
| 792 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/peppa_giggle.mp3"
|
| 793 |
+
],
|
| 794 |
+
"next_node": "final_chime",
|
| 795 |
+
"requires_response": false
|
| 796 |
+
},
|
| 797 |
+
{
|
| 798 |
+
"type": "Play",
|
| 799 |
+
"node_name": "final_chime",
|
| 800 |
+
"audio_url_candidates": [
|
| 801 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheerful chime.mp3"
|
| 802 |
+
],
|
| 803 |
+
"next_node": "end",
|
| 804 |
+
"requires_response": false
|
| 805 |
+
},
|
| 806 |
+
{
|
| 807 |
+
"type": "text_read_along",
|
| 808 |
+
"node_name": "closing_narration_premature",
|
| 809 |
+
"text_candidates": [
|
| 810 |
+
"Let's go shopping again another time! Bye-bye!"
|
| 811 |
+
],
|
| 812 |
+
"next_node": "end",
|
| 813 |
+
"additional_info": {
|
| 814 |
+
"speaker": "Peppa"
|
| 815 |
+
},
|
| 816 |
+
"requires_response": false,
|
| 817 |
+
"audio_url_candidates": [
|
| 818 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/closing_narration_premature.mp3"
|
| 819 |
+
]
|
| 820 |
+
},
|
| 821 |
+
{
|
| 822 |
+
"type": "text_read_along",
|
| 823 |
+
"node_name": "other_0",
|
| 824 |
+
"text_candidates": [
|
| 825 |
+
"Not quite! Try again!",
|
| 826 |
+
"Give it another go!",
|
| 827 |
+
"Let's try that one more time!",
|
| 828 |
+
"Try again. I know you can do it!",
|
| 829 |
+
"Let's give it another try!"
|
| 830 |
+
],
|
| 831 |
+
"next_node": "stall_selection",
|
| 832 |
+
"requires_response": false,
|
| 833 |
+
"additional_info": {
|
| 834 |
+
"speaker": "Narrator"
|
| 835 |
+
},
|
| 836 |
+
"audio_url_candidates": [
|
| 837 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 838 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 839 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 840 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 841 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 842 |
+
]
|
| 843 |
+
},
|
| 844 |
+
{
|
| 845 |
+
"type": "text_read_along",
|
| 846 |
+
"node_name": "other_1",
|
| 847 |
+
"text_candidates": [
|
| 848 |
+
"Not quite! Try again!",
|
| 849 |
+
"Give it another go!",
|
| 850 |
+
"Let's try that one more time!",
|
| 851 |
+
"Try again. I know you can do it!",
|
| 852 |
+
"Let's give it another try!"
|
| 853 |
+
],
|
| 854 |
+
"next_node": "cheese_selection",
|
| 855 |
+
"requires_response": false,
|
| 856 |
+
"additional_info": {
|
| 857 |
+
"speaker": "Narrator"
|
| 858 |
+
},
|
| 859 |
+
"audio_url_candidates": [
|
| 860 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 861 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 862 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 863 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 864 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 865 |
+
]
|
| 866 |
+
},
|
| 867 |
+
{
|
| 868 |
+
"type": "text_read_along",
|
| 869 |
+
"node_name": "other_2",
|
| 870 |
+
"text_candidates": [
|
| 871 |
+
"Not quite! Try again!",
|
| 872 |
+
"Give it another go!",
|
| 873 |
+
"Let's try that one more time!",
|
| 874 |
+
"Try again. I know you can do it!",
|
| 875 |
+
"Let's give it another try!"
|
| 876 |
+
],
|
| 877 |
+
"next_node": "fruit_selection",
|
| 878 |
+
"requires_response": false,
|
| 879 |
+
"additional_info": {
|
| 880 |
+
"speaker": "Narrator"
|
| 881 |
+
},
|
| 882 |
+
"audio_url_candidates": [
|
| 883 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 884 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 885 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 886 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 887 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 888 |
+
]
|
| 889 |
+
},
|
| 890 |
+
{
|
| 891 |
+
"type": "text_read_along",
|
| 892 |
+
"node_name": "other_3",
|
| 893 |
+
"text_candidates": [
|
| 894 |
+
"Not quite! Try again!",
|
| 895 |
+
"Give it another go!",
|
| 896 |
+
"Let's try that one more time!",
|
| 897 |
+
"Try again. I know you can do it!",
|
| 898 |
+
"Let's give it another try!"
|
| 899 |
+
],
|
| 900 |
+
"next_node": "toy_selection",
|
| 901 |
+
"requires_response": false,
|
| 902 |
+
"additional_info": {
|
| 903 |
+
"speaker": "Narrator"
|
| 904 |
+
},
|
| 905 |
+
"audio_url_candidates": [
|
| 906 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 907 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 908 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 909 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 910 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 911 |
+
]
|
| 912 |
+
},
|
| 913 |
+
{
|
| 914 |
+
"type": "text_read_along",
|
| 915 |
+
"node_name": "other_4",
|
| 916 |
+
"text_candidates": [
|
| 917 |
+
"Not quite! Try again!",
|
| 918 |
+
"Give it another go!",
|
| 919 |
+
"Let's try that one more time!",
|
| 920 |
+
"Try again. I know you can do it!",
|
| 921 |
+
"Let's give it another try!"
|
| 922 |
+
],
|
| 923 |
+
"next_node": "sorting_prompt",
|
| 924 |
+
"requires_response": false,
|
| 925 |
+
"additional_info": {
|
| 926 |
+
"speaker": "Narrator"
|
| 927 |
+
},
|
| 928 |
+
"audio_url_candidates": [
|
| 929 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 930 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 931 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 932 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 933 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 934 |
+
]
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"type": "text_read_along",
|
| 938 |
+
"node_name": "other_5",
|
| 939 |
+
"text_candidates": [
|
| 940 |
+
"Not quite! Try again!",
|
| 941 |
+
"Give it another go!",
|
| 942 |
+
"Let's try that one more time!",
|
| 943 |
+
"Try again. I know you can do it!",
|
| 944 |
+
"Let's give it another try!"
|
| 945 |
+
],
|
| 946 |
+
"next_node": "treat_selection_prompt",
|
| 947 |
+
"requires_response": false,
|
| 948 |
+
"additional_info": {
|
| 949 |
+
"speaker": "Narrator"
|
| 950 |
+
},
|
| 951 |
+
"audio_url_candidates": [
|
| 952 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Not-quite!.wav",
|
| 953 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Give-it-an.wav",
|
| 954 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-try.wav",
|
| 955 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Try-again..wav",
|
| 956 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/preset-audio/Narrator-Let's-give.wav"
|
| 957 |
+
]
|
| 958 |
+
}
|
| 959 |
+
]
|
| 960 |
+
}
|
config/mj_guided_second_old.json
ADDED
|
@@ -0,0 +1,858 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"intent_config": {
|
| 3 |
+
"rule_based": true,
|
| 4 |
+
"vector": false,
|
| 5 |
+
"llm": true,
|
| 6 |
+
"intent": [
|
| 7 |
+
{
|
| 8 |
+
"name": "CHEESE_STALL",
|
| 9 |
+
"samples": [
|
| 10 |
+
"cheese",
|
| 11 |
+
"cheese stall",
|
| 12 |
+
"visit cheese stall"
|
| 13 |
+
]
|
| 14 |
+
},
|
| 15 |
+
{
|
| 16 |
+
"name": "FRUIT_STALL",
|
| 17 |
+
"samples": [
|
| 18 |
+
"fruit",
|
| 19 |
+
"fruit stall",
|
| 20 |
+
"visit fruit stall"
|
| 21 |
+
]
|
| 22 |
+
},
|
| 23 |
+
{
|
| 24 |
+
"name": "TOY_STALL",
|
| 25 |
+
"samples": [
|
| 26 |
+
"toy",
|
| 27 |
+
"toy stall",
|
| 28 |
+
"visit toy stall"
|
| 29 |
+
]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"name": "SMELLY_CHEESE",
|
| 33 |
+
"samples": [
|
| 34 |
+
"smelly cheese",
|
| 35 |
+
"strong cheese",
|
| 36 |
+
"funny smelling cheese"
|
| 37 |
+
]
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"name": "CREAMY_CHEESE",
|
| 41 |
+
"samples": [
|
| 42 |
+
"creamy cheese",
|
| 43 |
+
"smooth cheese",
|
| 44 |
+
"soft cheese"
|
| 45 |
+
]
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"name": "APPLES",
|
| 49 |
+
"samples": [
|
| 50 |
+
"apples",
|
| 51 |
+
"apple",
|
| 52 |
+
"get apples"
|
| 53 |
+
]
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"name": "BANANAS",
|
| 57 |
+
"samples": [
|
| 58 |
+
"bananas",
|
| 59 |
+
"banana",
|
| 60 |
+
"get bananas"
|
| 61 |
+
]
|
| 62 |
+
},
|
| 63 |
+
{
|
| 64 |
+
"name": "TEDDY_BEAR",
|
| 65 |
+
"samples": [
|
| 66 |
+
"teddy",
|
| 67 |
+
"teddy bear",
|
| 68 |
+
"bear",
|
| 69 |
+
"mr snuggles"
|
| 70 |
+
]
|
| 71 |
+
},
|
| 72 |
+
{
|
| 73 |
+
"name": "SINGING_FISH",
|
| 74 |
+
"samples": [
|
| 75 |
+
"fish",
|
| 76 |
+
"singing fish",
|
| 77 |
+
"silly fish"
|
| 78 |
+
]
|
| 79 |
+
},
|
| 80 |
+
{
|
| 81 |
+
"name": "FOOD",
|
| 82 |
+
"samples": [
|
| 83 |
+
"food",
|
| 84 |
+
"food pile",
|
| 85 |
+
"sort food"
|
| 86 |
+
]
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
"name": "TOYS",
|
| 90 |
+
"samples": [
|
| 91 |
+
"toys",
|
| 92 |
+
"toy pile",
|
| 93 |
+
"sort toys"
|
| 94 |
+
]
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"name": "ICE_CREAM",
|
| 98 |
+
"samples": [
|
| 99 |
+
"ice cream",
|
| 100 |
+
"ice-cream",
|
| 101 |
+
"get ice cream"
|
| 102 |
+
]
|
| 103 |
+
},
|
| 104 |
+
{
|
| 105 |
+
"name": "LOLLIPOP",
|
| 106 |
+
"samples": [
|
| 107 |
+
"lollipop",
|
| 108 |
+
"lolly",
|
| 109 |
+
"get lollipop"
|
| 110 |
+
]
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"name": "STOP",
|
| 114 |
+
"samples": [
|
| 115 |
+
"stop",
|
| 116 |
+
"let's end here"
|
| 117 |
+
]
|
| 118 |
+
},
|
| 119 |
+
{
|
| 120 |
+
"name": "other",
|
| 121 |
+
"samples": [
|
| 122 |
+
"I don't know"
|
| 123 |
+
]
|
| 124 |
+
}
|
| 125 |
+
],
|
| 126 |
+
"global_intent": [
|
| 127 |
+
{
|
| 128 |
+
"name": "STOP",
|
| 129 |
+
"samples": [
|
| 130 |
+
"stop",
|
| 131 |
+
"let's end here"
|
| 132 |
+
]
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"name": "other",
|
| 136 |
+
"samples": [
|
| 137 |
+
"I don't know"
|
| 138 |
+
]
|
| 139 |
+
}
|
| 140 |
+
]
|
| 141 |
+
},
|
| 142 |
+
"workflow": [
|
| 143 |
+
{
|
| 144 |
+
"type": "Play",
|
| 145 |
+
"node_name": "opening_background_music",
|
| 146 |
+
"audio_url_candidates": [
|
| 147 |
+
"https://storytellings.s3.ap-southeast-1.amazonaws.com/local/from-admin/1734534796395.mp3"
|
| 148 |
+
],
|
| 149 |
+
"next_node": "market_ambiance",
|
| 150 |
+
"requires_response": false
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"type": "Play",
|
| 154 |
+
"node_name": "market_ambiance",
|
| 155 |
+
"audio_url_candidates": [
|
| 156 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Market ambiance.mp3"
|
| 157 |
+
],
|
| 158 |
+
"next_node": "opening_narration",
|
| 159 |
+
"requires_response": false
|
| 160 |
+
},
|
| 161 |
+
{
|
| 162 |
+
"type": "Agent",
|
| 163 |
+
"node_name": "opening_narration",
|
| 164 |
+
"text_candidates": [
|
| 165 |
+
"Today, Peppa and her family are visiting the market! There are so many stalls to explore. Let's help Peppa find some yummy things to buy!"
|
| 166 |
+
],
|
| 167 |
+
"next_node": "peppa_greeting",
|
| 168 |
+
"additional_info": {
|
| 169 |
+
"speaker": "Narrator",
|
| 170 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/opening_narration.mp3"
|
| 171 |
+
},
|
| 172 |
+
"requires_response": false
|
| 173 |
+
},
|
| 174 |
+
{
|
| 175 |
+
"type": "Agent",
|
| 176 |
+
"node_name": "peppa_greeting",
|
| 177 |
+
"text_candidates": [
|
| 178 |
+
"Look at all the stalls! I want to pick something fun. Can you help me?"
|
| 179 |
+
],
|
| 180 |
+
"next_node": "stall_selection",
|
| 181 |
+
"additional_info": {
|
| 182 |
+
"speaker": "Peppa",
|
| 183 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/peppa_greeting.mp3"
|
| 184 |
+
},
|
| 185 |
+
"requires_response": false
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"type": "Agent",
|
| 189 |
+
"node_name": "stall_selection",
|
| 190 |
+
"text_candidates": [
|
| 191 |
+
"Which stall should Peppa visit first? The cheese stall, the fruit stall, or the toy stall?"
|
| 192 |
+
],
|
| 193 |
+
"next_node": "stall_menu",
|
| 194 |
+
"additional_info": {
|
| 195 |
+
"speaker": "Narrator",
|
| 196 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/stall_selection.mp3"
|
| 197 |
+
},
|
| 198 |
+
"requires_response": false
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"type": "Text_Choice",
|
| 202 |
+
"node_name": "stall_menu",
|
| 203 |
+
"text_choices": [
|
| 204 |
+
"Cheese",
|
| 205 |
+
"Fruit",
|
| 206 |
+
"Toy"
|
| 207 |
+
],
|
| 208 |
+
"next_node": "stall_intent"
|
| 209 |
+
},
|
| 210 |
+
{
|
| 211 |
+
"type": "Intent",
|
| 212 |
+
"node_name": "stall_intent",
|
| 213 |
+
"content": [
|
| 214 |
+
"CHEESE_STALL",
|
| 215 |
+
"FRUIT_STALL",
|
| 216 |
+
"TOY_STALL",
|
| 217 |
+
"STOP"
|
| 218 |
+
],
|
| 219 |
+
"next_node": {
|
| 220 |
+
"CHEESE_STALL": "cheese_stall_response",
|
| 221 |
+
"FRUIT_STALL": "fruit_stall_response",
|
| 222 |
+
"TOY_STALL": "toy_stall_response",
|
| 223 |
+
"STOP": "closing_narration_premature",
|
| 224 |
+
"other": "other_0"
|
| 225 |
+
}
|
| 226 |
+
},
|
| 227 |
+
{
|
| 228 |
+
"type": "Agent",
|
| 229 |
+
"node_name": "cheese_stall_response",
|
| 230 |
+
"text_candidates": [
|
| 231 |
+
"Mmm, I love cheese! Let's find one for Mummy Pig."
|
| 232 |
+
],
|
| 233 |
+
"next_node": "vendor_sounds_cheese",
|
| 234 |
+
"additional_info": {
|
| 235 |
+
"speaker": "Peppa",
|
| 236 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_stall_response.mp3"
|
| 237 |
+
},
|
| 238 |
+
"requires_response": false
|
| 239 |
+
},
|
| 240 |
+
{
|
| 241 |
+
"type": "Play",
|
| 242 |
+
"node_name": "vendor_sounds_cheese",
|
| 243 |
+
"audio_url_candidates": [
|
| 244 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 245 |
+
],
|
| 246 |
+
"next_node": "peppa_cheese_selection",
|
| 247 |
+
"requires_response": false
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"type": "Agent",
|
| 251 |
+
"node_name": "peppa_cheese_selection",
|
| 252 |
+
"text_candidates": [
|
| 253 |
+
"Let’s pick something from this stall!"
|
| 254 |
+
],
|
| 255 |
+
"next_node": "cheese_selection",
|
| 256 |
+
"additional_info": {
|
| 257 |
+
"speaker": "Peppa",
|
| 258 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_selection.mp3"
|
| 259 |
+
},
|
| 260 |
+
"requires_response": false
|
| 261 |
+
},
|
| 262 |
+
{
|
| 263 |
+
"type": "Agent",
|
| 264 |
+
"node_name": "cheese_selection",
|
| 265 |
+
"text_candidates": [
|
| 266 |
+
"What should Peppa choose, smelly cheese or creamy cheese? Help her pick one item!"
|
| 267 |
+
],
|
| 268 |
+
"next_node": "cheese_menu",
|
| 269 |
+
"additional_info": {
|
| 270 |
+
"speaker": "Narrator",
|
| 271 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheese_selection.mp3"
|
| 272 |
+
},
|
| 273 |
+
"requires_response": false
|
| 274 |
+
},
|
| 275 |
+
{
|
| 276 |
+
"type": "Text_Choice",
|
| 277 |
+
"node_name": "cheese_menu",
|
| 278 |
+
"text_choices": [
|
| 279 |
+
"Smelly Cheese",
|
| 280 |
+
"Creamy Cheese"
|
| 281 |
+
],
|
| 282 |
+
"next_node": "cheese_choice_intent"
|
| 283 |
+
},
|
| 284 |
+
{
|
| 285 |
+
"type": "Intent",
|
| 286 |
+
"node_name": "cheese_choice_intent",
|
| 287 |
+
"content": [
|
| 288 |
+
"SMELLY_CHEESE",
|
| 289 |
+
"CREAMY_CHEESE",
|
| 290 |
+
"STOP"
|
| 291 |
+
],
|
| 292 |
+
"next_node": {
|
| 293 |
+
"SMELLY_CHEESE": "smelly_cheese_response",
|
| 294 |
+
"CREAMY_CHEESE": "creamy_cheese_response",
|
| 295 |
+
"STOP": "closing_narration_premature",
|
| 296 |
+
"other": "other_1"
|
| 297 |
+
}
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"type": "Agent",
|
| 301 |
+
"node_name": "fruit_stall_response",
|
| 302 |
+
"text_candidates": [
|
| 303 |
+
"Yummy, I love fruit! Let's find something juicy."
|
| 304 |
+
],
|
| 305 |
+
"next_node": "vendor_sounds_fruit",
|
| 306 |
+
"additional_info": {
|
| 307 |
+
"speaker": "Peppa",
|
| 308 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fruit_stall_response.mp3"
|
| 309 |
+
},
|
| 310 |
+
"requires_response": false
|
| 311 |
+
},
|
| 312 |
+
{
|
| 313 |
+
"type": "Play",
|
| 314 |
+
"node_name": "vendor_sounds_fruit",
|
| 315 |
+
"audio_url_candidates": [
|
| 316 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 317 |
+
],
|
| 318 |
+
"next_node": "peppa_fruit_selection",
|
| 319 |
+
"requires_response": false
|
| 320 |
+
},
|
| 321 |
+
{
|
| 322 |
+
"type": "Agent",
|
| 323 |
+
"node_name": "peppa_fruit_selection",
|
| 324 |
+
"text_candidates": [
|
| 325 |
+
"Let’s pick something from this stall!"
|
| 326 |
+
],
|
| 327 |
+
"next_node": "fruit_selection",
|
| 328 |
+
"additional_info": {
|
| 329 |
+
"speaker": "Peppa",
|
| 330 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/narrator_toy_selection.mp3"
|
| 331 |
+
},
|
| 332 |
+
"requires_response": false
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"type": "Agent",
|
| 336 |
+
"node_name": "fruit_selection",
|
| 337 |
+
"text_candidates": [
|
| 338 |
+
"What should Peppa choose, apples or bananas? Help her pick one item!"
|
| 339 |
+
],
|
| 340 |
+
"next_node": "fruit_menu",
|
| 341 |
+
"additional_info": {
|
| 342 |
+
"speaker": "Narrator",
|
| 343 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fruit_selection.mp3"
|
| 344 |
+
},
|
| 345 |
+
"requires_response": false
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"type": "Text_Choice",
|
| 349 |
+
"node_name": "fruit_menu",
|
| 350 |
+
"text_choices": [
|
| 351 |
+
"Apples",
|
| 352 |
+
"Bananas"
|
| 353 |
+
],
|
| 354 |
+
"next_node": "fruit_choice_intent"
|
| 355 |
+
},
|
| 356 |
+
{
|
| 357 |
+
"type": "Intent",
|
| 358 |
+
"node_name": "fruit_choice_intent",
|
| 359 |
+
"content": [
|
| 360 |
+
"APPLES",
|
| 361 |
+
"BANANAS",
|
| 362 |
+
"STOP"
|
| 363 |
+
],
|
| 364 |
+
"next_node": {
|
| 365 |
+
"APPLES": "apples_response",
|
| 366 |
+
"BANANAS": "bananas_response",
|
| 367 |
+
"STOP": "closing_narration_premature",
|
| 368 |
+
"other": "other_2"
|
| 369 |
+
}
|
| 370 |
+
},
|
| 371 |
+
{
|
| 372 |
+
"type": "Agent",
|
| 373 |
+
"node_name": "toy_stall_response",
|
| 374 |
+
"text_candidates": [
|
| 375 |
+
"Oh look, a toy stall! What fun things can we find here?"
|
| 376 |
+
],
|
| 377 |
+
"next_node": "vendor_sounds_toy",
|
| 378 |
+
"additional_info": {
|
| 379 |
+
"speaker": "Peppa",
|
| 380 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toy_stall_response.mp3"
|
| 381 |
+
},
|
| 382 |
+
"requires_response": false
|
| 383 |
+
},
|
| 384 |
+
{
|
| 385 |
+
"type": "Play",
|
| 386 |
+
"node_name": "vendor_sounds_toy",
|
| 387 |
+
"audio_url_candidates": [
|
| 388 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Vendor greetings, rustling bags, and items being placed in basket.mp3"
|
| 389 |
+
],
|
| 390 |
+
"next_node": "peppa_toy_selection",
|
| 391 |
+
"requires_response": false
|
| 392 |
+
},
|
| 393 |
+
{
|
| 394 |
+
"type": "Agent",
|
| 395 |
+
"node_name": "peppa_toy_selection",
|
| 396 |
+
"text_candidates": [
|
| 397 |
+
"Let’s pick something from this stall!"
|
| 398 |
+
],
|
| 399 |
+
"next_node": "toy_selection",
|
| 400 |
+
"additional_info": {
|
| 401 |
+
"speaker": "Peppa",
|
| 402 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/narrator_toy_selection.mp3"
|
| 403 |
+
},
|
| 404 |
+
"requires_response": false
|
| 405 |
+
},
|
| 406 |
+
{
|
| 407 |
+
"type": "Agent",
|
| 408 |
+
"node_name": "toy_selection",
|
| 409 |
+
"text_candidates": [
|
| 410 |
+
"What should Peppa choose, a Teddy Bear of a singing fish toy? Help her pick one item!"
|
| 411 |
+
],
|
| 412 |
+
"next_node": "toy_menu",
|
| 413 |
+
"additional_info": {
|
| 414 |
+
"speaker": "Narrator",
|
| 415 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toy_selection.mp3"
|
| 416 |
+
},
|
| 417 |
+
"requires_response": false
|
| 418 |
+
},
|
| 419 |
+
{
|
| 420 |
+
"type": "Text_Choice",
|
| 421 |
+
"node_name": "toy_menu",
|
| 422 |
+
"text_choices": [
|
| 423 |
+
"Teddy Bear",
|
| 424 |
+
"Singing Fish"
|
| 425 |
+
],
|
| 426 |
+
"next_node": "toy_choice_intent"
|
| 427 |
+
},
|
| 428 |
+
{
|
| 429 |
+
"type": "Intent",
|
| 430 |
+
"node_name": "toy_choice_intent",
|
| 431 |
+
"content": [
|
| 432 |
+
"TEDDY_BEAR",
|
| 433 |
+
"SINGING_FISH",
|
| 434 |
+
"STOP"
|
| 435 |
+
],
|
| 436 |
+
"next_node": {
|
| 437 |
+
"TEDDY_BEAR": "teddy_response",
|
| 438 |
+
"SINGING_FISH": "fish_response",
|
| 439 |
+
"STOP": "closing_narration_premature",
|
| 440 |
+
"other": "other_3"
|
| 441 |
+
}
|
| 442 |
+
},
|
| 443 |
+
{
|
| 444 |
+
"type": "Agent",
|
| 445 |
+
"node_name": "smelly_cheese_response",
|
| 446 |
+
"text_candidates": [
|
| 447 |
+
"This one smells funny! Mummy Pig will love it!"
|
| 448 |
+
],
|
| 449 |
+
"next_node": "sound_basket",
|
| 450 |
+
"additional_info": {
|
| 451 |
+
"speaker": "Peppa",
|
| 452 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/smelly_cheese_response.mp3"
|
| 453 |
+
},
|
| 454 |
+
"requires_response": false
|
| 455 |
+
},
|
| 456 |
+
{
|
| 457 |
+
"type": "Agent",
|
| 458 |
+
"node_name": "creamy_cheese_response",
|
| 459 |
+
"text_candidates": [
|
| 460 |
+
"So smooth! I think Daddy Pig will like this one."
|
| 461 |
+
],
|
| 462 |
+
"next_node": "sound_basket",
|
| 463 |
+
"additional_info": {
|
| 464 |
+
"speaker": "Peppa",
|
| 465 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/creamy_cheese_response.mp3"
|
| 466 |
+
},
|
| 467 |
+
"requires_response": false
|
| 468 |
+
},
|
| 469 |
+
{
|
| 470 |
+
"type": "Agent",
|
| 471 |
+
"node_name": "apples_response",
|
| 472 |
+
"text_candidates": [
|
| 473 |
+
"Crunchy and sweet! Apples are my favorite!"
|
| 474 |
+
],
|
| 475 |
+
"next_node": "sound_basket",
|
| 476 |
+
"additional_info": {
|
| 477 |
+
"speaker": "Peppa",
|
| 478 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/apples_response.mp3"
|
| 479 |
+
},
|
| 480 |
+
"requires_response": false
|
| 481 |
+
},
|
| 482 |
+
{
|
| 483 |
+
"type": "Agent",
|
| 484 |
+
"node_name": "bananas_response",
|
| 485 |
+
"text_candidates": [
|
| 486 |
+
"Bananas are so yummy! George will love these!"
|
| 487 |
+
],
|
| 488 |
+
"next_node": "sound_basket",
|
| 489 |
+
"additional_info": {
|
| 490 |
+
"speaker": "Peppa",
|
| 491 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/bananas_response.mp3"
|
| 492 |
+
},
|
| 493 |
+
"requires_response": false
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"type": "Agent",
|
| 497 |
+
"node_name": "teddy_response",
|
| 498 |
+
"text_candidates": [
|
| 499 |
+
"It's so cuddly! I'll name it Mr. Snuggles."
|
| 500 |
+
],
|
| 501 |
+
"next_node": "sound_basket",
|
| 502 |
+
"additional_info": {
|
| 503 |
+
"speaker": "Peppa",
|
| 504 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/teddy_response.mp3"
|
| 505 |
+
},
|
| 506 |
+
"requires_response": false
|
| 507 |
+
},
|
| 508 |
+
{
|
| 509 |
+
"type": "Agent",
|
| 510 |
+
"node_name": "fish_response",
|
| 511 |
+
"text_candidates": [
|
| 512 |
+
"Hehe, this fish is so silly! It sings a funny song!"
|
| 513 |
+
],
|
| 514 |
+
"next_node": "sound_basket",
|
| 515 |
+
"additional_info": {
|
| 516 |
+
"speaker": "Peppa",
|
| 517 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/fish_response.mp3"
|
| 518 |
+
},
|
| 519 |
+
"requires_response": false
|
| 520 |
+
},
|
| 521 |
+
{
|
| 522 |
+
"type": "Play",
|
| 523 |
+
"node_name": "sound_basket",
|
| 524 |
+
"audio_url_candidates": [
|
| 525 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Rustling bags, clinking items.mp3"
|
| 526 |
+
],
|
| 527 |
+
"next_node": "basket_prompt",
|
| 528 |
+
"requires_response": false
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"type": "Agent",
|
| 532 |
+
"node_name": "basket_prompt",
|
| 533 |
+
"text_candidates": [
|
| 534 |
+
"Oh no! My basket is getting so heavy, and everything is mixed up! Let's tidy it up so I can carry it better!"
|
| 535 |
+
],
|
| 536 |
+
"next_node": "sorting_prompt",
|
| 537 |
+
"additional_info": {
|
| 538 |
+
"speaker": "Peppa",
|
| 539 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/basket_prompt.mp3"
|
| 540 |
+
},
|
| 541 |
+
"requires_response": false
|
| 542 |
+
},
|
| 543 |
+
{
|
| 544 |
+
"type": "Agent",
|
| 545 |
+
"node_name": "sorting_prompt",
|
| 546 |
+
"text_candidates": [
|
| 547 |
+
"Help Peppa sort her items! Put the food in one pile and the toys in another. Which should go first?"
|
| 548 |
+
],
|
| 549 |
+
"next_node": "sorting_menu",
|
| 550 |
+
"additional_info": {
|
| 551 |
+
"speaker": "Narrator",
|
| 552 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/sorting_prompt.mp3"
|
| 553 |
+
},
|
| 554 |
+
"requires_response": false
|
| 555 |
+
},
|
| 556 |
+
{
|
| 557 |
+
"type": "Text_Choice",
|
| 558 |
+
"node_name": "sorting_menu",
|
| 559 |
+
"text_choices": [
|
| 560 |
+
"Food",
|
| 561 |
+
"Toys"
|
| 562 |
+
],
|
| 563 |
+
"next_node": "sorting_intent"
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"type": "Intent",
|
| 567 |
+
"node_name": "sorting_intent",
|
| 568 |
+
"content": [
|
| 569 |
+
"FOOD",
|
| 570 |
+
"TOYS",
|
| 571 |
+
"STOP"
|
| 572 |
+
],
|
| 573 |
+
"next_node": {
|
| 574 |
+
"FOOD": "food_sorting_response",
|
| 575 |
+
"TOYS": "toys_sorting_response",
|
| 576 |
+
"STOP": "closing_narration_premature",
|
| 577 |
+
"other": "other_4"
|
| 578 |
+
}
|
| 579 |
+
},
|
| 580 |
+
{
|
| 581 |
+
"type": "Agent",
|
| 582 |
+
"node_name": "food_sorting_response",
|
| 583 |
+
"text_candidates": [
|
| 584 |
+
"Great choice! Let's put the apples and cheese together. All neat and tidy!"
|
| 585 |
+
],
|
| 586 |
+
"next_node": "organization_thanks",
|
| 587 |
+
"additional_info": {
|
| 588 |
+
"speaker": "Peppa",
|
| 589 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/food_sorting_response.mp3"
|
| 590 |
+
},
|
| 591 |
+
"requires_response": false
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"type": "Agent",
|
| 595 |
+
"node_name": "toys_sorting_response",
|
| 596 |
+
"text_candidates": [
|
| 597 |
+
"Good idea! Let's put the teddy bear and singing fish in their own spot!"
|
| 598 |
+
],
|
| 599 |
+
"next_node": "organization_thanks",
|
| 600 |
+
"additional_info": {
|
| 601 |
+
"speaker": "Peppa",
|
| 602 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/toys_sorting_response.mp3"
|
| 603 |
+
},
|
| 604 |
+
"requires_response": false
|
| 605 |
+
},
|
| 606 |
+
{
|
| 607 |
+
"type": "Agent",
|
| 608 |
+
"node_name": "organization_thanks",
|
| 609 |
+
"text_candidates": [
|
| 610 |
+
"Thank you for helping me organize my basket! Now it's so much easier to carry."
|
| 611 |
+
],
|
| 612 |
+
"next_node": "treat_sounds",
|
| 613 |
+
"additional_info": {
|
| 614 |
+
"speaker": "Peppa",
|
| 615 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/organization_thanks.mp3"
|
| 616 |
+
},
|
| 617 |
+
"requires_response": false
|
| 618 |
+
},
|
| 619 |
+
{
|
| 620 |
+
"type": "Play",
|
| 621 |
+
"node_name": "treat_sounds",
|
| 622 |
+
"audio_url_candidates": [
|
| 623 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/Rustling bags, cheerful chatter.mp3"
|
| 624 |
+
],
|
| 625 |
+
"next_node": "treat_prompt",
|
| 626 |
+
"requires_response": false
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"type": "Agent",
|
| 630 |
+
"node_name": "treat_prompt",
|
| 631 |
+
"text_candidates": [
|
| 632 |
+
"Before we leave, I want to get a treat! What should I choose?"
|
| 633 |
+
],
|
| 634 |
+
"next_node": "treat_selection_prompt",
|
| 635 |
+
"additional_info": {
|
| 636 |
+
"speaker": "Peppa",
|
| 637 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/treat_prompt.mp3"
|
| 638 |
+
},
|
| 639 |
+
"requires_response": false
|
| 640 |
+
},
|
| 641 |
+
{
|
| 642 |
+
"type": "Agent",
|
| 643 |
+
"node_name": "treat_selection_prompt",
|
| 644 |
+
"text_candidates": [
|
| 645 |
+
"Help Peppa pick a treat. Should she get an ice cream or a lollipop?"
|
| 646 |
+
],
|
| 647 |
+
"next_node": "treat_menu",
|
| 648 |
+
"additional_info": {
|
| 649 |
+
"speaker": "Narrator",
|
| 650 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/treat_selection_prompt.mp3"
|
| 651 |
+
},
|
| 652 |
+
"requires_response": false
|
| 653 |
+
},
|
| 654 |
+
{
|
| 655 |
+
"type": "Text_Choice",
|
| 656 |
+
"node_name": "treat_menu",
|
| 657 |
+
"text_choices": [
|
| 658 |
+
"Ice Cream",
|
| 659 |
+
"Lollipop"
|
| 660 |
+
],
|
| 661 |
+
"next_node": "treat_intent"
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"type": "Intent",
|
| 665 |
+
"node_name": "treat_intent",
|
| 666 |
+
"content": [
|
| 667 |
+
"ICE_CREAM",
|
| 668 |
+
"LOLLIPOP",
|
| 669 |
+
"STOP"
|
| 670 |
+
],
|
| 671 |
+
"next_node": {
|
| 672 |
+
"ICE_CREAM": "ice_cream_response",
|
| 673 |
+
"LOLLIPOP": "lollipop_response",
|
| 674 |
+
"STOP": "closing_narration_premature",
|
| 675 |
+
"other": "other_5"
|
| 676 |
+
}
|
| 677 |
+
},
|
| 678 |
+
{
|
| 679 |
+
"type": "Agent",
|
| 680 |
+
"node_name": "ice_cream_response",
|
| 681 |
+
"text_candidates": [
|
| 682 |
+
"Yummy! Ice cream is the best on a sunny day!"
|
| 683 |
+
],
|
| 684 |
+
"next_node": "closing_narration",
|
| 685 |
+
"additional_info": {
|
| 686 |
+
"speaker": "Peppa",
|
| 687 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/ice_cream_response.mp3"
|
| 688 |
+
},
|
| 689 |
+
"requires_response": false
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"type": "Agent",
|
| 693 |
+
"node_name": "lollipop_response",
|
| 694 |
+
"text_candidates": [
|
| 695 |
+
"This lollipop is so colorful! I'll save it for later."
|
| 696 |
+
],
|
| 697 |
+
"next_node": "closing_narration",
|
| 698 |
+
"additional_info": {
|
| 699 |
+
"speaker": "Peppa",
|
| 700 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/lollipop_response.mp3"
|
| 701 |
+
},
|
| 702 |
+
"requires_response": false
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"type": "Agent",
|
| 706 |
+
"node_name": "closing_narration",
|
| 707 |
+
"text_candidates": [
|
| 708 |
+
"Peppa had a wonderful time at the market! Thanks to your help, she found some great things to take home."
|
| 709 |
+
],
|
| 710 |
+
"next_node": "final_goodbye",
|
| 711 |
+
"additional_info": {
|
| 712 |
+
"speaker": "Narrator",
|
| 713 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/closing_narration.mp3"
|
| 714 |
+
},
|
| 715 |
+
"requires_response": false
|
| 716 |
+
},
|
| 717 |
+
{
|
| 718 |
+
"type": "Agent",
|
| 719 |
+
"node_name": "final_goodbye",
|
| 720 |
+
"text_candidates": [
|
| 721 |
+
"Markets are so much fun! Let's go shopping again soon!"
|
| 722 |
+
],
|
| 723 |
+
"next_node": "peppa_giggle",
|
| 724 |
+
"additional_info": {
|
| 725 |
+
"speaker": "Peppa",
|
| 726 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/final_goodbye.mp3"
|
| 727 |
+
},
|
| 728 |
+
"requires_response": false
|
| 729 |
+
},
|
| 730 |
+
{
|
| 731 |
+
"type": "Play",
|
| 732 |
+
"node_name": "peppa_giggle",
|
| 733 |
+
"audio_url_candidates": [
|
| 734 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/peppa_giggle.mp3"
|
| 735 |
+
],
|
| 736 |
+
"next_node": "final_chime",
|
| 737 |
+
"requires_response": false
|
| 738 |
+
},
|
| 739 |
+
{
|
| 740 |
+
"type": "Play",
|
| 741 |
+
"node_name": "final_chime",
|
| 742 |
+
"audio_url_candidates": [
|
| 743 |
+
"https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/cheerful chime.mp3"
|
| 744 |
+
],
|
| 745 |
+
"next_node": "end",
|
| 746 |
+
"requires_response": false
|
| 747 |
+
},
|
| 748 |
+
{
|
| 749 |
+
"type": "Agent",
|
| 750 |
+
"node_name": "closing_narration_premature",
|
| 751 |
+
"text_candidates": [
|
| 752 |
+
"Let's go shopping again another time! Bye-bye!"
|
| 753 |
+
],
|
| 754 |
+
"next_node": "end",
|
| 755 |
+
"additional_info": {
|
| 756 |
+
"speaker": "Peppa",
|
| 757 |
+
"audio_url": "https://storage.googleapis.com/maika-ai-ext/flowbot/guided-story/peppa-market/closing_narration_premature.mp3"
|
| 758 |
+
},
|
| 759 |
+
"requires_response": false
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"type": "Agent",
|
| 763 |
+
"node_name": "other_0",
|
| 764 |
+
"text_candidates": [
|
| 765 |
+
"Not quite! Try again!",
|
| 766 |
+
"Give it another go!",
|
| 767 |
+
"Let's try that one more time!",
|
| 768 |
+
"Try again. I know you can do it!",
|
| 769 |
+
"Let's give it another try!"
|
| 770 |
+
],
|
| 771 |
+
"next_node": "stall_selection",
|
| 772 |
+
"requires_response": false,
|
| 773 |
+
"additional_info": {
|
| 774 |
+
"speaker": "Narrator"
|
| 775 |
+
}
|
| 776 |
+
},
|
| 777 |
+
{
|
| 778 |
+
"type": "Agent",
|
| 779 |
+
"node_name": "other_1",
|
| 780 |
+
"text_candidates": [
|
| 781 |
+
"Not quite! Try again!",
|
| 782 |
+
"Give it another go!",
|
| 783 |
+
"Let's try that one more time!",
|
| 784 |
+
"Try again. I know you can do it!",
|
| 785 |
+
"Let's give it another try!"
|
| 786 |
+
],
|
| 787 |
+
"next_node": "cheese_selection",
|
| 788 |
+
"requires_response": false,
|
| 789 |
+
"additional_info": {
|
| 790 |
+
"speaker": "Narrator"
|
| 791 |
+
}
|
| 792 |
+
},
|
| 793 |
+
{
|
| 794 |
+
"type": "Agent",
|
| 795 |
+
"node_name": "other_2",
|
| 796 |
+
"text_candidates": [
|
| 797 |
+
"Not quite! Try again!",
|
| 798 |
+
"Give it another go!",
|
| 799 |
+
"Let's try that one more time!",
|
| 800 |
+
"Try again. I know you can do it!",
|
| 801 |
+
"Let's give it another try!"
|
| 802 |
+
],
|
| 803 |
+
"next_node": "fruit_selection",
|
| 804 |
+
"requires_response": false,
|
| 805 |
+
"additional_info": {
|
| 806 |
+
"speaker": "Narrator"
|
| 807 |
+
}
|
| 808 |
+
},
|
| 809 |
+
{
|
| 810 |
+
"type": "Agent",
|
| 811 |
+
"node_name": "other_3",
|
| 812 |
+
"text_candidates": [
|
| 813 |
+
"Not quite! Try again!",
|
| 814 |
+
"Give it another go!",
|
| 815 |
+
"Let's try that one more time!",
|
| 816 |
+
"Try again. I know you can do it!",
|
| 817 |
+
"Let's give it another try!"
|
| 818 |
+
],
|
| 819 |
+
"next_node": "toy_selection",
|
| 820 |
+
"requires_response": false,
|
| 821 |
+
"additional_info": {
|
| 822 |
+
"speaker": "Narrator"
|
| 823 |
+
}
|
| 824 |
+
},
|
| 825 |
+
{
|
| 826 |
+
"type": "Agent",
|
| 827 |
+
"node_name": "other_4",
|
| 828 |
+
"text_candidates": [
|
| 829 |
+
"Not quite! Try again!",
|
| 830 |
+
"Give it another go!",
|
| 831 |
+
"Let's try that one more time!",
|
| 832 |
+
"Try again. I know you can do it!",
|
| 833 |
+
"Let's give it another try!"
|
| 834 |
+
],
|
| 835 |
+
"next_node": "sorting_prompt",
|
| 836 |
+
"requires_response": false,
|
| 837 |
+
"additional_info": {
|
| 838 |
+
"speaker": "Narrator"
|
| 839 |
+
}
|
| 840 |
+
},
|
| 841 |
+
{
|
| 842 |
+
"type": "Agent",
|
| 843 |
+
"node_name": "other_5",
|
| 844 |
+
"text_candidates": [
|
| 845 |
+
"Not quite! Try again!",
|
| 846 |
+
"Give it another go!",
|
| 847 |
+
"Let's try that one more time!",
|
| 848 |
+
"Try again. I know you can do it!",
|
| 849 |
+
"Let's give it another try!"
|
| 850 |
+
],
|
| 851 |
+
"next_node": "treat_selection_prompt",
|
| 852 |
+
"requires_response": false,
|
| 853 |
+
"additional_info": {
|
| 854 |
+
"speaker": "Narrator"
|
| 855 |
+
}
|
| 856 |
+
}
|
| 857 |
+
]
|
| 858 |
+
}
|
grpc_services/generated/commons_pb2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: commons.proto
|
| 4 |
+
# Protobuf Python Version: 5.26.1
|
| 5 |
+
"""Generated protocol buffer code."""
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
from google.protobuf.internal import builder as _builder
|
| 10 |
+
# @@protoc_insertion_point(imports)
|
| 11 |
+
|
| 12 |
+
_sym_db = _symbol_database.Default()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rcommons.proto\x12\x07\x63ommons\"6\n\x0bSessionInfo\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x13\n\x0btool_config\x18\x02 \x01(\tb\x06proto3')
|
| 18 |
+
|
| 19 |
+
_globals = globals()
|
| 20 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
| 21 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'commons_pb2', _globals)
|
| 22 |
+
if not _descriptor._USE_C_DESCRIPTORS:
|
| 23 |
+
DESCRIPTOR._loaded_options = None
|
| 24 |
+
_globals['_SESSIONINFO']._serialized_start=26
|
| 25 |
+
_globals['_SESSIONINFO']._serialized_end=80
|
| 26 |
+
# @@protoc_insertion_point(module_scope)
|
grpc_services/generated/commons_pb2.pyi
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
@generated by mypy-protobuf. Do not edit manually!
|
| 3 |
+
isort:skip_file
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import builtins
|
| 7 |
+
import google.protobuf.descriptor
|
| 8 |
+
import google.protobuf.message
|
| 9 |
+
import typing
|
| 10 |
+
|
| 11 |
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
| 12 |
+
|
| 13 |
+
@typing.final
|
| 14 |
+
class SessionInfo(google.protobuf.message.Message):
|
| 15 |
+
"""atomic type"""
|
| 16 |
+
|
| 17 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 18 |
+
|
| 19 |
+
SESSION_ID_FIELD_NUMBER: builtins.int
|
| 20 |
+
TOOL_CONFIG_FIELD_NUMBER: builtins.int
|
| 21 |
+
session_id: builtins.str
|
| 22 |
+
tool_config: builtins.str
|
| 23 |
+
"""json (key: eval_prompt)"""
|
| 24 |
+
def __init__(
|
| 25 |
+
self,
|
| 26 |
+
*,
|
| 27 |
+
session_id: builtins.str = ...,
|
| 28 |
+
tool_config: builtins.str = ...,
|
| 29 |
+
) -> None: ...
|
| 30 |
+
def ClearField(self, field_name: typing.Literal["session_id", b"session_id", "tool_config", b"tool_config"]) -> None: ...
|
| 31 |
+
|
| 32 |
+
global___SessionInfo = SessionInfo
|
grpc_services/generated/commons_pb2_grpc.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
| 2 |
+
"""Client and server classes corresponding to protobuf-defined services."""
|
| 3 |
+
import grpc
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
GRPC_GENERATED_VERSION = '1.63.0'
|
| 8 |
+
GRPC_VERSION = grpc.__version__
|
| 9 |
+
EXPECTED_ERROR_RELEASE = '1.65.0'
|
| 10 |
+
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
|
| 11 |
+
_version_not_supported = False
|
| 12 |
+
|
| 13 |
+
try:
|
| 14 |
+
from grpc._utilities import first_version_is_lower
|
| 15 |
+
_version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
|
| 16 |
+
except ImportError:
|
| 17 |
+
_version_not_supported = True
|
| 18 |
+
|
| 19 |
+
if _version_not_supported:
|
| 20 |
+
warnings.warn(
|
| 21 |
+
f'The grpc package installed is at version {GRPC_VERSION},'
|
| 22 |
+
+ f' but the generated code in commons_pb2_grpc.py depends on'
|
| 23 |
+
+ f' grpcio>={GRPC_GENERATED_VERSION}.'
|
| 24 |
+
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
|
| 25 |
+
+ f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
|
| 26 |
+
+ f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
|
| 27 |
+
+ f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
|
| 28 |
+
RuntimeWarning
|
| 29 |
+
)
|
grpc_services/generated/flowbot_service_pb2.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: flowbot_service.proto
|
| 4 |
+
# Protobuf Python Version: 5.26.1
|
| 5 |
+
"""Generated protocol buffer code."""
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
from google.protobuf.internal import builder as _builder
|
| 10 |
+
# @@protoc_insertion_point(imports)
|
| 11 |
+
|
| 12 |
+
_sym_db = _symbol_database.Default()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
import grpc_services.generated.commons_pb2 as commons__pb2
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x66lowbot_service.proto\x12\x07\x66lowbot\x1a\rcommons.proto\"5\n\x08Variable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\"5\n\tBotScript\x12\x13\n\x0bscript_type\x18\x01 \x01(\t\x12\x13\n\x0bscript_data\x18\x02 \x01(\x0c\"K\n\tBotConfig\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\x12\x12\n\nbot_config\x18\x02 \x01(\t\"1\n\tBotParams\x12$\n\tvariables\x18\x01 \x03(\x0b\x32\x11.flowbot.Variable\"n\n\x11\x41udioStreamConfig\x12\x0e\n\x06stream\x18\x01 \x01(\x08\x12\x13\n\x0bsample_rate\x18\x02 \x01(\x05\x12\x10\n\x08\x63hannels\x18\x03 \x01(\x05\x12\x0e\n\x06\x66ormat\x18\x04 \x01(\t\x12\x12\n\nchunk_size\x18\x05 \x01(\x05\"\xac\x01\n\nChatParams\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\x12&\n\nbot_params\x18\x02 \x01(\x0b\x32\x12.flowbot.BotParams\x12\x39\n\x13\x61udio_stream_config\x18\x03 \x01(\x0b\x32\x1a.flowbot.AudioStreamConfigH\x00\x42\x0f\n\rstream_config\"\x1d\n\nInitStatus\x12\x0f\n\x07success\x18\x01 \x01(\x08\":\n\x0cStartRequest\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\"9\n\x0bStopRequest\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\"J\n\x0b\x43hatRequest\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\x12\x0f\n\x07message\x18\x02 \x01(\t\"i\n\x14\x43hatResponseMetadata\x12\x10\n\x08is_final\x18\x01 \x01(\x08\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x15\n\rresponse_type\x18\x03 \x01(\t\x12\x17\n\x0f\x61\x64\x64itional_info\x18\x04 \x01(\t\")\n\nAudioChunk\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63hunk\x18\x02 \x01(\x0c\"y\n\x0c\x43hatResponse\x12\x31\n\x08metadata\x18\x01 \x01(\x0b\x32\x1d.flowbot.ChatResponseMetadataH\x00\x12*\n\x0b\x61udio_chunk\x18\x02 \x01(\x0b\x32\x13.flowbot.AudioChunkH\x00\x42\n\n\x08response\"\x18\n\x08\x43hatInfo\x12\x0c\n\x04info\x18\x01 
\x01(\t2\xae\x03\n\x0e\x46lowBotService\x12\x34\n\x08ParseBot\x12\x12.flowbot.BotScript\x1a\x12.flowbot.BotConfig\"\x00\x12\x36\n\tCreateBot\x12\x12.flowbot.BotConfig\x1a\x13.flowbot.ChatParams\"\x00\x12\x35\n\x07InitBot\x12\x13.flowbot.ChatParams\x1a\x13.flowbot.InitStatus\"\x00\x12\x45\n\x11StartConversation\x12\x15.flowbot.StartRequest\x1a\x15.flowbot.ChatResponse\"\x00\x30\x01\x12=\n\x10StopConversation\x12\x14.flowbot.StopRequest\x1a\x11.flowbot.ChatInfo\"\x00\x12\x37\n\x04\x43hat\x12\x14.flowbot.ChatRequest\x1a\x15.flowbot.ChatResponse\"\x00\x30\x01\x12\x38\n\x0b\x45xtractInfo\x12\x14.commons.SessionInfo\x1a\x11.flowbot.ChatInfo\"\x00\x62\x06proto3')
|
| 19 |
+
|
| 20 |
+
_globals = globals()
|
| 21 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
| 22 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flowbot_service_pb2', _globals)
|
| 23 |
+
if not _descriptor._USE_C_DESCRIPTORS:
|
| 24 |
+
DESCRIPTOR._loaded_options = None
|
| 25 |
+
_globals['_VARIABLE']._serialized_start=49
|
| 26 |
+
_globals['_VARIABLE']._serialized_end=102
|
| 27 |
+
_globals['_BOTSCRIPT']._serialized_start=104
|
| 28 |
+
_globals['_BOTSCRIPT']._serialized_end=157
|
| 29 |
+
_globals['_BOTCONFIG']._serialized_start=159
|
| 30 |
+
_globals['_BOTCONFIG']._serialized_end=234
|
| 31 |
+
_globals['_BOTPARAMS']._serialized_start=236
|
| 32 |
+
_globals['_BOTPARAMS']._serialized_end=285
|
| 33 |
+
_globals['_AUDIOSTREAMCONFIG']._serialized_start=287
|
| 34 |
+
_globals['_AUDIOSTREAMCONFIG']._serialized_end=397
|
| 35 |
+
_globals['_CHATPARAMS']._serialized_start=400
|
| 36 |
+
_globals['_CHATPARAMS']._serialized_end=572
|
| 37 |
+
_globals['_INITSTATUS']._serialized_start=574
|
| 38 |
+
_globals['_INITSTATUS']._serialized_end=603
|
| 39 |
+
_globals['_STARTREQUEST']._serialized_start=605
|
| 40 |
+
_globals['_STARTREQUEST']._serialized_end=663
|
| 41 |
+
_globals['_STOPREQUEST']._serialized_start=665
|
| 42 |
+
_globals['_STOPREQUEST']._serialized_end=722
|
| 43 |
+
_globals['_CHATREQUEST']._serialized_start=724
|
| 44 |
+
_globals['_CHATREQUEST']._serialized_end=798
|
| 45 |
+
_globals['_CHATRESPONSEMETADATA']._serialized_start=800
|
| 46 |
+
_globals['_CHATRESPONSEMETADATA']._serialized_end=905
|
| 47 |
+
_globals['_AUDIOCHUNK']._serialized_start=907
|
| 48 |
+
_globals['_AUDIOCHUNK']._serialized_end=948
|
| 49 |
+
_globals['_CHATRESPONSE']._serialized_start=950
|
| 50 |
+
_globals['_CHATRESPONSE']._serialized_end=1071
|
| 51 |
+
_globals['_CHATINFO']._serialized_start=1073
|
| 52 |
+
_globals['_CHATINFO']._serialized_end=1097
|
| 53 |
+
_globals['_FLOWBOTSERVICE']._serialized_start=1100
|
| 54 |
+
_globals['_FLOWBOTSERVICE']._serialized_end=1530
|
| 55 |
+
# @@protoc_insertion_point(module_scope)
|
grpc_services/generated/flowbot_service_pb2.pyi
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
@generated by mypy-protobuf. Do not edit manually!
|
| 3 |
+
isort:skip_file
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import builtins
|
| 7 |
+
import collections.abc
|
| 8 |
+
import grpc_services.generated.commons_pb2 as commons_pb2
|
| 9 |
+
import google.protobuf.descriptor
|
| 10 |
+
import google.protobuf.internal.containers
|
| 11 |
+
import google.protobuf.message
|
| 12 |
+
import typing
|
| 13 |
+
|
| 14 |
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
| 15 |
+
|
| 16 |
+
@typing.final
|
| 17 |
+
class Variable(google.protobuf.message.Message):
|
| 18 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 19 |
+
|
| 20 |
+
NAME_FIELD_NUMBER: builtins.int
|
| 21 |
+
VALUE_FIELD_NUMBER: builtins.int
|
| 22 |
+
TYPE_FIELD_NUMBER: builtins.int
|
| 23 |
+
name: builtins.str
|
| 24 |
+
value: builtins.str
|
| 25 |
+
type: builtins.str
|
| 26 |
+
def __init__(
|
| 27 |
+
self,
|
| 28 |
+
*,
|
| 29 |
+
name: builtins.str = ...,
|
| 30 |
+
value: builtins.str = ...,
|
| 31 |
+
type: builtins.str = ...,
|
| 32 |
+
) -> None: ...
|
| 33 |
+
def ClearField(self, field_name: typing.Literal["name", b"name", "type", b"type", "value", b"value"]) -> None: ...
|
| 34 |
+
|
| 35 |
+
global___Variable = Variable
|
| 36 |
+
|
| 37 |
+
@typing.final
|
| 38 |
+
class BotScript(google.protobuf.message.Message):
|
| 39 |
+
"""server params"""
|
| 40 |
+
|
| 41 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 42 |
+
|
| 43 |
+
SCRIPT_TYPE_FIELD_NUMBER: builtins.int
|
| 44 |
+
SCRIPT_DATA_FIELD_NUMBER: builtins.int
|
| 45 |
+
script_type: builtins.str
|
| 46 |
+
"""be-[name]"""
|
| 47 |
+
script_data: builtins.bytes
|
| 48 |
+
def __init__(
|
| 49 |
+
self,
|
| 50 |
+
*,
|
| 51 |
+
script_type: builtins.str = ...,
|
| 52 |
+
script_data: builtins.bytes = ...,
|
| 53 |
+
) -> None: ...
|
| 54 |
+
def ClearField(self, field_name: typing.Literal["script_data", b"script_data", "script_type", b"script_type"]) -> None: ...
|
| 55 |
+
|
| 56 |
+
global___BotScript = BotScript
|
| 57 |
+
|
| 58 |
+
@typing.final
|
| 59 |
+
class BotConfig(google.protobuf.message.Message):
|
| 60 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 61 |
+
|
| 62 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 63 |
+
BOT_CONFIG_FIELD_NUMBER: builtins.int
|
| 64 |
+
bot_config: builtins.str
|
| 65 |
+
"""json"""
|
| 66 |
+
@property
|
| 67 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 68 |
+
def __init__(
|
| 69 |
+
self,
|
| 70 |
+
*,
|
| 71 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 72 |
+
bot_config: builtins.str = ...,
|
| 73 |
+
) -> None: ...
|
| 74 |
+
def HasField(self, field_name: typing.Literal["session_info", b"session_info"]) -> builtins.bool: ...
|
| 75 |
+
def ClearField(self, field_name: typing.Literal["bot_config", b"bot_config", "session_info", b"session_info"]) -> None: ...
|
| 76 |
+
|
| 77 |
+
global___BotConfig = BotConfig
|
| 78 |
+
|
| 79 |
+
@typing.final
|
| 80 |
+
class BotParams(google.protobuf.message.Message):
|
| 81 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 82 |
+
|
| 83 |
+
VARIABLES_FIELD_NUMBER: builtins.int
|
| 84 |
+
@property
|
| 85 |
+
def variables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Variable]: ...
|
| 86 |
+
def __init__(
|
| 87 |
+
self,
|
| 88 |
+
*,
|
| 89 |
+
variables: collections.abc.Iterable[global___Variable] | None = ...,
|
| 90 |
+
) -> None: ...
|
| 91 |
+
def ClearField(self, field_name: typing.Literal["variables", b"variables"]) -> None: ...
|
| 92 |
+
|
| 93 |
+
global___BotParams = BotParams
|
| 94 |
+
|
| 95 |
+
@typing.final
|
| 96 |
+
class AudioStreamConfig(google.protobuf.message.Message):
|
| 97 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 98 |
+
|
| 99 |
+
STREAM_FIELD_NUMBER: builtins.int
|
| 100 |
+
SAMPLE_RATE_FIELD_NUMBER: builtins.int
|
| 101 |
+
CHANNELS_FIELD_NUMBER: builtins.int
|
| 102 |
+
FORMAT_FIELD_NUMBER: builtins.int
|
| 103 |
+
CHUNK_SIZE_FIELD_NUMBER: builtins.int
|
| 104 |
+
stream: builtins.bool
|
| 105 |
+
sample_rate: builtins.int
|
| 106 |
+
channels: builtins.int
|
| 107 |
+
format: builtins.str
|
| 108 |
+
chunk_size: builtins.int
|
| 109 |
+
def __init__(
|
| 110 |
+
self,
|
| 111 |
+
*,
|
| 112 |
+
stream: builtins.bool = ...,
|
| 113 |
+
sample_rate: builtins.int = ...,
|
| 114 |
+
channels: builtins.int = ...,
|
| 115 |
+
format: builtins.str = ...,
|
| 116 |
+
chunk_size: builtins.int = ...,
|
| 117 |
+
) -> None: ...
|
| 118 |
+
def ClearField(self, field_name: typing.Literal["channels", b"channels", "chunk_size", b"chunk_size", "format", b"format", "sample_rate", b"sample_rate", "stream", b"stream"]) -> None: ...
|
| 119 |
+
|
| 120 |
+
global___AudioStreamConfig = AudioStreamConfig
|
| 121 |
+
|
| 122 |
+
@typing.final
|
| 123 |
+
class ChatParams(google.protobuf.message.Message):
|
| 124 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 125 |
+
|
| 126 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 127 |
+
BOT_PARAMS_FIELD_NUMBER: builtins.int
|
| 128 |
+
AUDIO_STREAM_CONFIG_FIELD_NUMBER: builtins.int
|
| 129 |
+
@property
|
| 130 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 131 |
+
@property
|
| 132 |
+
def bot_params(self) -> global___BotParams: ...
|
| 133 |
+
@property
|
| 134 |
+
def audio_stream_config(self) -> global___AudioStreamConfig: ...
|
| 135 |
+
def __init__(
|
| 136 |
+
self,
|
| 137 |
+
*,
|
| 138 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 139 |
+
bot_params: global___BotParams | None = ...,
|
| 140 |
+
audio_stream_config: global___AudioStreamConfig | None = ...,
|
| 141 |
+
) -> None: ...
|
| 142 |
+
def HasField(self, field_name: typing.Literal["audio_stream_config", b"audio_stream_config", "bot_params", b"bot_params", "session_info", b"session_info", "stream_config", b"stream_config"]) -> builtins.bool: ...
|
| 143 |
+
def ClearField(self, field_name: typing.Literal["audio_stream_config", b"audio_stream_config", "bot_params", b"bot_params", "session_info", b"session_info", "stream_config", b"stream_config"]) -> None: ...
|
| 144 |
+
def WhichOneof(self, oneof_group: typing.Literal["stream_config", b"stream_config"]) -> typing.Literal["audio_stream_config"] | None: ...
|
| 145 |
+
|
| 146 |
+
global___ChatParams = ChatParams
|
| 147 |
+
|
| 148 |
+
@typing.final
|
| 149 |
+
class InitStatus(google.protobuf.message.Message):
|
| 150 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 151 |
+
|
| 152 |
+
SUCCESS_FIELD_NUMBER: builtins.int
|
| 153 |
+
success: builtins.bool
|
| 154 |
+
def __init__(
|
| 155 |
+
self,
|
| 156 |
+
*,
|
| 157 |
+
success: builtins.bool = ...,
|
| 158 |
+
) -> None: ...
|
| 159 |
+
def ClearField(self, field_name: typing.Literal["success", b"success"]) -> None: ...
|
| 160 |
+
|
| 161 |
+
global___InitStatus = InitStatus
|
| 162 |
+
|
| 163 |
+
@typing.final
|
| 164 |
+
class StartRequest(google.protobuf.message.Message):
|
| 165 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 166 |
+
|
| 167 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 168 |
+
@property
|
| 169 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 170 |
+
def __init__(
|
| 171 |
+
self,
|
| 172 |
+
*,
|
| 173 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 174 |
+
) -> None: ...
|
| 175 |
+
def HasField(self, field_name: typing.Literal["session_info", b"session_info"]) -> builtins.bool: ...
|
| 176 |
+
def ClearField(self, field_name: typing.Literal["session_info", b"session_info"]) -> None: ...
|
| 177 |
+
|
| 178 |
+
global___StartRequest = StartRequest
|
| 179 |
+
|
| 180 |
+
@typing.final
|
| 181 |
+
class StopRequest(google.protobuf.message.Message):
|
| 182 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 183 |
+
|
| 184 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 185 |
+
@property
|
| 186 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 187 |
+
def __init__(
|
| 188 |
+
self,
|
| 189 |
+
*,
|
| 190 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 191 |
+
) -> None: ...
|
| 192 |
+
def HasField(self, field_name: typing.Literal["session_info", b"session_info"]) -> builtins.bool: ...
|
| 193 |
+
def ClearField(self, field_name: typing.Literal["session_info", b"session_info"]) -> None: ...
|
| 194 |
+
|
| 195 |
+
global___StopRequest = StopRequest
|
| 196 |
+
|
| 197 |
+
@typing.final
|
| 198 |
+
class ChatRequest(google.protobuf.message.Message):
|
| 199 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 200 |
+
|
| 201 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 202 |
+
MESSAGE_FIELD_NUMBER: builtins.int
|
| 203 |
+
message: builtins.str
|
| 204 |
+
@property
|
| 205 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 206 |
+
def __init__(
|
| 207 |
+
self,
|
| 208 |
+
*,
|
| 209 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 210 |
+
message: builtins.str = ...,
|
| 211 |
+
) -> None: ...
|
| 212 |
+
def HasField(self, field_name: typing.Literal["session_info", b"session_info"]) -> builtins.bool: ...
|
| 213 |
+
def ClearField(self, field_name: typing.Literal["message", b"message", "session_info", b"session_info"]) -> None: ...
|
| 214 |
+
|
| 215 |
+
global___ChatRequest = ChatRequest
|
| 216 |
+
|
| 217 |
+
@typing.final
|
| 218 |
+
class ChatResponseMetadata(google.protobuf.message.Message):
|
| 219 |
+
"""TODO: Make it streamable?"""
|
| 220 |
+
|
| 221 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 222 |
+
|
| 223 |
+
IS_FINAL_FIELD_NUMBER: builtins.int
|
| 224 |
+
CONTENT_FIELD_NUMBER: builtins.int
|
| 225 |
+
RESPONSE_TYPE_FIELD_NUMBER: builtins.int
|
| 226 |
+
ADDITIONAL_INFO_FIELD_NUMBER: builtins.int
|
| 227 |
+
is_final: builtins.bool
|
| 228 |
+
content: builtins.str
|
| 229 |
+
response_type: builtins.str
|
| 230 |
+
additional_info: builtins.str
|
| 231 |
+
"""json"""
|
| 232 |
+
def __init__(
|
| 233 |
+
self,
|
| 234 |
+
*,
|
| 235 |
+
is_final: builtins.bool = ...,
|
| 236 |
+
content: builtins.str = ...,
|
| 237 |
+
response_type: builtins.str = ...,
|
| 238 |
+
additional_info: builtins.str = ...,
|
| 239 |
+
) -> None: ...
|
| 240 |
+
def ClearField(self, field_name: typing.Literal["additional_info", b"additional_info", "content", b"content", "is_final", b"is_final", "response_type", b"response_type"]) -> None: ...
|
| 241 |
+
|
| 242 |
+
global___ChatResponseMetadata = ChatResponseMetadata
|
| 243 |
+
|
| 244 |
+
@typing.final
|
| 245 |
+
class AudioChunk(google.protobuf.message.Message):
|
| 246 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 247 |
+
|
| 248 |
+
TYPE_FIELD_NUMBER: builtins.int
|
| 249 |
+
CHUNK_FIELD_NUMBER: builtins.int
|
| 250 |
+
type: builtins.str
|
| 251 |
+
chunk: builtins.bytes
|
| 252 |
+
def __init__(
|
| 253 |
+
self,
|
| 254 |
+
*,
|
| 255 |
+
type: builtins.str = ...,
|
| 256 |
+
chunk: builtins.bytes = ...,
|
| 257 |
+
) -> None: ...
|
| 258 |
+
def ClearField(self, field_name: typing.Literal["chunk", b"chunk", "type", b"type"]) -> None: ...
|
| 259 |
+
|
| 260 |
+
global___AudioChunk = AudioChunk
|
| 261 |
+
|
| 262 |
+
@typing.final
|
| 263 |
+
class ChatResponse(google.protobuf.message.Message):
|
| 264 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 265 |
+
|
| 266 |
+
METADATA_FIELD_NUMBER: builtins.int
|
| 267 |
+
AUDIO_CHUNK_FIELD_NUMBER: builtins.int
|
| 268 |
+
@property
|
| 269 |
+
def metadata(self) -> global___ChatResponseMetadata: ...
|
| 270 |
+
@property
|
| 271 |
+
def audio_chunk(self) -> global___AudioChunk: ...
|
| 272 |
+
def __init__(
|
| 273 |
+
self,
|
| 274 |
+
*,
|
| 275 |
+
metadata: global___ChatResponseMetadata | None = ...,
|
| 276 |
+
audio_chunk: global___AudioChunk | None = ...,
|
| 277 |
+
) -> None: ...
|
| 278 |
+
def HasField(self, field_name: typing.Literal["audio_chunk", b"audio_chunk", "metadata", b"metadata", "response", b"response"]) -> builtins.bool: ...
|
| 279 |
+
def ClearField(self, field_name: typing.Literal["audio_chunk", b"audio_chunk", "metadata", b"metadata", "response", b"response"]) -> None: ...
|
| 280 |
+
def WhichOneof(self, oneof_group: typing.Literal["response", b"response"]) -> typing.Literal["metadata", "audio_chunk"] | None: ...
|
| 281 |
+
|
| 282 |
+
global___ChatResponse = ChatResponse
|
| 283 |
+
|
| 284 |
+
@typing.final
|
| 285 |
+
class ChatInfo(google.protobuf.message.Message):
|
| 286 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 287 |
+
|
| 288 |
+
INFO_FIELD_NUMBER: builtins.int
|
| 289 |
+
info: builtins.str
|
| 290 |
+
def __init__(
|
| 291 |
+
self,
|
| 292 |
+
*,
|
| 293 |
+
info: builtins.str = ...,
|
| 294 |
+
) -> None: ...
|
| 295 |
+
def ClearField(self, field_name: typing.Literal["info", b"info"]) -> None: ...
|
| 296 |
+
|
| 297 |
+
global___ChatInfo = ChatInfo
|
grpc_services/generated/flowbot_service_pb2_grpc.py
ADDED
|
@@ -0,0 +1,363 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
| 2 |
+
"""Client and server classes corresponding to protobuf-defined services."""
|
| 3 |
+
import grpc
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
import grpc_services.generated.commons_pb2 as commons__pb2
|
| 7 |
+
import grpc_services.generated.flowbot_service_pb2 as flowbot__service__pb2
|
| 8 |
+
|
| 9 |
+
GRPC_GENERATED_VERSION = '1.63.0'
|
| 10 |
+
GRPC_VERSION = grpc.__version__
|
| 11 |
+
EXPECTED_ERROR_RELEASE = '1.65.0'
|
| 12 |
+
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
|
| 13 |
+
_version_not_supported = False
|
| 14 |
+
|
| 15 |
+
try:
|
| 16 |
+
from grpc._utilities import first_version_is_lower
|
| 17 |
+
_version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
|
| 18 |
+
except ImportError:
|
| 19 |
+
_version_not_supported = True
|
| 20 |
+
|
| 21 |
+
if _version_not_supported:
|
| 22 |
+
warnings.warn(
|
| 23 |
+
f'The grpc package installed is at version {GRPC_VERSION},'
|
| 24 |
+
+ f' but the generated code in flowbot_service_pb2_grpc.py depends on'
|
| 25 |
+
+ f' grpcio>={GRPC_GENERATED_VERSION}.'
|
| 26 |
+
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
|
| 27 |
+
+ f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
|
| 28 |
+
+ f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
|
| 29 |
+
+ f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
|
| 30 |
+
RuntimeWarning
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class FlowBotServiceStub(object):
|
| 35 |
+
"""Missing associated documentation comment in .proto file."""
|
| 36 |
+
|
| 37 |
+
def __init__(self, channel):
|
| 38 |
+
"""Constructor.
|
| 39 |
+
|
| 40 |
+
Args:
|
| 41 |
+
channel: A grpc.Channel.
|
| 42 |
+
"""
|
| 43 |
+
self.ParseBot = channel.unary_unary(
|
| 44 |
+
'/flowbot.FlowBotService/ParseBot',
|
| 45 |
+
request_serializer=flowbot__service__pb2.BotScript.SerializeToString,
|
| 46 |
+
response_deserializer=flowbot__service__pb2.BotConfig.FromString,
|
| 47 |
+
_registered_method=True)
|
| 48 |
+
self.CreateBot = channel.unary_unary(
|
| 49 |
+
'/flowbot.FlowBotService/CreateBot',
|
| 50 |
+
request_serializer=flowbot__service__pb2.BotConfig.SerializeToString,
|
| 51 |
+
response_deserializer=flowbot__service__pb2.ChatParams.FromString,
|
| 52 |
+
_registered_method=True)
|
| 53 |
+
self.InitBot = channel.unary_unary(
|
| 54 |
+
'/flowbot.FlowBotService/InitBot',
|
| 55 |
+
request_serializer=flowbot__service__pb2.ChatParams.SerializeToString,
|
| 56 |
+
response_deserializer=flowbot__service__pb2.InitStatus.FromString,
|
| 57 |
+
_registered_method=True)
|
| 58 |
+
self.StartConversation = channel.unary_stream(
|
| 59 |
+
'/flowbot.FlowBotService/StartConversation',
|
| 60 |
+
request_serializer=flowbot__service__pb2.StartRequest.SerializeToString,
|
| 61 |
+
response_deserializer=flowbot__service__pb2.ChatResponse.FromString,
|
| 62 |
+
_registered_method=True)
|
| 63 |
+
self.StopConversation = channel.unary_unary(
|
| 64 |
+
'/flowbot.FlowBotService/StopConversation',
|
| 65 |
+
request_serializer=flowbot__service__pb2.StopRequest.SerializeToString,
|
| 66 |
+
response_deserializer=flowbot__service__pb2.ChatInfo.FromString,
|
| 67 |
+
_registered_method=True)
|
| 68 |
+
self.Chat = channel.unary_stream(
|
| 69 |
+
'/flowbot.FlowBotService/Chat',
|
| 70 |
+
request_serializer=flowbot__service__pb2.ChatRequest.SerializeToString,
|
| 71 |
+
response_deserializer=flowbot__service__pb2.ChatResponse.FromString,
|
| 72 |
+
_registered_method=True)
|
| 73 |
+
self.ExtractInfo = channel.unary_unary(
|
| 74 |
+
'/flowbot.FlowBotService/ExtractInfo',
|
| 75 |
+
request_serializer=commons__pb2.SessionInfo.SerializeToString,
|
| 76 |
+
response_deserializer=flowbot__service__pb2.ChatInfo.FromString,
|
| 77 |
+
_registered_method=True)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class FlowBotServiceServicer(object):
|
| 81 |
+
"""Missing associated documentation comment in .proto file."""
|
| 82 |
+
|
| 83 |
+
def ParseBot(self, request, context):
|
| 84 |
+
"""Parse bot: from csv to json
|
| 85 |
+
"""
|
| 86 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 87 |
+
context.set_details('Method not implemented!')
|
| 88 |
+
raise NotImplementedError('Method not implemented!')
|
| 89 |
+
|
| 90 |
+
def CreateBot(self, request, context):
|
| 91 |
+
"""Create dialog
|
| 92 |
+
"""
|
| 93 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 94 |
+
context.set_details('Method not implemented!')
|
| 95 |
+
raise NotImplementedError('Method not implemented!')
|
| 96 |
+
|
| 97 |
+
def InitBot(self, request, context):
|
| 98 |
+
"""Init bot: Update variables, create dialog
|
| 99 |
+
"""
|
| 100 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 101 |
+
context.set_details('Method not implemented!')
|
| 102 |
+
raise NotImplementedError('Method not implemented!')
|
| 103 |
+
|
| 104 |
+
def StartConversation(self, request, context):
|
| 105 |
+
"""Missing associated documentation comment in .proto file."""
|
| 106 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 107 |
+
context.set_details('Method not implemented!')
|
| 108 |
+
raise NotImplementedError('Method not implemented!')
|
| 109 |
+
|
| 110 |
+
def StopConversation(self, request, context):
|
| 111 |
+
"""Missing associated documentation comment in .proto file."""
|
| 112 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 113 |
+
context.set_details('Method not implemented!')
|
| 114 |
+
raise NotImplementedError('Method not implemented!')
|
| 115 |
+
|
| 116 |
+
def Chat(self, request, context):
|
| 117 |
+
"""Missing associated documentation comment in .proto file."""
|
| 118 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 119 |
+
context.set_details('Method not implemented!')
|
| 120 |
+
raise NotImplementedError('Method not implemented!')
|
| 121 |
+
|
| 122 |
+
def ExtractInfo(self, request, context):
|
| 123 |
+
"""Missing associated documentation comment in .proto file."""
|
| 124 |
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 125 |
+
context.set_details('Method not implemented!')
|
| 126 |
+
raise NotImplementedError('Method not implemented!')
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def add_FlowBotServiceServicer_to_server(servicer, server):
|
| 130 |
+
rpc_method_handlers = {
|
| 131 |
+
'ParseBot': grpc.unary_unary_rpc_method_handler(
|
| 132 |
+
servicer.ParseBot,
|
| 133 |
+
request_deserializer=flowbot__service__pb2.BotScript.FromString,
|
| 134 |
+
response_serializer=flowbot__service__pb2.BotConfig.SerializeToString,
|
| 135 |
+
),
|
| 136 |
+
'CreateBot': grpc.unary_unary_rpc_method_handler(
|
| 137 |
+
servicer.CreateBot,
|
| 138 |
+
request_deserializer=flowbot__service__pb2.BotConfig.FromString,
|
| 139 |
+
response_serializer=flowbot__service__pb2.ChatParams.SerializeToString,
|
| 140 |
+
),
|
| 141 |
+
'InitBot': grpc.unary_unary_rpc_method_handler(
|
| 142 |
+
servicer.InitBot,
|
| 143 |
+
request_deserializer=flowbot__service__pb2.ChatParams.FromString,
|
| 144 |
+
response_serializer=flowbot__service__pb2.InitStatus.SerializeToString,
|
| 145 |
+
),
|
| 146 |
+
'StartConversation': grpc.unary_stream_rpc_method_handler(
|
| 147 |
+
servicer.StartConversation,
|
| 148 |
+
request_deserializer=flowbot__service__pb2.StartRequest.FromString,
|
| 149 |
+
response_serializer=flowbot__service__pb2.ChatResponse.SerializeToString,
|
| 150 |
+
),
|
| 151 |
+
'StopConversation': grpc.unary_unary_rpc_method_handler(
|
| 152 |
+
servicer.StopConversation,
|
| 153 |
+
request_deserializer=flowbot__service__pb2.StopRequest.FromString,
|
| 154 |
+
response_serializer=flowbot__service__pb2.ChatInfo.SerializeToString,
|
| 155 |
+
),
|
| 156 |
+
'Chat': grpc.unary_stream_rpc_method_handler(
|
| 157 |
+
servicer.Chat,
|
| 158 |
+
request_deserializer=flowbot__service__pb2.ChatRequest.FromString,
|
| 159 |
+
response_serializer=flowbot__service__pb2.ChatResponse.SerializeToString,
|
| 160 |
+
),
|
| 161 |
+
'ExtractInfo': grpc.unary_unary_rpc_method_handler(
|
| 162 |
+
servicer.ExtractInfo,
|
| 163 |
+
request_deserializer=commons__pb2.SessionInfo.FromString,
|
| 164 |
+
response_serializer=flowbot__service__pb2.ChatInfo.SerializeToString,
|
| 165 |
+
),
|
| 166 |
+
}
|
| 167 |
+
generic_handler = grpc.method_handlers_generic_handler(
|
| 168 |
+
'flowbot.FlowBotService', rpc_method_handlers)
|
| 169 |
+
server.add_generic_rpc_handlers((generic_handler,))
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
# This class is part of an EXPERIMENTAL API.
|
| 173 |
+
class FlowBotService(object):
|
| 174 |
+
"""Missing associated documentation comment in .proto file."""
|
| 175 |
+
|
| 176 |
+
@staticmethod
|
| 177 |
+
def ParseBot(request,
|
| 178 |
+
target,
|
| 179 |
+
options=(),
|
| 180 |
+
channel_credentials=None,
|
| 181 |
+
call_credentials=None,
|
| 182 |
+
insecure=False,
|
| 183 |
+
compression=None,
|
| 184 |
+
wait_for_ready=None,
|
| 185 |
+
timeout=None,
|
| 186 |
+
metadata=None):
|
| 187 |
+
return grpc.experimental.unary_unary(
|
| 188 |
+
request,
|
| 189 |
+
target,
|
| 190 |
+
'/flowbot.FlowBotService/ParseBot',
|
| 191 |
+
flowbot__service__pb2.BotScript.SerializeToString,
|
| 192 |
+
flowbot__service__pb2.BotConfig.FromString,
|
| 193 |
+
options,
|
| 194 |
+
channel_credentials,
|
| 195 |
+
insecure,
|
| 196 |
+
call_credentials,
|
| 197 |
+
compression,
|
| 198 |
+
wait_for_ready,
|
| 199 |
+
timeout,
|
| 200 |
+
metadata,
|
| 201 |
+
_registered_method=True)
|
| 202 |
+
|
| 203 |
+
@staticmethod
|
| 204 |
+
def CreateBot(request,
|
| 205 |
+
target,
|
| 206 |
+
options=(),
|
| 207 |
+
channel_credentials=None,
|
| 208 |
+
call_credentials=None,
|
| 209 |
+
insecure=False,
|
| 210 |
+
compression=None,
|
| 211 |
+
wait_for_ready=None,
|
| 212 |
+
timeout=None,
|
| 213 |
+
metadata=None):
|
| 214 |
+
return grpc.experimental.unary_unary(
|
| 215 |
+
request,
|
| 216 |
+
target,
|
| 217 |
+
'/flowbot.FlowBotService/CreateBot',
|
| 218 |
+
flowbot__service__pb2.BotConfig.SerializeToString,
|
| 219 |
+
flowbot__service__pb2.ChatParams.FromString,
|
| 220 |
+
options,
|
| 221 |
+
channel_credentials,
|
| 222 |
+
insecure,
|
| 223 |
+
call_credentials,
|
| 224 |
+
compression,
|
| 225 |
+
wait_for_ready,
|
| 226 |
+
timeout,
|
| 227 |
+
metadata,
|
| 228 |
+
_registered_method=True)
|
| 229 |
+
|
| 230 |
+
@staticmethod
|
| 231 |
+
def InitBot(request,
|
| 232 |
+
target,
|
| 233 |
+
options=(),
|
| 234 |
+
channel_credentials=None,
|
| 235 |
+
call_credentials=None,
|
| 236 |
+
insecure=False,
|
| 237 |
+
compression=None,
|
| 238 |
+
wait_for_ready=None,
|
| 239 |
+
timeout=None,
|
| 240 |
+
metadata=None):
|
| 241 |
+
return grpc.experimental.unary_unary(
|
| 242 |
+
request,
|
| 243 |
+
target,
|
| 244 |
+
'/flowbot.FlowBotService/InitBot',
|
| 245 |
+
flowbot__service__pb2.ChatParams.SerializeToString,
|
| 246 |
+
flowbot__service__pb2.InitStatus.FromString,
|
| 247 |
+
options,
|
| 248 |
+
channel_credentials,
|
| 249 |
+
insecure,
|
| 250 |
+
call_credentials,
|
| 251 |
+
compression,
|
| 252 |
+
wait_for_ready,
|
| 253 |
+
timeout,
|
| 254 |
+
metadata,
|
| 255 |
+
_registered_method=True)
|
| 256 |
+
|
| 257 |
+
@staticmethod
|
| 258 |
+
def StartConversation(request,
|
| 259 |
+
target,
|
| 260 |
+
options=(),
|
| 261 |
+
channel_credentials=None,
|
| 262 |
+
call_credentials=None,
|
| 263 |
+
insecure=False,
|
| 264 |
+
compression=None,
|
| 265 |
+
wait_for_ready=None,
|
| 266 |
+
timeout=None,
|
| 267 |
+
metadata=None):
|
| 268 |
+
return grpc.experimental.unary_stream(
|
| 269 |
+
request,
|
| 270 |
+
target,
|
| 271 |
+
'/flowbot.FlowBotService/StartConversation',
|
| 272 |
+
flowbot__service__pb2.StartRequest.SerializeToString,
|
| 273 |
+
flowbot__service__pb2.ChatResponse.FromString,
|
| 274 |
+
options,
|
| 275 |
+
channel_credentials,
|
| 276 |
+
insecure,
|
| 277 |
+
call_credentials,
|
| 278 |
+
compression,
|
| 279 |
+
wait_for_ready,
|
| 280 |
+
timeout,
|
| 281 |
+
metadata,
|
| 282 |
+
_registered_method=True)
|
| 283 |
+
|
| 284 |
+
@staticmethod
|
| 285 |
+
def StopConversation(request,
|
| 286 |
+
target,
|
| 287 |
+
options=(),
|
| 288 |
+
channel_credentials=None,
|
| 289 |
+
call_credentials=None,
|
| 290 |
+
insecure=False,
|
| 291 |
+
compression=None,
|
| 292 |
+
wait_for_ready=None,
|
| 293 |
+
timeout=None,
|
| 294 |
+
metadata=None):
|
| 295 |
+
return grpc.experimental.unary_unary(
|
| 296 |
+
request,
|
| 297 |
+
target,
|
| 298 |
+
'/flowbot.FlowBotService/StopConversation',
|
| 299 |
+
flowbot__service__pb2.StopRequest.SerializeToString,
|
| 300 |
+
flowbot__service__pb2.ChatInfo.FromString,
|
| 301 |
+
options,
|
| 302 |
+
channel_credentials,
|
| 303 |
+
insecure,
|
| 304 |
+
call_credentials,
|
| 305 |
+
compression,
|
| 306 |
+
wait_for_ready,
|
| 307 |
+
timeout,
|
| 308 |
+
metadata,
|
| 309 |
+
_registered_method=True)
|
| 310 |
+
|
| 311 |
+
@staticmethod
|
| 312 |
+
def Chat(request,
|
| 313 |
+
target,
|
| 314 |
+
options=(),
|
| 315 |
+
channel_credentials=None,
|
| 316 |
+
call_credentials=None,
|
| 317 |
+
insecure=False,
|
| 318 |
+
compression=None,
|
| 319 |
+
wait_for_ready=None,
|
| 320 |
+
timeout=None,
|
| 321 |
+
metadata=None):
|
| 322 |
+
return grpc.experimental.unary_stream(
|
| 323 |
+
request,
|
| 324 |
+
target,
|
| 325 |
+
'/flowbot.FlowBotService/Chat',
|
| 326 |
+
flowbot__service__pb2.ChatRequest.SerializeToString,
|
| 327 |
+
flowbot__service__pb2.ChatResponse.FromString,
|
| 328 |
+
options,
|
| 329 |
+
channel_credentials,
|
| 330 |
+
insecure,
|
| 331 |
+
call_credentials,
|
| 332 |
+
compression,
|
| 333 |
+
wait_for_ready,
|
| 334 |
+
timeout,
|
| 335 |
+
metadata,
|
| 336 |
+
_registered_method=True)
|
| 337 |
+
|
| 338 |
+
@staticmethod
|
| 339 |
+
def ExtractInfo(request,
|
| 340 |
+
target,
|
| 341 |
+
options=(),
|
| 342 |
+
channel_credentials=None,
|
| 343 |
+
call_credentials=None,
|
| 344 |
+
insecure=False,
|
| 345 |
+
compression=None,
|
| 346 |
+
wait_for_ready=None,
|
| 347 |
+
timeout=None,
|
| 348 |
+
metadata=None):
|
| 349 |
+
return grpc.experimental.unary_unary(
|
| 350 |
+
request,
|
| 351 |
+
target,
|
| 352 |
+
'/flowbot.FlowBotService/ExtractInfo',
|
| 353 |
+
commons__pb2.SessionInfo.SerializeToString,
|
| 354 |
+
flowbot__service__pb2.ChatInfo.FromString,
|
| 355 |
+
options,
|
| 356 |
+
channel_credentials,
|
| 357 |
+
insecure,
|
| 358 |
+
call_credentials,
|
| 359 |
+
compression,
|
| 360 |
+
wait_for_ready,
|
| 361 |
+
timeout,
|
| 362 |
+
metadata,
|
| 363 |
+
_registered_method=True)
|
grpc_services/generated/question_gen_service_pb2.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: question_gen_service.proto
|
| 4 |
+
# Protobuf Python Version: 5.26.1
|
| 5 |
+
"""Generated protocol buffer code."""
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
from google.protobuf.internal import builder as _builder
|
| 10 |
+
# @@protoc_insertion_point(imports)
|
| 11 |
+
|
| 12 |
+
_sym_db = _symbol_database.Default()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
import grpc_services.generated.commons_pb2 as commons__pb2
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aquestion_gen_service.proto\x12\x0cquestion_gen\x1a\rcommons.proto\"4\n\rQAExplanation\x12\x0e\n\x06option\x18\x01 \x01(\t\x12\x13\n\x0b\x65xplanation\x18\x02 \x01(\t\"\xb4\x01\n\x06QAItem\x12+\n\rquestion_type\x18\x02 \x01(\x0e\x32\x14.question_gen.QAType\x12\x10\n\x08question\x18\x03 \x01(\t\x12\x16\n\x0e\x63orrect_answer\x18\x04 \x01(\t\x12\x18\n\x10relevant_context\x18\x05 \x01(\t\x12\x39\n\x14question_explanation\x18\x06 \x03(\x0b\x32\x1b.question_gen.QAExplanation\"]\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12 \n\x04type\x18\x03 \x01(\x0e\x32\x12.question_gen.Type\x12\x0f\n\x07\x64\x65\x66\x61ult\x18\x04 \x01(\t\"R\n\tModelArgs\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x13\n\x0btemperature\x18\x02 \x01(\x02\x12\r\n\x05top_k\x18\x03 \x01(\x05\x12\r\n\x05top_p\x18\x04 \x01(\x02\"G\n\x10ModelInfoRequest\x12\x33\n\x0emodel_supplier\x18\x01 \x01(\x0e\x32\x1b.question_gen.ModelSupplier\"\'\n\x11ModelInfoResponse\x12\x12\n\nmodel_name\x18\x01 \x03(\t\"B\n\x13QuestionTypeRequest\x12+\n\rquestion_type\x18\x01 \x01(\x0e\x32\x14.question_gen.QAType\";\n\x14QuestionTypeResponse\x12#\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x13.question_gen.Field\"\xb2\x02\n\x12QuestionGenRequest\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\x12\x13\n\x0bquestion_id\x18\x02 \x01(\t\x12\x33\n\x0emodel_supplier\x18\x03 \x01(\x0e\x32\x1b.question_gen.ModelSupplier\x12+\n\nmodel_args\x18\x04 \x01(\x0b\x32\x17.question_gen.ModelArgs\x12\x15\n\rquestion_args\x18\x05 \x01(\t\x12\x15\n\rnum_questions\x18\x06 \x01(\x05\x12%\n\x07qa_item\x18\x07 \x01(\x0b\x32\x14.question_gen.QAItem\x12$\n\x06\x61\x63tion\x18\x08 \x01(\x0e\x32\x14.question_gen.Action\"Z\n\x1bQuestionGenResponseMetadata\x12*\n\x0csession_info\x18\x01 \x01(\x0b\x32\x14.commons.SessionInfo\x12\x0f\n\x07success\x18\x02 \x01(\x08\"\x81\x01\n\x17QuestionGenResponseData\x12*\n\x0csession_info\x18\x01 
\x01(\x0b\x32\x14.commons.SessionInfo\x12\x13\n\x0bquestion_id\x18\x02 \x01(\t\x12%\n\x07qa_item\x18\x03 \x01(\x0b\x32\x14.question_gen.QAItem\"\xa4\x01\n\x13QuestionGenResponse\x12=\n\x08metadata\x18\x01 \x01(\x0b\x32).question_gen.QuestionGenResponseMetadataH\x00\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.question_gen.QuestionGenResponseDataH\x00\x42\x17\n\x15question_gen_response*;\n\x06QAType\x12\n\n\x06YES_NO\x10\x00\x12\x13\n\x0fMULTIPLE_CHOICE\x10\x01\x12\x10\n\x0cSHORT_ANSWER\x10\x02*:\n\x06\x41\x63tion\x12\n\n\x06\x43REATE\x10\x00\x12\x0e\n\nREGENERATE\x10\x01\x12\n\n\x06\x44\x45LETE\x10\x02\x12\x08\n\x04LIST\x10\x03*z\n\x04Type\x12\x12\n\x0eLIST_OF_STRING\x10\x00\x12\x0f\n\x0bLIST_OF_INT\x10\x01\x12\x11\n\rLIST_OF_FLOAT\x10\x02\x12\x10\n\x0cLIST_OF_BOOL\x10\x03\x12\n\n\x06STRING\x10\x04\x12\x07\n\x03INT\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07*\x1b\n\rModelSupplier\x12\n\n\x06OPENAI\x10\x00\x32\xa2\x02\n\x12QuestionGenService\x12Q\n\x0cGetModelInfo\x12\x1e.question_gen.ModelInfoRequest\x1a\x1f.question_gen.ModelInfoResponse\"\x00\x12^\n\x13GetQuestionTypeInfo\x12!.question_gen.QuestionTypeRequest\x1a\".question_gen.QuestionTypeResponse\"\x00\x12Y\n\x0e\x43reateQuestion\x12 .question_gen.QuestionGenRequest\x1a!.question_gen.QuestionGenResponse\"\x00\x30\x01\x62\x06proto3')
|
| 19 |
+
|
| 20 |
+
_globals = globals()
|
| 21 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
| 22 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'question_gen_service_pb2', _globals)
|
| 23 |
+
if not _descriptor._USE_C_DESCRIPTORS:
|
| 24 |
+
DESCRIPTOR._loaded_options = None
|
| 25 |
+
_globals['_QATYPE']._serialized_start=1418
|
| 26 |
+
_globals['_QATYPE']._serialized_end=1477
|
| 27 |
+
_globals['_ACTION']._serialized_start=1479
|
| 28 |
+
_globals['_ACTION']._serialized_end=1537
|
| 29 |
+
_globals['_TYPE']._serialized_start=1539
|
| 30 |
+
_globals['_TYPE']._serialized_end=1661
|
| 31 |
+
_globals['_MODELSUPPLIER']._serialized_start=1663
|
| 32 |
+
_globals['_MODELSUPPLIER']._serialized_end=1690
|
| 33 |
+
_globals['_QAEXPLANATION']._serialized_start=59
|
| 34 |
+
_globals['_QAEXPLANATION']._serialized_end=111
|
| 35 |
+
_globals['_QAITEM']._serialized_start=114
|
| 36 |
+
_globals['_QAITEM']._serialized_end=294
|
| 37 |
+
_globals['_FIELD']._serialized_start=296
|
| 38 |
+
_globals['_FIELD']._serialized_end=389
|
| 39 |
+
_globals['_MODELARGS']._serialized_start=391
|
| 40 |
+
_globals['_MODELARGS']._serialized_end=473
|
| 41 |
+
_globals['_MODELINFOREQUEST']._serialized_start=475
|
| 42 |
+
_globals['_MODELINFOREQUEST']._serialized_end=546
|
| 43 |
+
_globals['_MODELINFORESPONSE']._serialized_start=548
|
| 44 |
+
_globals['_MODELINFORESPONSE']._serialized_end=587
|
| 45 |
+
_globals['_QUESTIONTYPEREQUEST']._serialized_start=589
|
| 46 |
+
_globals['_QUESTIONTYPEREQUEST']._serialized_end=655
|
| 47 |
+
_globals['_QUESTIONTYPERESPONSE']._serialized_start=657
|
| 48 |
+
_globals['_QUESTIONTYPERESPONSE']._serialized_end=716
|
| 49 |
+
_globals['_QUESTIONGENREQUEST']._serialized_start=719
|
| 50 |
+
_globals['_QUESTIONGENREQUEST']._serialized_end=1025
|
| 51 |
+
_globals['_QUESTIONGENRESPONSEMETADATA']._serialized_start=1027
|
| 52 |
+
_globals['_QUESTIONGENRESPONSEMETADATA']._serialized_end=1117
|
| 53 |
+
_globals['_QUESTIONGENRESPONSEDATA']._serialized_start=1120
|
| 54 |
+
_globals['_QUESTIONGENRESPONSEDATA']._serialized_end=1249
|
| 55 |
+
_globals['_QUESTIONGENRESPONSE']._serialized_start=1252
|
| 56 |
+
_globals['_QUESTIONGENRESPONSE']._serialized_end=1416
|
| 57 |
+
_globals['_QUESTIONGENSERVICE']._serialized_start=1693
|
| 58 |
+
_globals['_QUESTIONGENSERVICE']._serialized_end=1983
|
| 59 |
+
# @@protoc_insertion_point(module_scope)
|
grpc_services/generated/question_gen_service_pb2.pyi
ADDED
|
@@ -0,0 +1,366 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
@generated by mypy-protobuf. Do not edit manually!
|
| 3 |
+
isort:skip_file
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import builtins
|
| 7 |
+
import collections.abc
|
| 8 |
+
import grpc_services.generated.commons_pb2 as commons_pb2
|
| 9 |
+
import google.protobuf.descriptor
|
| 10 |
+
import google.protobuf.internal.containers
|
| 11 |
+
import google.protobuf.internal.enum_type_wrapper
|
| 12 |
+
import google.protobuf.message
|
| 13 |
+
import sys
|
| 14 |
+
import typing
|
| 15 |
+
|
| 16 |
+
if sys.version_info >= (3, 10):
|
| 17 |
+
import typing as typing_extensions
|
| 18 |
+
else:
|
| 19 |
+
import typing_extensions
|
| 20 |
+
|
| 21 |
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
| 22 |
+
|
| 23 |
+
class _QAType:
|
| 24 |
+
ValueType = typing.NewType("ValueType", builtins.int)
|
| 25 |
+
V: typing_extensions.TypeAlias = ValueType
|
| 26 |
+
|
| 27 |
+
class _QATypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_QAType.ValueType], builtins.type):
|
| 28 |
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
| 29 |
+
YES_NO: _QAType.ValueType # 0
|
| 30 |
+
MULTIPLE_CHOICE: _QAType.ValueType # 1
|
| 31 |
+
SHORT_ANSWER: _QAType.ValueType # 2
|
| 32 |
+
|
| 33 |
+
class QAType(_QAType, metaclass=_QATypeEnumTypeWrapper): ...
|
| 34 |
+
|
| 35 |
+
YES_NO: QAType.ValueType # 0
|
| 36 |
+
MULTIPLE_CHOICE: QAType.ValueType # 1
|
| 37 |
+
SHORT_ANSWER: QAType.ValueType # 2
|
| 38 |
+
global___QAType = QAType
|
| 39 |
+
|
| 40 |
+
class _Action:
|
| 41 |
+
ValueType = typing.NewType("ValueType", builtins.int)
|
| 42 |
+
V: typing_extensions.TypeAlias = ValueType
|
| 43 |
+
|
| 44 |
+
class _ActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Action.ValueType], builtins.type):
|
| 45 |
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
| 46 |
+
CREATE: _Action.ValueType # 0
|
| 47 |
+
REGENERATE: _Action.ValueType # 1
|
| 48 |
+
DELETE: _Action.ValueType # 2
|
| 49 |
+
LIST: _Action.ValueType # 3
|
| 50 |
+
|
| 51 |
+
class Action(_Action, metaclass=_ActionEnumTypeWrapper): ...
|
| 52 |
+
|
| 53 |
+
CREATE: Action.ValueType # 0
|
| 54 |
+
REGENERATE: Action.ValueType # 1
|
| 55 |
+
DELETE: Action.ValueType # 2
|
| 56 |
+
LIST: Action.ValueType # 3
|
| 57 |
+
global___Action = Action
|
| 58 |
+
|
| 59 |
+
class _Type:
|
| 60 |
+
ValueType = typing.NewType("ValueType", builtins.int)
|
| 61 |
+
V: typing_extensions.TypeAlias = ValueType
|
| 62 |
+
|
| 63 |
+
class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Type.ValueType], builtins.type):
|
| 64 |
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
| 65 |
+
LIST_OF_STRING: _Type.ValueType # 0
|
| 66 |
+
LIST_OF_INT: _Type.ValueType # 1
|
| 67 |
+
LIST_OF_FLOAT: _Type.ValueType # 2
|
| 68 |
+
LIST_OF_BOOL: _Type.ValueType # 3
|
| 69 |
+
STRING: _Type.ValueType # 4
|
| 70 |
+
INT: _Type.ValueType # 5
|
| 71 |
+
FLOAT: _Type.ValueType # 6
|
| 72 |
+
BOOL: _Type.ValueType # 7
|
| 73 |
+
|
| 74 |
+
class Type(_Type, metaclass=_TypeEnumTypeWrapper): ...
|
| 75 |
+
|
| 76 |
+
LIST_OF_STRING: Type.ValueType # 0
|
| 77 |
+
LIST_OF_INT: Type.ValueType # 1
|
| 78 |
+
LIST_OF_FLOAT: Type.ValueType # 2
|
| 79 |
+
LIST_OF_BOOL: Type.ValueType # 3
|
| 80 |
+
STRING: Type.ValueType # 4
|
| 81 |
+
INT: Type.ValueType # 5
|
| 82 |
+
FLOAT: Type.ValueType # 6
|
| 83 |
+
BOOL: Type.ValueType # 7
|
| 84 |
+
global___Type = Type
|
| 85 |
+
|
| 86 |
+
class _ModelSupplier:
|
| 87 |
+
ValueType = typing.NewType("ValueType", builtins.int)
|
| 88 |
+
V: typing_extensions.TypeAlias = ValueType
|
| 89 |
+
|
| 90 |
+
class _ModelSupplierEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ModelSupplier.ValueType], builtins.type):
|
| 91 |
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
| 92 |
+
OPENAI: _ModelSupplier.ValueType # 0
|
| 93 |
+
|
| 94 |
+
class ModelSupplier(_ModelSupplier, metaclass=_ModelSupplierEnumTypeWrapper): ...
|
| 95 |
+
|
| 96 |
+
OPENAI: ModelSupplier.ValueType # 0
|
| 97 |
+
global___ModelSupplier = ModelSupplier
|
| 98 |
+
|
| 99 |
+
@typing.final
|
| 100 |
+
class QAExplanation(google.protobuf.message.Message):
|
| 101 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 102 |
+
|
| 103 |
+
OPTION_FIELD_NUMBER: builtins.int
|
| 104 |
+
EXPLANATION_FIELD_NUMBER: builtins.int
|
| 105 |
+
option: builtins.str
|
| 106 |
+
explanation: builtins.str
|
| 107 |
+
def __init__(
|
| 108 |
+
self,
|
| 109 |
+
*,
|
| 110 |
+
option: builtins.str = ...,
|
| 111 |
+
explanation: builtins.str = ...,
|
| 112 |
+
) -> None: ...
|
| 113 |
+
def ClearField(self, field_name: typing.Literal["explanation", b"explanation", "option", b"option"]) -> None: ...
|
| 114 |
+
|
| 115 |
+
global___QAExplanation = QAExplanation
|
| 116 |
+
|
| 117 |
+
@typing.final
|
| 118 |
+
class QAItem(google.protobuf.message.Message):
|
| 119 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 120 |
+
|
| 121 |
+
QUESTION_TYPE_FIELD_NUMBER: builtins.int
|
| 122 |
+
QUESTION_FIELD_NUMBER: builtins.int
|
| 123 |
+
CORRECT_ANSWER_FIELD_NUMBER: builtins.int
|
| 124 |
+
RELEVANT_CONTEXT_FIELD_NUMBER: builtins.int
|
| 125 |
+
QUESTION_EXPLANATION_FIELD_NUMBER: builtins.int
|
| 126 |
+
question_type: global___QAType.ValueType
|
| 127 |
+
question: builtins.str
|
| 128 |
+
correct_answer: builtins.str
|
| 129 |
+
relevant_context: builtins.str
|
| 130 |
+
@property
|
| 131 |
+
def question_explanation(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___QAExplanation]: ...
|
| 132 |
+
def __init__(
|
| 133 |
+
self,
|
| 134 |
+
*,
|
| 135 |
+
question_type: global___QAType.ValueType = ...,
|
| 136 |
+
question: builtins.str = ...,
|
| 137 |
+
correct_answer: builtins.str = ...,
|
| 138 |
+
relevant_context: builtins.str = ...,
|
| 139 |
+
question_explanation: collections.abc.Iterable[global___QAExplanation] | None = ...,
|
| 140 |
+
) -> None: ...
|
| 141 |
+
def ClearField(self, field_name: typing.Literal["correct_answer", b"correct_answer", "question", b"question", "question_explanation", b"question_explanation", "question_type", b"question_type", "relevant_context", b"relevant_context"]) -> None: ...
|
| 142 |
+
|
| 143 |
+
global___QAItem = QAItem
|
| 144 |
+
|
| 145 |
+
@typing.final
|
| 146 |
+
class Field(google.protobuf.message.Message):
|
| 147 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 148 |
+
|
| 149 |
+
NAME_FIELD_NUMBER: builtins.int
|
| 150 |
+
DESCRIPTION_FIELD_NUMBER: builtins.int
|
| 151 |
+
TYPE_FIELD_NUMBER: builtins.int
|
| 152 |
+
DEFAULT_FIELD_NUMBER: builtins.int
|
| 153 |
+
name: builtins.str
|
| 154 |
+
description: builtins.str
|
| 155 |
+
type: global___Type.ValueType
|
| 156 |
+
default: builtins.str
|
| 157 |
+
"""json"""
|
| 158 |
+
def __init__(
|
| 159 |
+
self,
|
| 160 |
+
*,
|
| 161 |
+
name: builtins.str = ...,
|
| 162 |
+
description: builtins.str = ...,
|
| 163 |
+
type: global___Type.ValueType = ...,
|
| 164 |
+
default: builtins.str = ...,
|
| 165 |
+
) -> None: ...
|
| 166 |
+
def ClearField(self, field_name: typing.Literal["default", b"default", "description", b"description", "name", b"name", "type", b"type"]) -> None: ...
|
| 167 |
+
|
| 168 |
+
global___Field = Field
|
| 169 |
+
|
| 170 |
+
@typing.final
|
| 171 |
+
class ModelArgs(google.protobuf.message.Message):
|
| 172 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 173 |
+
|
| 174 |
+
MODEL_NAME_FIELD_NUMBER: builtins.int
|
| 175 |
+
TEMPERATURE_FIELD_NUMBER: builtins.int
|
| 176 |
+
TOP_K_FIELD_NUMBER: builtins.int
|
| 177 |
+
TOP_P_FIELD_NUMBER: builtins.int
|
| 178 |
+
model_name: builtins.str
|
| 179 |
+
temperature: builtins.float
|
| 180 |
+
top_k: builtins.int
|
| 181 |
+
top_p: builtins.float
|
| 182 |
+
def __init__(
|
| 183 |
+
self,
|
| 184 |
+
*,
|
| 185 |
+
model_name: builtins.str = ...,
|
| 186 |
+
temperature: builtins.float = ...,
|
| 187 |
+
top_k: builtins.int = ...,
|
| 188 |
+
top_p: builtins.float = ...,
|
| 189 |
+
) -> None: ...
|
| 190 |
+
def ClearField(self, field_name: typing.Literal["model_name", b"model_name", "temperature", b"temperature", "top_k", b"top_k", "top_p", b"top_p"]) -> None: ...
|
| 191 |
+
|
| 192 |
+
global___ModelArgs = ModelArgs
|
| 193 |
+
|
| 194 |
+
@typing.final
|
| 195 |
+
class ModelInfoRequest(google.protobuf.message.Message):
|
| 196 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 197 |
+
|
| 198 |
+
MODEL_SUPPLIER_FIELD_NUMBER: builtins.int
|
| 199 |
+
model_supplier: global___ModelSupplier.ValueType
|
| 200 |
+
def __init__(
|
| 201 |
+
self,
|
| 202 |
+
*,
|
| 203 |
+
model_supplier: global___ModelSupplier.ValueType = ...,
|
| 204 |
+
) -> None: ...
|
| 205 |
+
def ClearField(self, field_name: typing.Literal["model_supplier", b"model_supplier"]) -> None: ...
|
| 206 |
+
|
| 207 |
+
global___ModelInfoRequest = ModelInfoRequest
|
| 208 |
+
|
| 209 |
+
@typing.final
|
| 210 |
+
class ModelInfoResponse(google.protobuf.message.Message):
|
| 211 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 212 |
+
|
| 213 |
+
MODEL_NAME_FIELD_NUMBER: builtins.int
|
| 214 |
+
@property
|
| 215 |
+
def model_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
|
| 216 |
+
def __init__(
|
| 217 |
+
self,
|
| 218 |
+
*,
|
| 219 |
+
model_name: collections.abc.Iterable[builtins.str] | None = ...,
|
| 220 |
+
) -> None: ...
|
| 221 |
+
def ClearField(self, field_name: typing.Literal["model_name", b"model_name"]) -> None: ...
|
| 222 |
+
|
| 223 |
+
global___ModelInfoResponse = ModelInfoResponse
|
| 224 |
+
|
| 225 |
+
@typing.final
|
| 226 |
+
class QuestionTypeRequest(google.protobuf.message.Message):
|
| 227 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 228 |
+
|
| 229 |
+
QUESTION_TYPE_FIELD_NUMBER: builtins.int
|
| 230 |
+
question_type: global___QAType.ValueType
|
| 231 |
+
def __init__(
|
| 232 |
+
self,
|
| 233 |
+
*,
|
| 234 |
+
question_type: global___QAType.ValueType = ...,
|
| 235 |
+
) -> None: ...
|
| 236 |
+
def ClearField(self, field_name: typing.Literal["question_type", b"question_type"]) -> None: ...
|
| 237 |
+
|
| 238 |
+
global___QuestionTypeRequest = QuestionTypeRequest
|
| 239 |
+
|
| 240 |
+
@typing.final
|
| 241 |
+
class QuestionTypeResponse(google.protobuf.message.Message):
|
| 242 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 243 |
+
|
| 244 |
+
FIELDS_FIELD_NUMBER: builtins.int
|
| 245 |
+
@property
|
| 246 |
+
def fields(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Field]: ...
|
| 247 |
+
def __init__(
|
| 248 |
+
self,
|
| 249 |
+
*,
|
| 250 |
+
fields: collections.abc.Iterable[global___Field] | None = ...,
|
| 251 |
+
) -> None: ...
|
| 252 |
+
def ClearField(self, field_name: typing.Literal["fields", b"fields"]) -> None: ...
|
| 253 |
+
|
| 254 |
+
global___QuestionTypeResponse = QuestionTypeResponse
|
| 255 |
+
|
| 256 |
+
@typing.final
|
| 257 |
+
class QuestionGenRequest(google.protobuf.message.Message):
|
| 258 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 259 |
+
|
| 260 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 261 |
+
QUESTION_ID_FIELD_NUMBER: builtins.int
|
| 262 |
+
MODEL_SUPPLIER_FIELD_NUMBER: builtins.int
|
| 263 |
+
MODEL_ARGS_FIELD_NUMBER: builtins.int
|
| 264 |
+
QUESTION_ARGS_FIELD_NUMBER: builtins.int
|
| 265 |
+
NUM_QUESTIONS_FIELD_NUMBER: builtins.int
|
| 266 |
+
QA_ITEM_FIELD_NUMBER: builtins.int
|
| 267 |
+
ACTION_FIELD_NUMBER: builtins.int
|
| 268 |
+
question_id: builtins.str
|
| 269 |
+
model_supplier: global___ModelSupplier.ValueType
|
| 270 |
+
question_args: builtins.str
|
| 271 |
+
"""json"""
|
| 272 |
+
num_questions: builtins.int
|
| 273 |
+
action: global___Action.ValueType
|
| 274 |
+
@property
|
| 275 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 276 |
+
@property
|
| 277 |
+
def model_args(self) -> global___ModelArgs:
|
| 278 |
+
"""pass as json
|
| 279 |
+
*reference**:
|
| 280 |
+
- OpenAI: https://platform.openai.com/docs/api-reference/chat
|
| 281 |
+
"""
|
| 282 |
+
|
| 283 |
+
@property
|
| 284 |
+
def qa_item(self) -> global___QAItem: ...
|
| 285 |
+
def __init__(
|
| 286 |
+
self,
|
| 287 |
+
*,
|
| 288 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 289 |
+
question_id: builtins.str = ...,
|
| 290 |
+
model_supplier: global___ModelSupplier.ValueType = ...,
|
| 291 |
+
model_args: global___ModelArgs | None = ...,
|
| 292 |
+
question_args: builtins.str = ...,
|
| 293 |
+
num_questions: builtins.int = ...,
|
| 294 |
+
qa_item: global___QAItem | None = ...,
|
| 295 |
+
action: global___Action.ValueType = ...,
|
| 296 |
+
) -> None: ...
|
| 297 |
+
def HasField(self, field_name: typing.Literal["model_args", b"model_args", "qa_item", b"qa_item", "session_info", b"session_info"]) -> builtins.bool: ...
|
| 298 |
+
def ClearField(self, field_name: typing.Literal["action", b"action", "model_args", b"model_args", "model_supplier", b"model_supplier", "num_questions", b"num_questions", "qa_item", b"qa_item", "question_args", b"question_args", "question_id", b"question_id", "session_info", b"session_info"]) -> None: ...
|
| 299 |
+
|
| 300 |
+
global___QuestionGenRequest = QuestionGenRequest
|
| 301 |
+
|
| 302 |
+
@typing.final
|
| 303 |
+
class QuestionGenResponseMetadata(google.protobuf.message.Message):
|
| 304 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 305 |
+
|
| 306 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 307 |
+
SUCCESS_FIELD_NUMBER: builtins.int
|
| 308 |
+
success: builtins.bool
|
| 309 |
+
@property
|
| 310 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 311 |
+
def __init__(
|
| 312 |
+
self,
|
| 313 |
+
*,
|
| 314 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 315 |
+
success: builtins.bool = ...,
|
| 316 |
+
) -> None: ...
|
| 317 |
+
def HasField(self, field_name: typing.Literal["session_info", b"session_info"]) -> builtins.bool: ...
|
| 318 |
+
def ClearField(self, field_name: typing.Literal["session_info", b"session_info", "success", b"success"]) -> None: ...
|
| 319 |
+
|
| 320 |
+
global___QuestionGenResponseMetadata = QuestionGenResponseMetadata
|
| 321 |
+
|
| 322 |
+
@typing.final
|
| 323 |
+
class QuestionGenResponseData(google.protobuf.message.Message):
|
| 324 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 325 |
+
|
| 326 |
+
SESSION_INFO_FIELD_NUMBER: builtins.int
|
| 327 |
+
QUESTION_ID_FIELD_NUMBER: builtins.int
|
| 328 |
+
QA_ITEM_FIELD_NUMBER: builtins.int
|
| 329 |
+
question_id: builtins.str
|
| 330 |
+
@property
|
| 331 |
+
def session_info(self) -> commons_pb2.SessionInfo: ...
|
| 332 |
+
@property
|
| 333 |
+
def qa_item(self) -> global___QAItem: ...
|
| 334 |
+
def __init__(
|
| 335 |
+
self,
|
| 336 |
+
*,
|
| 337 |
+
session_info: commons_pb2.SessionInfo | None = ...,
|
| 338 |
+
question_id: builtins.str = ...,
|
| 339 |
+
qa_item: global___QAItem | None = ...,
|
| 340 |
+
) -> None: ...
|
| 341 |
+
def HasField(self, field_name: typing.Literal["qa_item", b"qa_item", "session_info", b"session_info"]) -> builtins.bool: ...
|
| 342 |
+
def ClearField(self, field_name: typing.Literal["qa_item", b"qa_item", "question_id", b"question_id", "session_info", b"session_info"]) -> None: ...
|
| 343 |
+
|
| 344 |
+
global___QuestionGenResponseData = QuestionGenResponseData
|
| 345 |
+
|
| 346 |
+
@typing.final
|
| 347 |
+
class QuestionGenResponse(google.protobuf.message.Message):
|
| 348 |
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
| 349 |
+
|
| 350 |
+
METADATA_FIELD_NUMBER: builtins.int
|
| 351 |
+
DATA_FIELD_NUMBER: builtins.int
|
| 352 |
+
@property
|
| 353 |
+
def metadata(self) -> global___QuestionGenResponseMetadata: ...
|
| 354 |
+
@property
|
| 355 |
+
def data(self) -> global___QuestionGenResponseData: ...
|
| 356 |
+
def __init__(
|
| 357 |
+
self,
|
| 358 |
+
*,
|
| 359 |
+
metadata: global___QuestionGenResponseMetadata | None = ...,
|
| 360 |
+
data: global___QuestionGenResponseData | None = ...,
|
| 361 |
+
) -> None: ...
|
| 362 |
+
def HasField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata", "question_gen_response", b"question_gen_response"]) -> builtins.bool: ...
|
| 363 |
+
def ClearField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata", "question_gen_response", b"question_gen_response"]) -> None: ...
|
| 364 |
+
def WhichOneof(self, oneof_group: typing.Literal["question_gen_response", b"question_gen_response"]) -> typing.Literal["metadata", "data"] | None: ...
|
| 365 |
+
|
| 366 |
+
global___QuestionGenResponse = QuestionGenResponse
|
grpc_services/generated/question_gen_service_pb2_grpc.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

import grpc_services.generated.question_gen_service_pb2 as question__gen__service__pb2

# Version metadata baked in by grpcio-tools at generation time; compared
# against the installed grpcio runtime below.
GRPC_GENERATED_VERSION = '1.63.0'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    # Helper only exists in sufficiently recent grpcio releases; its absence
    # already implies an incompatible (too old) runtime.
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    # Warn (not fail) when the installed grpcio is older than the version this
    # module was generated against; grpcio intends to make this an error later.
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in question_gen_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class QuestionGenServiceStub(object):
    """Client-side stub for the question_gen.QuestionGenService gRPC service.

    Exposes one callable attribute per RPC, each bound to the method path and
    the (de)serializers generated in question_gen_service_pb2.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary request -> unary response RPCs.
        self.GetModelInfo = channel.unary_unary(
                '/question_gen.QuestionGenService/GetModelInfo',
                request_serializer=question__gen__service__pb2.ModelInfoRequest.SerializeToString,
                response_deserializer=question__gen__service__pb2.ModelInfoResponse.FromString,
                _registered_method=True)
        self.GetQuestionTypeInfo = channel.unary_unary(
                '/question_gen.QuestionGenService/GetQuestionTypeInfo',
                request_serializer=question__gen__service__pb2.QuestionTypeRequest.SerializeToString,
                response_deserializer=question__gen__service__pb2.QuestionTypeResponse.FromString,
                _registered_method=True)
        # Unary request -> server-streamed responses.
        self.CreateQuestion = channel.unary_stream(
                '/question_gen.QuestionGenService/CreateQuestion',
                request_serializer=question__gen__service__pb2.QuestionGenRequest.SerializeToString,
                response_deserializer=question__gen__service__pb2.QuestionGenResponse.FromString,
                _registered_method=True)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class QuestionGenServiceServicer(object):
    """Server-side handler interface for question_gen.QuestionGenService.

    Every handler below is a placeholder: it marks the call UNIMPLEMENTED on
    the gRPC context and raises. Subclass and override to implement the RPCs.
    """

    def _reject_unimplemented(self, context):
        # Shared placeholder behavior for all generated handler stubs.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetModelInfo(self, request, context):
        """Parse bot: from csv to json
        """
        self._reject_unimplemented(context)

    def GetQuestionTypeInfo(self, request, context):
        """Handler stub for the GetQuestionTypeInfo RPC."""
        self._reject_unimplemented(context)

    def CreateQuestion(self, request, context):
        """Handler stub for the server-streaming CreateQuestion RPC."""
        self._reject_unimplemented(context)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def add_QuestionGenServiceServicer_to_server(servicer, server):
    """Register a QuestionGenServiceServicer's RPC handlers on a grpc.Server.

    Args:
        servicer: Object implementing the QuestionGenServiceServicer interface.
        server: grpc.Server to attach the generated handlers to.
    """
    rpc_method_handlers = {
            'GetModelInfo': grpc.unary_unary_rpc_method_handler(
                    servicer.GetModelInfo,
                    request_deserializer=question__gen__service__pb2.ModelInfoRequest.FromString,
                    response_serializer=question__gen__service__pb2.ModelInfoResponse.SerializeToString,
            ),
            'GetQuestionTypeInfo': grpc.unary_unary_rpc_method_handler(
                    servicer.GetQuestionTypeInfo,
                    request_deserializer=question__gen__service__pb2.QuestionTypeRequest.FromString,
                    response_serializer=question__gen__service__pb2.QuestionTypeResponse.SerializeToString,
            ),
            # CreateQuestion streams its responses, hence a unary_stream handler.
            'CreateQuestion': grpc.unary_stream_rpc_method_handler(
                    servicer.CreateQuestion,
                    request_deserializer=question__gen__service__pb2.QuestionGenRequest.FromString,
                    response_serializer=question__gen__service__pb2.QuestionGenResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'question_gen.QuestionGenService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# This class is part of an EXPERIMENTAL API.
class QuestionGenService(object):
    """One-shot static-call API for question_gen.QuestionGenService.

    Each static method opens a call through grpc.experimental without the
    caller constructing a channel/stub pair; all arguments after `target`
    mirror the grpc.experimental.* signatures.
    """

    @staticmethod
    def GetModelInfo(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Unary-unary RPC; serializers match those wired into the stub.
        return grpc.experimental.unary_unary(
            request,
            target,
            '/question_gen.QuestionGenService/GetModelInfo',
            question__gen__service__pb2.ModelInfoRequest.SerializeToString,
            question__gen__service__pb2.ModelInfoResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def GetQuestionTypeInfo(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Unary-unary RPC.
        return grpc.experimental.unary_unary(
            request,
            target,
            '/question_gen.QuestionGenService/GetQuestionTypeInfo',
            question__gen__service__pb2.QuestionTypeRequest.SerializeToString,
            question__gen__service__pb2.QuestionTypeResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def CreateQuestion(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # Unary request, streamed responses (returns a response iterator).
        return grpc.experimental.unary_stream(
            request,
            target,
            '/question_gen.QuestionGenService/CreateQuestion',
            question__gen__service__pb2.QuestionGenRequest.SerializeToString,
            question__gen__service__pb2.QuestionGenResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
|
requirements.txt
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
grpcio==1.63.0
|
| 2 |
+
grpcio-tools==1.63.0
|
| 3 |
+
langchain==0.3.7
|
| 4 |
+
langchain-community==0.3.5
|
| 5 |
+
langchain-core==0.3.15
|
| 6 |
+
langgraph==0.2.45
|
| 7 |
+
langchain_openai==0.2.6
|
| 8 |
+
faiss-cpu==1.8.0
|
| 9 |
+
python-dotenv==1.0.0
|
| 10 |
+
openpyxl==3.1.4
|
| 11 |
+
pydantic==2.8.2
|
| 12 |
+
xlrd==2.0.1
|
| 13 |
+
pandas==1.4.2
|
| 14 |
+
python-magic==0.4.27
|
| 15 |
+
attrs_strict==1.0.1
|
| 16 |
+
# python-magic==0.4.27  -- duplicate pin (already listed above), kept as a comment
|
| 17 |
+
elastic-apm==6.9.1
|
| 18 |
+
grpcio-reflection==1.63.0
|
| 19 |
+
cachetools==5.3.3
|
| 20 |
+
|
| 21 |
+
mutagen==1.47.0
|
| 22 |
+
elevenlabs==1.50.2
|
tmp_files/metadata.csv
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
story,speaker,text,filepath
|