# Source: ChatFlowModule / ChatAtomicFlow.yaml
# Commit: cd18bf6 (verified) — "Updated run.py to support Huggingface backend (#2)" by nbaldwin
# This is an abstract flow, therefore some required fields are not defined (and must be defined by the concrete flow)
_target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
name: ChatAtomicFlow
description: "Flow which uses as tool an LLM though an API"

# Cache identical API calls (same model + messages) instead of re-querying
enable_cache: true

# Retry policy for transient API failures
n_api_retries: 6
wait_time_between_retries: 20  # presumably seconds — TODO confirm against backend docs

# Role names used when assembling the chat transcript sent to the model
system_name: system
user_name: user
assistant_name: assistant

# LLM backend — LiteLLM multi-provider wrapper
backend:
  _target_: aiflows.backends.llm_lite.LiteLLMBackend
  api_infos: ???  # OmegaConf mandatory-value marker: must be supplied by the concrete flow / caller
  model_name:
    openai: "gpt-3.5-turbo"
  n: 1  # number of completions to request per call
  max_tokens: 2000
  temperature: 0.3
  top_p: 0.2
  stream: true

# Abstract: the concrete flow must define the system-prompt template
system_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt

# Template for the first human message of a conversation
init_human_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt
  template: "{{query}}"
  input_variables:
    - "query"

# Template for subsequent human messages
human_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt
  template: "{{query}}"
  input_variables:
    - "query"

# Input keys expected once the flow has been initialized
input_interface_initialized:
  - "query"

# Abstract: the concrete flow must define the query-message template
query_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt

# History-truncation window (null = keep everything)
previous_messages:
  first_k: null # Note that the first message is the system prompt
  last_k: null

output_interface:
  - "api_output"