_target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
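# `instantiate_from_default_config` merges the values in this file on top of ChatAtomicFlow's
# default configuration, so only the keys being overridden need to appear here.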

name: "SimpleQA_Flow"
description: "A flow that answers questions."

# ~~~ Input interface specification ~~~
input_interface_non_initialized:
  - "question"

# ~~~ Backend model parameters ~~~
backend:
  _target_: aiflows.backends.llm_lite.LiteLLMBackend
  api_infos: ???
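  # `???` is OmegaConf's mandatory-value marker: api_infos must be supplied at instantiation
  # time, typically as a list of aiflows.backends.api_info.ApiInfo objects built from API keys
  # read from environment variables.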
  model_name: 
    openai: "gpt-3.5-turbo"
    azure: "azure/gpt-4"
    huggingface: "huggingface/meta-llama/Meta-Llama-3-70B-Instruct"
  
  # ~~~ Generation parameters ~~~
  n: 1
  max_tokens: 3000
  temperature: 0.3

  top_p: 0.2
  frequency_penalty: 0
  presence_penalty: 0
  stream: False
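  # n is the number of completions to sample per call; the remaining values are standard
  # OpenAI-style sampling parameters forwarded with the completion request.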

n_api_retries: 6
wait_time_between_retries: 20
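# Failed API calls are retried up to n_api_retries times, waiting wait_time_between_retries
# seconds between attempts.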

# ~~~ Prompt specification ~~~
system_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt
  template: |2-
    You are a helpful chatbot that truthfully answers questions.
  input_variables: []
  partial_variables: {}
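# The system message template above is rendered once and set as the system message when the
# chat history is initialized.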


init_human_message_prompt_template:
  _target_: aiflows.prompt_template.JinjaPrompt
  template: |2-
    Answer the following question: {{question}}
  input_variables: ["question"]
  partial_variables: {}
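# The init human message template above is rendered as the first user message, with
# {{question}} filled in from the flow's input data.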