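# Configuration for the tabletop language-model programs (LMPs).
# Each entry under `lmps` points to a prompt file and sets the decoding
# parameters (engine, max_tokens, temperature, stop sequences) used when
# querying the model; `tabletop_coords` names reference positions on the table.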
lmps:
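  # Shared fields (semantics as suggested by the field names):
  #   query_prefix / query_suffix   wrap the natural-language query appended to the prompt
  #   stop                          stop sequences that end generation
  #   maintain_session              presumably keeps previously generated code in the prompt across calls
  #   include_context               presumably prepends the current execution context to the query
  #   has_return / return_val_name  likely read the LMP's result from the named variable after execution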
  tabletop_ui:
    prompt_path: prompts/tabletop_ui.py
    engine: model_name
    max_tokens: 256
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: True
    debug_mode: False
    include_context: True
    has_return: False
    return_val_name: ret_val
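  # The parse_* LMPs below are single-shot helpers (maintain_session: False)
  # that return their result through `ret_val`.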
  parse_obj_name:
    prompt_path: prompts/parse_obj_name.py
    engine: model_name
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
  parse_position:
    prompt_path: prompts/parse_position.py
    engine: model_name
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
  parse_question:
    prompt_path: prompts/parse_question.py
    engine: model_name
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
  transform_shape_pts:
    prompt_path: prompts/transform_shape_pts.py
    engine: model_name
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: new_shape_pts
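  # fgen appears to generate function definitions on demand; note its distinct
  # query_prefix ('# define function: ') and stop tokens, and that it declares
  # no return_val_name.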
  fgen:
    prompt_path: prompts/fgen.py
    engine: model_name
    max_tokens: 512
    temperature: 0
    query_prefix: '# define function: '
    query_suffix: '.'
    stop: ['# define', '# example']
    maintain_session: False
    debug_mode: False
    include_context: True

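# Named reference positions on the table as [x, y] coordinates;
# table_z is presumably the height of the table surface.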
tabletop_coords:
  top_left: [-0.25, -0.25]
  top_side: [0, -0.25]
  top_right: [0.25, -0.25]
  left_side: [-0.25, -0.5]
  middle: [0, -0.5]
  right_side: [0.25, -0.5]
  bottom_left: [-0.25, -0.75]
  bottom_side: [0, -0.75]
  bottom_right: [0.25, -0.75]
  table_z: 0