# Training configuration: Blender synthetic dataset with CLIP-based semantic loss.
dataset: blender
batching: single_image
factor: 0
num_coarse_samples: 64
num_fine_samples: 128
use_viewdirs: true
white_bkgd: true
batch_size: 1024
randomized: true
max_steps: 500000
print_every: 100
render_every: 5000
save_every: 5000
use_semantic_loss: true
clip_model_name: openai/clip-vit-base-patch32
clip_output_dtype: float32
sc_loss_factor: 4
sc_loss_every: 16
sc_loss_mult: 10
few_shot: 8