

# Fail fast: -e exits on error, -u errors on unset variables,
# pipefail makes a pipeline fail if any stage fails.
set -euo pipefail
# Trace every command as it runs (debugging aid).
set -x




# blenderproc run examples/basics/basic/main.py examples/resources/camera_positions examples/resources/scene.obj examples/basics/basic/output
# export 'USE_EXTERNAL_BPY_MODULE=1'


#* basic scenes
# out_dir=examples/basics/basic/output
# python examples/basics/basic/main.py examples/resources/camera_positions examples/resources/scene.obj  ${out_dir}


#* camera sampling
# out_dir=examples/basics/basic/camera_sampling
# python examples/basics/camera_sampling/main.py examples/resources/scene.obj ${out_dir}



#* object manipulation
# out_dir=examples/basics/entity_manipulation/output
# python examples/basics/entity_manipulation/main.py examples/resources/scene.obj ${out_dir}

#* material manipulation
# out_dir=examples/basics/material_manipulation/output
# python examples/basics/material_manipulation/main.py examples/basics/material_manipulation/scene.obj images ${out_dir}




#* physics_positioning
# out_dir=examples/basics/physics_positioning/output
# time python examples/basics/physics_positioning/main.py examples/basics/physics_positioning/active.obj examples/basics/physics_positioning/passive.obj ${out_dir}



#* semantic_segmentation
# out_dir=examples/basics/semantic_segmentation/output
# python examples/basics/semantic_segmentation/main.py examples/resources/camera_positions examples/basics/semantic_segmentation/scene.blend ${out_dir}


#* coco_annotations
# out_dir=examples/advanced/coco_annotations/output;
# python  examples/advanced/coco_annotations/main.py examples/resources/camera_positions examples/advanced/coco_annotations/scene.blend ${out_dir}


#todo : https://github.com/DLR-RM/BlenderProc/blob/main/README_BlenderProc4BOP.md
#* README_BlenderProc4BOP


#todo: https://github.com/DLR-RM/BlenderProc/blob/main/examples/README.md



# sub_task_name=$(echo ${out_dir} | cut -d'/' -f3)
# hdf5_list=$(ls ${out_dir})
# for x in ${hdf5_list[@]}; do 
#     python daniel_trail.py --src-path ${out_dir}/${x} --out-path logs/${sub_task_name}
# done


#* BOP-challenge synthetic dataset generation (charging-arm, LM-upright variant).
# Prerequisites: `blenderproc` on PATH, BOP data under ${path_to_bop_data},
# CC0 textures under resources/cctextures (see download script at the bottom).
path_to_bop_data=data/BOP
num_scenes=3   # number of scenes to render (was `num_scene`; renamed to match the --num_scenes flag)
save_dir=examples/datasets/bop_challenge/charging_arm/syn_dataset3

# Quote all expansions so paths containing spaces don't word-split or glob.
blenderproc run examples/datasets/bop_challenge/main_lm_upright.py \
  "${path_to_bop_data}" resources/cctextures "${save_dir}" \
  --num_scenes="${num_scenes}"



# python daniel_tools/vis_pose.py --root_path    ${save_dir}/bop_data/lm/train_pbr/000000 --output_path ${save_dir}/bop_data/lm/train_pbr/000000/vis_pose
# python daniel_tools/img_utils.py --root_path    ${save_dir}/bop_data/lm/train_pbr/000000/vis_pose --output_path ${save_dir}/bop_data/lm/train_pbr/000000/vis_pose/pose.gif --duration 50


# blenderproc run blenderproc/scripts/download_cc_textures.py ./resources/cctextures
