from FateZero.test_fatezero import *  # provides OmegaConf, get_time_string, test, run

import copy


def merge_config_then_run(
    model_id,
    data_path,
    source_prompt,
    target_prompt,
    cross_replace_steps,
    self_replace_steps,
    enhance_words,
    enhance_words_value,
    num_steps,
    guidance_scale,
):
    """Build a FateZero edit config from UI-style inputs and run the edit.

    Starts from the bundled low-resource teaser YAML, overrides the model,
    dataset, prompt-to-prompt (p2p) and DDIM settings with the given values,
    runs ``test(...)``, and returns the path of the resulting mp4.

    Args:
        model_id: Pretrained diffusion model path/identifier.
        data_path: Path to the source video frames.
        source_prompt: Prompt describing the source video.
        target_prompt: Prompt describing the desired edit.
        cross_replace_steps: Fraction/step setting for cross-attention replacement.
        self_replace_steps: Fraction/step setting for self-attention replacement.
        enhance_words: Space-separated words to amplify in the target prompt.
        enhance_words_value: Amplification factor applied to each enhance word.
        num_steps: Number of DDIM inference steps.
        guidance_scale: Classifier-free guidance scale.

    Returns:
        Path to the generated ``*_0_0_0.mp4`` result file.
    """
    default_edit_config = 'FateZero/config/low_resource_teaser/jeep_watercolor_ddim_10_steps.yaml'
    Omegadict_default_edit_config = OmegaConf.load(default_edit_config)

    dataset_time_string = get_time_string()

    # Deep-copy once so the loaded default config is never mutated; all
    # overrides below edit this copy in place.
    config_now = copy.deepcopy(Omegadict_default_edit_config)

    # Model and dataset overrides.
    config_now['pretrained_model_path'] = model_id
    config_now['train_dataset']['prompt'] = source_prompt
    config_now['train_dataset']['path'] = data_path
    config_now['validation_sample_logger_config']['prompts'] = [target_prompt]

    # FateZero prompt-to-prompt config. config_now is already an independent
    # deep copy, so the sub-config can be edited in place — no extra copies.
    p2p_config_now = config_now['validation_sample_logger_config']['p2p_config'][0]
    p2p_config_now['cross_replace_steps']['default_'] = cross_replace_steps
    p2p_config_now['self_replace_steps'] = self_replace_steps
    # NOTE: split(" ") (not split()) is kept deliberately — an empty
    # enhance_words yields [''] here, matching the original behavior.
    p2p_config_now['eq_params']['words'] = enhance_words.split(" ")
    p2p_config_now['eq_params']['values'] = (
        [enhance_words_value] * len(p2p_config_now['eq_params']['words'])
    )

    # DDIM sampling config.
    config_now['validation_sample_logger_config']['guidance_scale'] = guidance_scale
    config_now['validation_sample_logger_config']['num_inference_steps'] = num_steps

    # Mirror the config path under result/, strip the YAML suffix, and
    # timestamp the run so repeated invocations never collide.
    logdir = (
        default_edit_config.replace('config', 'result')
        .replace('.yml', '')
        .replace('.yaml', '')
        + f'_{dataset_time_string}'
    )
    config_now['logdir'] = logdir
    print(f'Saving at {logdir}')

    save_path = test(config=default_edit_config, **config_now)
    # test() reports the gif sample path; the mp4 lives beside it with this
    # suffix convention — TODO confirm against FateZero.test_fatezero.
    mp4_path = save_path.replace('_0.gif', '_0_0_0.mp4')
    return mp4_path


if __name__ == "__main__":
    # run() is expected to come from the wildcard FateZero import.
    run()