mrfakename committed
Commit 0186b12 · verified · 1 parent: 61085f9

Sync from GitHub repo


This Space is synced from the GitHub repo: https://github.com/SWivid/F5-TTS. Please submit contributions to the Space through that repo.

src/f5_tts/train/finetune_cli.py CHANGED
@@ -47,7 +47,7 @@ def parse_args():
     parser.add_argument("--num_warmup_updates", type=int, default=300, help="Warmup steps")
     parser.add_argument("--save_per_updates", type=int, default=10000, help="Save checkpoint every X steps")
     parser.add_argument("--last_per_steps", type=int, default=50000, help="Save last checkpoint every X steps")
-    parser.add_argument("--finetune", type=bool, default=True, help="Use Finetune")
+    parser.add_argument("--finetune", action="store_true", help="Use Finetune")
     parser.add_argument("--pretrain", type=str, default=None, help="the path to the checkpoint")
     parser.add_argument(
         "--tokenizer", type=str, default="pinyin", choices=["pinyin", "char", "custom"], help="Tokenizer type"
@@ -60,15 +60,13 @@ def parse_args():
     )
     parser.add_argument(
         "--log_samples",
-        type=bool,
-        default=False,
+        action="store_true",
         help="Log inferenced samples per ckpt save steps",
     )
     parser.add_argument("--logger", type=str, default=None, choices=["wandb", "tensorboard"], help="logger")
     parser.add_argument(
         "--bnb_optimizer",
-        type=bool,
-        default=False,
+        action="store_true",
         help="Use 8-bit Adam optimizer from bitsandbytes",
     )
src/f5_tts/train/finetune_gradio.py CHANGED
@@ -452,7 +452,8 @@ def start_training(
         f"--dataset_name {dataset_name}"
     )

-    cmd += f" --finetune {finetune}"
+    if finetune:
+        cmd += " --finetune"

     if file_checkpoint_train != "":
         cmd += f" --pretrain {file_checkpoint_train}"
@@ -460,12 +461,12 @@ def start_training(
     if tokenizer_file != "":
         cmd += f" --tokenizer_path {tokenizer_file}"

-    cmd += f" --tokenizer {tokenizer_type} "
+    cmd += f" --tokenizer {tokenizer_type}"

-    cmd += f" --log_samples True --logger {logger} "
+    cmd += f" --log_samples --logger {logger}"

     if ch_8bit_adam:
-        cmd += " --bnb_optimizer True "
+        cmd += " --bnb_optimizer"

     print("run command : \n" + cmd + "\n")
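
Since the CLI flags are now presence-based, the launcher appends a bare flag only when the corresponding option is enabled; appending literal True/False values, as before, would be misread by the new parser. A minimal sketch of the resulting command assembly, with hypothetical input values and an assumed base command:

# Hypothetical values standing in for the Gradio controls.
finetune, ch_8bit_adam = True, False
tokenizer_type, logger = "pinyin", "wandb"

cmd = "python finetune_cli.py --dataset_name my_dataset"  # assumed base command

if finetune:
    cmd += " --finetune"       # bare flag, appended only when enabled
cmd += f" --tokenizer {tokenizer_type}"
cmd += f" --log_samples --logger {logger}"
if ch_8bit_adam:
    cmd += " --bnb_optimizer"  # omitted here, since ch_8bit_adam is False

print(cmd)
# python finetune_cli.py --dataset_name my_dataset --finetune --tokenizer pinyin --log_samples --logger wandb
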