Yapeng committed
Commit 1be5edd
1 Parent(s): 73db14f

Training in progress, step 800

fine-tune-whisper-streaming.ipynb CHANGED
@@ -878,8 +878,8 @@
  "\n",
  " <div>\n",
  " \n",
- " <progress value='701' max='3000' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
- " [ 701/3000 3:45:19 < 12:21:06, 0.05 it/s, Epoch 1.11/9223372036854775807]\n",
+ " <progress value='801' max='3000' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
+ " [ 801/3000 4:15:59 < 11:44:32, 0.05 it/s, Epoch 2.02/9223372036854775807]\n",
  " </div>\n",
  " <table border=\"1\" class=\"dataframe\">\n",
  " <thead>\n",
@@ -1071,7 +1071,15 @@
  "Feature extractor saved in ./preprocessor_config.json\n",
  "tokenizer config file saved in ./tokenizer_config.json\n",
  "Special tokens file saved in ./special_tokens_map.json\n",
- "added tokens file saved in ./added_tokens.json\n"
+ "added tokens file saved in ./added_tokens.json\n",
+ "/opt/conda/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\n",
+ " warnings.warn('Was asked to gather along dimension 0, but all '\n",
+ "Reading metadata...: 18211it [00:03, 5334.89it/s]\n",
+ "***** Running Evaluation *****\n",
+ " Num examples: Unknown\n",
+ " Batch size = 20\n",
+ "Reading metadata...: 8693it [00:01, 7603.67it/s]\n",
+ "The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: input_length. If input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.\n"
  ]
  }
 ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:95600147041ad137f8865acff5c3a6935c43e4f976c70a93c19b8461fde30614
+ oid sha256:0704b9d9e8c72961d92b5c256e00e18cbd3b7c9467699d9d5aefa3b628f64477
  size 967099139
runs/Dec06_07-40-22_04812eda968b/events.out.tfevents.1670312431.04812eda968b.2650899.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a48dc3472dc009ba34d179cee5bf6ee151365ad39448b9e471fac7e8660a12a5
- size 115932
+ oid sha256:c888355f07bd6c5995d8299dbfd8c3501521a69ecd7dbf621450732a2590da46
+ size 131950