sanchit-gandhi HF staff committed on
Commit
d1558b4
1 Parent(s): c4e5ff9

Saving weights and logs of epoch 0

Browse files
config.json CHANGED
@@ -1,7 +1,9 @@
1
  {
 
2
  "architectures": [
3
  "SpeechEncoderDecoderModel"
4
  ],
 
5
  "decoder": {
6
  "_name_or_path": "facebook/bart-large-cnn",
7
  "_num_labels": 3,
 
1
  {
2
+ "_name_or_path": "./",
3
  "architectures": [
4
  "SpeechEncoderDecoderModel"
5
  ],
6
+ "cache_dir": "/home/sanchit_huggingface_co/cache/huggingface/transformers",
7
  "decoder": {
8
  "_name_or_path": "facebook/bart-large-cnn",
9
  "_num_labels": 3,
create_model.py CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoFeatureExtractor, AutoTokenizer, FlaxSpeechEncoderD
4
  encoder_id = "facebook/wav2vec2-large-lv60"
5
  decoder_id = "facebook/bart-large-cnn"
6
 
7
- model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(encoder_id, decoder_id, encoder_add_adapter=True)
8
 
9
  model.config.encoder.feat_proj_dropout = 0.0
10
  model.config.encoder.final_dropout = 0.0
 
4
  encoder_id = "facebook/wav2vec2-large-lv60"
5
  decoder_id = "facebook/bart-large-cnn"
6
 
7
+ model = FlaxSpeechEncoderDecoderModel.from_encoder_decoder_pretrained(encoder_id, decoder_id, encoder_add_adapter=True, cache_dir="/home/sanchit_huggingface_co/cache/huggingface/transformers")
8
 
9
  model.config.encoder.feat_proj_dropout = 0.0
10
  model.config.encoder.final_dropout = 0.0
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:59f268b5fa61d5eb20929dce97dfcfbd6123017d61789aef700566461af24242
3
  size 2353635949
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:932441fd4d1167a741bd188a1f0bc953a229e065d97f15027e824bb6dbe4ae7b
3
  size 2353635949
run_librispeech.sh CHANGED
@@ -6,6 +6,8 @@ python run_flax_speech_recognition_seq2seq.py \
6
  --train_split_name="train.100" \
7
  --eval_split_name="validation" \
8
  --output_dir="./" \
 
 
9
  --preprocessing_num_workers="16" \
10
  --length_column_name="input_length" \
11
  --overwrite_output_dir \
 
6
  --train_split_name="train.100" \
7
  --eval_split_name="validation" \
8
  --output_dir="./" \
9
+ --dataset_cache_dir="/home/sanchit_huggingface_co/cache/huggingface/datasets" \
10
+ --cache_dir="/home/sanchit_huggingface_co/cache/huggingface/transformers" \
11
  --preprocessing_num_workers="16" \
12
  --length_column_name="input_length" \
13
  --overwrite_output_dir \
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "facebook/bart-large-cnn", "tokenizer_class": "BartTokenizer"}
 
1
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "./", "tokenizer_class": "BartTokenizer"}