SivilTaram committed on
Commit 49dd9b4 · verified · 1 Parent(s): 7d6de4a

Upload llama_7b_seq8k_bs2M_skyladder_decay/0000010000/train_state_00024.json with huggingface_hub
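The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload using HfApi.upload_file; the repo_id below is a hypothetical placeholder, since the target repository is not shown in this commit:

```python
# Minimal sketch: uploading this checkpoint file with huggingface_hub.
# Only the in-repo path and commit message come from this commit; the
# repo_id and local path are assumptions.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="llama_7b_seq8k_bs2M_skyladder_decay/0000010000/train_state_00024.json",
    path_in_repo="llama_7b_seq8k_bs2M_skyladder_decay/0000010000/train_state_00024.json",
    repo_id="SivilTaram/example-checkpoint-repo",  # hypothetical repo_id
    commit_message=(
        "Upload llama_7b_seq8k_bs2M_skyladder_decay/0000010000/"
        "train_state_00024.json with huggingface_hub"
    ),
)
```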

llama_7b_seq8k_bs2M_skyladder_decay/0000010000/train_state_00024.json ADDED
@@ -0,0 +1 @@
+ {"step": 10000, "acc_step": 0, "data_loader_state": {"it_state": {"start_token": 2375, "it_state": {"it_state": {"root_dir": "/mnt/hdfs/tiktok_aiic/user/liuqian", "sources": {"dclm_pro": 1.0}, "source_to_state": {"dclm_pro": {"file_path": "/mnt/hdfs/tiktok_aiic/user/liuqian/dclm_pro/dclm_pro.chunk.00024.jsonl", "position": 1342434594, "block_size": 1, "offset": 0, "current_iter": 0}}, "rng_state": {"bit_generator": "PCG64", "state": {"state": 211455952636091555616223397811987075769, "inc": 285710585089614770479000499584753381129}, "has_uint32": 0, "uinteger": 0}}, "add_bos": true, "add_eos": true, "name": "sp", "path": "/opt/tiger/Github-Repo/lingua/tokenizers/llama2/tokenizer.model"}, "output_seq_len": 8192, "n_views": 2}, "seq_idx": 64, "rng_state": {"bit_generator": "PCG64", "state": {"state": 69362333965341742234020013403235331400, "inc": 134227875351147031923162732418080840751}, "has_uint32": 1, "uinteger": 334226708}, "batch_size": 1, "prefetch_size": 1024}, "scheduler": {"base_lrs": [4e-05], "last_epoch": 10000, "verbose": false, "_step_count": 10001, "_get_lr_called_within_step": false, "_last_lr": [4e-05], "lr_lambdas": [{}]}}
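The added file is a single-line JSON snapshot of the training state at step 10000, presumably for one data-loader rank/worker (index 00024): the data-loader position (source shard, offset, RNG states) and the LR-scheduler state. A minimal sketch for inspecting the fields shown above, assuming the file has been downloaded to a matching local path:

```python
# Minimal sketch: inspecting the train-state snapshot added in this commit.
# The local path is assumed to mirror the in-repo path.
import json

with open("llama_7b_seq8k_bs2M_skyladder_decay/0000010000/train_state_00024.json") as f:
    state = json.load(f)

print("step:", state["step"])                      # 10000
print("last lr:", state["scheduler"]["_last_lr"])  # [4e-05]

loader = state["data_loader_state"]["it_state"]
src = loader["it_state"]["it_state"]["source_to_state"]["dclm_pro"]
print("data file:", src["file_path"])              # dclm_pro.chunk.00024.jsonl
print("position:", src["position"])                # 1342434594, per the state above
print("sequence length:", loader["output_seq_len"])  # 8192
```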