nroggendorff committed
Commit 0f3cc51 · verified · 1 Parent(s): dcf2dd4

Update train.py

Files changed (1):
  1. train.py +8 -10
train.py CHANGED
@@ -4,7 +4,7 @@ import torch
 import trl
 
 from transformers import AutoTokenizer, LlamaConfig, LlamaForCausalLM, TrainingArguments, PreTrainedTokenizerFast, AdamW, get_cosine_schedule_with_warmup
-from datasets import load_dataset, DatasetDict, Dataset
+from datasets import load_dataset, Dataset
 from tokenizers import ByteLevelBPETokenizer
 
 BATCH_SIZE = 4
@@ -24,15 +24,13 @@ GRADIENT_ACCUMULATION_STEPS = 1
 PUSH_TO_HUB = True
 
 def load_data():
-    pretrain = load_dataset(INPUT_DATASET, "cosmopedia-v2", split="train", streaming=True)
-    pretrain = Dataset.from_generator(lambda: pretrain.take(int(3e+4)))
-    instruct = load_dataset(INSTRUCT_DATASET, split="train", streaming=True)
-    instruct = Dataset.from_generator(lambda: instruct.take(int(5e+4)))
-    dataset_dict = DatasetDict({
-        'pretrain': pretrain,
-        'instruct': instruct
-    })
-    return dataset_dict
+    if not INSTRUCT_FINETUNE_BOOL:
+        dataset = load_dataset(INSTRUCT_DATASET, split="train", streaming=True)
+        dataset = Dataset.from_generator(lambda: dataset.take(int(5e+4)))
+    else:
+        dataset = load_dataset(INPUT_DATASET, "cosmopedia-v2", split="train", streaming=True)
+        dataset = Dataset.from_generator(lambda: dataset.take(int(6e+4)))
+    return dataset
 
 def create_tokenizer(training_corpus):
     tokenizer = ByteLevelBPETokenizer()
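
For reference, both branches of the new load_data() rely on the same streaming pattern: open the split as an IterableDataset, take() the first N examples, and materialize them in memory with Dataset.from_generator. A minimal sketch of that pattern follows; the helper name take_first_n and the example dataset ID are placeholders, since INPUT_DATASET and INSTRUCT_DATASET are constants defined elsewhere in train.py and not shown in this diff.

from datasets import Dataset, load_dataset

def take_first_n(dataset_id, n, config=None):
    # Open the split lazily so the full corpus is never downloaded up front.
    stream = load_dataset(dataset_id, config, split="train", streaming=True)
    # take(n) yields only the first n streamed examples; from_generator
    # iterates them once and caches the result as a regular in-memory Dataset.
    return Dataset.from_generator(lambda: stream.take(n))

# Hypothetical usage mirroring the cosmopedia-v2 branch (dataset ID assumed):
pretrain = take_first_n("HuggingFaceTB/smollm-corpus", int(6e4), config="cosmopedia-v2")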