Spaces:
Runtime error
Runtime error
Update train.py
Browse files
train.py
CHANGED
@@ -4,7 +4,7 @@ import torch
|
|
4 |
import trl
|
5 |
|
6 |
from transformers import AutoTokenizer, LlamaConfig, LlamaForCausalLM, TrainingArguments, PreTrainedTokenizerFast, AdamW, get_cosine_schedule_with_warmup
|
7 |
-
from datasets import load_dataset,
|
8 |
from tokenizers import ByteLevelBPETokenizer
|
9 |
|
10 |
BATCH_SIZE = 4
|
@@ -24,15 +24,13 @@ GRADIENT_ACCUMULATION_STEPS = 1
|
|
24 |
PUSH_TO_HUB = True
|
25 |
|
26 |
def load_data():
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
})
|
35 |
-
return dataset_dict
|
36 |
|
37 |
def create_tokenizer(training_corpus):
|
38 |
tokenizer = ByteLevelBPETokenizer()
|
|
|
4 |
import trl
|
5 |
|
6 |
from transformers import AutoTokenizer, LlamaConfig, LlamaForCausalLM, TrainingArguments, PreTrainedTokenizerFast, AdamW, get_cosine_schedule_with_warmup
|
7 |
+
from datasets import load_dataset, Dataset
|
8 |
from tokenizers import ByteLevelBPETokenizer
|
9 |
|
10 |
BATCH_SIZE = 4
|
|
|
24 |
PUSH_TO_HUB = True
|
25 |
|
26 |
def load_data():
    """Load the training corpus as an in-memory ``datasets.Dataset``.

    Streams the dataset from the Hugging Face Hub and materializes a
    fixed-size prefix, so downstream code gets a regular (non-streaming)
    ``Dataset`` it can index and shuffle.

    Returns:
        datasets.Dataset: 60k rows of the pretraining corpus
        (``INPUT_DATASET`` / "cosmopedia-v2") when
        ``INSTRUCT_FINETUNE_BOOL`` is False, otherwise 50k rows of the
        instruction-tuning corpus (``INSTRUCT_DATASET``).
    """
    if not INSTRUCT_FINETUNE_BOOL:
        # Pretraining path. BUGFIX: the original loaded INSTRUCT_DATASET
        # here, inverting the flag's meaning — when we are NOT instruction
        # fine-tuning we must read the base pretraining corpus.
        stream = load_dataset(INPUT_DATASET, "cosmopedia-v2", split="train", streaming=True)
        # Bind the streaming handle to its own name so the lambda does not
        # close over a variable that is about to be rebound.
        dataset = Dataset.from_generator(lambda: stream.take(60_000))
    else:
        # Instruction fine-tuning path (was previously the pretraining set).
        stream = load_dataset(INSTRUCT_DATASET, split="train", streaming=True)
        dataset = Dataset.from_generator(lambda: stream.take(50_000))
    return dataset
|
|
|
|
|
34 |
|
35 |
def create_tokenizer(training_corpus):
|
36 |
tokenizer = ByteLevelBPETokenizer()
|