mc4_3.1.0_fi_cleaned / calculate_toxicity_labels.py
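# Compute per-label toxicity scores for the cleaned Finnish mC4 dataset with a
# TurkuNLP Finnish toxicity classifier, running batched inference across all
# local accelerator devices via jax.pmap.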
from transformers import AutoTokenizer, FlaxBertForSequenceClassification
import datasets
import jax
import jax.numpy as jnp
from flax.training.common_utils import shard
from jax import pmap
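
# Forward pass for one device shard; sigmoid converts the logits into
# independent per-label probabilities (multi-label classification).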
def pred_fn(inputs):
    outputs = model(**inputs)
    return jax.nn.sigmoid(outputs.logits)
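
# Tokenize a batch, pad it to the fixed global batch size, shard it across
# devices, run the pmapped model, and attach one score column per label.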
def get_toxicity(batch, batch_size):
    num_examples = len(batch["text"])
    inputs = tokenizer(
        batch["text"],
        return_tensors="np",
        truncation=True,
        padding="max_length",
        max_length=512,
    )
    # Pad the last (possibly smaller) batch up to batch_size rows so that
    # shard() can split it evenly across the local devices.
    inputs = shard(
        {
            k: jnp.pad(jnp.array(v), ((0, batch_size - num_examples), (0, 0)))
            for k, v in inputs.items()
        }
    )
    preds = p_pred(inputs)
    # Merge the device dimension back and drop the padding rows.
    preds = preds.reshape(-1, preds.shape[-1])[:num_examples]
    # Add one probability column per label, named after the model's labels.
    for k, v in model.config.id2label.items():
        batch[v] = preds[:, k].tolist()
    return batch
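
# Replicate pred_fn across all local devices ("inputs" is the pmap axis name).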
p_pred = pmap(pred_fn, "inputs")
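
# Load the Finnish toxicity classifier; the weights are converted from the
# PyTorch checkpoint and cast to bfloat16 for faster accelerator inference.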
tokenizer = AutoTokenizer.from_pretrained("TurkuNLP/bert-large-finnish-cased-toxicity")
model = FlaxBertForSequenceClassification.from_pretrained(
    "TurkuNLP/bert-large-finnish-cased-toxicity", from_pt=True, dtype=jnp.bfloat16
)
dataset = datasets.load_from_disk("/researchdisk/mc4_3.1.0_fi_cleaned")
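
# Global batch size; it must be divisible by jax.local_device_count() so that
# shard() can split each padded batch evenly across devices.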
BATCH_SIZE = 8192
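# Label every example in one pass; num_proc=1 keeps all JAX device work in the
# main process, and the batch size is forwarded so get_toxicity can pad the
# final, smaller batch.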
dataset = dataset.map(
    get_toxicity,
    num_proc=1,
    batched=True,
    batch_size=BATCH_SIZE,
    fn_kwargs={"batch_size": BATCH_SIZE},
)
print(dataset)
# Save the dataset with the added toxicity label columns back to disk.
dataset.save_to_disk(
    "/researchdisk/mc4_3.1.0_fi_cleaned_dataset_toxicity_labels", num_proc=32
)