# Snippet: tokenize a dataset's "prompt" column with the
# Isotonic/gpt-human-assistant tokenizer (Hugging Face transformers/datasets).
from transformers import AutoTokenizer

# Module-level tokenizer, loaded once and shared by tokenize_function below.
# Downloads (or reads from the local cache) the pretrained tokenizer files
# for the "Isotonic/gpt-human-assistant" model from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("Isotonic/gpt-human-assistant")
def tokenize_function(examples):
    """Tokenize the "prompt" field of a batch of examples.

    Intended as a callback for ``datasets.Dataset.map(..., batched=True)``:
    *examples* is a dict of columns, and the module-level ``tokenizer`` is
    applied to its "prompt" column with truncation to the model's max length.
    """
    encoded = tokenizer(examples["prompt"], truncation=True)
    return encoded
# Apply tokenize_function across the dataset in batches; map() adds the
# tokenizer's output columns (e.g. input_ids, attention_mask) to each row.
# NOTE(review): `dataset` is not defined anywhere in this file — presumably
# loaded elsewhere (e.g. via datasets.load_dataset) with a "prompt" column;
# confirm against the surrounding code.
tokenized_datasets = dataset.map(tokenize_function, batched=True)