Jarbot / preprocesamiento
ValValFunny's picture
Create preprocesamiento
ae7e3a9 verified
raw
history blame contribute delete
272 Bytes
from transformers import AutoTokenizer
# Load the tokenizer that matches the "Isotonic/gpt-human-assistant" checkpoint,
# so the text is encoded exactly as the model expects during fine-tuning/inference.
tokenizer = AutoTokenizer.from_pretrained("Isotonic/gpt-human-assistant")
def tokenize_function(examples):
    """Tokenize the "prompt" field of a batch of examples.

    Intended for use with ``Dataset.map(..., batched=True)``: *examples*
    is a batch dict, so ``examples["prompt"]`` is a list of strings.
    Sequences longer than the tokenizer's model max length are truncated.
    """
    prompts = examples["prompt"]
    return tokenizer(prompts, truncation=True)
# Tokenize the whole dataset; batched=True passes batches of examples to
# tokenize_function for speed.
# NOTE(review): `dataset` is not defined in this snippet — presumably loaded
# elsewhere (e.g. via datasets.load_dataset); confirm before running standalone.
tokenized_datasets = dataset.map(tokenize_function, batched=True)