import gradio as gr
from transformers import GPT2TokenizerFast
import spaces
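
# Load the GPT-4o tokenizer published in the Xenova/gpt-4o repo (a
# transformers-compatible tokenizer.json) and sanity-check a known encoding.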
tokenizer = GPT2TokenizerFast.from_pretrained('Xenova/gpt-4o')
assert tokenizer.encode('hello world') == [24912, 2375]
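
# ZeroGPU decorator from the `spaces` package: requests a GPU slot per call on
# Hugging Face Spaces (tokenization itself does not actually need a GPU).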
@spaces.GPU
def tokenize(message, history):
    # Return the token IDs for the latest user message; the chat history is unused.
    return str(tokenizer.encode(message))
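
# Expose the tokenizer through a simple chat UI: each message is answered with its token IDs.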
demo = gr.ChatInterface(tokenize)

if __name__ == "__main__":
    demo.launch()