from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from transformers.trainer_utils import set_seed
import xformers
import streamlit as st
import torch
import time

# Seed the generator from the current time so each run produces a different sample
SEED = int(time.time())
set_seed(SEED)
# Layout: a wide main column and a narrower side column
col1, col2 = st.columns([2, 1])
# Load the tokenizer and the music-token GPT-2 model
tokenizer = AutoTokenizer.from_pretrained("gpt2-medium")
tokenizer.padding_side = 'left'
model = AutoModelForCausalLM.from_pretrained("breadlicker45/gpt2-music")

def get_model():
    return pipeline('text-generation', model=model,
                    tokenizer=tokenizer, do_sample=True)
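# A minimal sketch (assumption, not part of the original app): decorating
# get_model with Streamlit's st.cache_resource would keep the pipeline in
# memory across reruns instead of rebuilding it on every script execution.
#
# @st.cache_resource
# def get_model():
#     return pipeline('text-generation', model=model,
#                     tokenizer=tokenizer, do_sample=True)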
# UI: prompt input in the main column
with col1:
    prompt = st.text_input(
        'input',
        '''2623 2619 3970 3976 2607 3973 2735 3973 2598 3985 2726 3973 2607 4009 2735 3973 2598 3973 2726 3973 2607 3973 2735 4009''')
# Generate text from the prompt
text = prompt
generator = get_model()
status = st.empty()
status.info('Generating text...')
answer = generator(text, pad_token_id=tokenizer.eos_token_id, do_sample=True,
                   max_length=350, min_length=80, temperature=0.7, top_k=2,
                   num_beams=1, no_repeat_ngram_size=1, early_stopping=True)
status.empty()
out = answer[0]['generated_text']

# "Typewriter" effect: reveal the generated text one character at a time
t = st.empty()
for i in range(len(out)):
    t.markdown("#### %s" % out[:i + 1])
    time.sleep(0.04)
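# Usage note (assumption: this file is the Space's app.py and the imports above
# are installed): run the app locally with
#   streamlit run app.py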