ahmadrocks's picture
Update app.py
80b6635 verified
raw
history blame
807 Bytes
import streamlit as st
from transformers import GPT2LMHeadModel, GPT2Tokenizer
# Load the GPT-2 model and tokenizer.
# Cached with st.cache_resource: Streamlit re-executes this whole script on
# every widget interaction, and without caching the ~3 GB gpt2-large weights
# would be reloaded from disk on each rerun.
@st.cache_resource
def _load_model(name: str):
    """Return the (model, tokenizer) pair for the given pretrained name."""
    return GPT2LMHeadModel.from_pretrained(name), GPT2Tokenizer.from_pretrained(name)

model_name = 'gpt2-large'
model, tokenizer = _load_model(model_name)

st.title("Article Generator")

# Input for the article title
title = st.text_input("Enter the title of the article")

# Generate the article only when the button is pressed and a title was given
if st.button("Generate Article"):
    if title:
        # Encode the title as the generation prompt (PyTorch tensor of token ids)
        input_ids = tokenizer.encode(title, return_tensors='pt')
        # no_repeat_ngram_size=2 curbs degenerate repetition; early_stopping
        # is kept from the original call (it only takes effect under beam
        # search). pad_token_id is set explicitly because GPT-2 has no pad
        # token — using EOS silences the open-end-generation warning.
        output = model.generate(
            input_ids,
            max_length=500,
            num_return_sequences=1,
            no_repeat_ngram_size=2,
            early_stopping=True,
            pad_token_id=tokenizer.eos_token_id,
        )
        # Decode the single generated sequence back to text for display
        article = tokenizer.decode(output[0], skip_special_tokens=True)
        st.write(article)
    else:
        st.warning("Please enter a title to generate an article")