import streamlit as st
from transformers import T5ForConditionalGeneration, T5Tokenizer


# Load the model and tokenizer once and cache them across Streamlit reruns,
# so the weights are not reloaded on every widget interaction.
# Note: T5Tokenizer (the slow tokenizer) needs the sentencepiece package installed.
@st.cache_resource
def load_model():
    model = T5ForConditionalGeneration.from_pretrained("t5-small")
    tokenizer = T5Tokenizer.from_pretrained("t5-small")
    return model, tokenizer


model, tokenizer = load_model()


def generate_response(input_text):
    # Prefix the user message, tokenize it, and truncate so the encoder
    # never receives more than 512 tokens.
    input_ids = tokenizer.encode(
        "chatbot: " + input_text,
        return_tensors="pt",
        max_length=512,
        truncation=True,
    )
    # Greedy decoding (num_beams=1), capped at 100 generated tokens.
    output_ids = model.generate(input_ids, max_length=100, num_beams=1)
    # Turn the generated token IDs back into plain text.
    response = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    return response


st.title("Simple Chatbot with T5")

user_input = st.text_input("You:", "")

if st.button("Send"):
    if user_input.strip() != "":
        response = generate_response(user_input)
        st.text_area("Bot:", response)
    else:
        st.warning("Please enter a valid input.")
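
# A minimal way to try the app locally, assuming the script above is saved as
# app.py (the filename is just an example, not something stated in the code):
#
#   pip install streamlit transformers sentencepiece torch
#   streamlit run app.py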