# NOTE(review): the two lines below are a Hugging Face Space status banner
# captured by the page scrape ("Spaces: Sleeping") — not part of the program.
# Spaces: Sleeping
# -*- coding: utf-8 -*-
"""app.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1qNBkOEPBOkXJ0zcGdwQmdS7bt5zxjpIr

##Creating app.py

###Installing Dependencies
"""

# IPython magic below only runs inside a notebook; in a plain .py / Space,
# declare these in requirements.txt instead.
# !pip install gradio transformers torch
"""###Importing Dependencies""" | |
import gradio as gr | |
from transformers import AutoModelForCausalLM, AutoTokenizer | |
"""###Loading the model and tokenizer""" | |
model_name = "gpt2" | |
model = AutoModelForCausalLM.from_pretrained(model_name) | |
tokenizer = AutoTokenizer.from_pretrained(model_name) | |
"""###Defining the prediction function""" | |
def generate_text(prompt): | |
inputs = tokenizer(prompt, return_tensors="pt") | |
outputs = model.generate(**inputs, max_length=100) | |
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True) | |
return generated_text | |
"""###Creating the Gradio interface | |
""" | |
api = gr.Interface( | |
fn=generate_text, | |
inputs=gr.Textbox(label="Input Prompt"), | |
outputs=gr.Textbox(label="Generated Text"), | |
) | |
"""###Launching the API""" | |
api.launch() |