	Update app.py

app.py CHANGED
@@ -1,16 +1,24 @@
-import 
-import gradio as gr
+import streamlit as st
 from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
+import torch
+
+# Define the summarization pipeline
+summarizer_ntg = pipeline("summarization", model="mrm8488/t5-base-finetuned-summarize-news")
 
-# 
-
+# Load the tokenizer and model for classification
+tokenizer_bb = AutoTokenizer.from_pretrained("your-username/your-model-name")
+model_bb = AutoModelForSequenceClassification.from_pretrained("your-username/your-model-name")
 
-# 
-
-
+# Streamlit application title
+st.title("News Article Summarizer and Classifier")
+st.write("Enter a news article text to get its summary and category.")
 
-
-
+# Text input for user to enter the news article text
+text = st.text_area("Enter the news article text here:")
+
+# Perform summarization and classification when the user clicks the "Classify" button
+if st.button("Classify"):
+    # Perform text summarization
     summary = summarizer_ntg(text)[0]['summary_text']
 
     # Tokenize the summarized text
@@ -21,7 +29,7 @@ def summarize_and_classify(text):
     inputs = {k: v.to(device) for k, v in inputs.items()}
     model_bb.to(device)
 
-    # Perform classification
+    # Perform text classification
     with torch.no_grad():
         outputs = model_bb(**inputs)
 
@@ -30,16 +38,6 @@ def summarize_and_classify(text):
     label_mapping = model_bb.config.id2label
     predicted_label = label_mapping[predicted_label_id]
 
-    
-
-
-iface = gr.Interface(
-    fn=summarize_and_classify,
-    inputs=gr.inputs.Textbox(lines=10, placeholder="Enter news article text here..."),
-    outputs=[gr.outputs.Textbox(label="Summary"), gr.outputs.Textbox(label="Category")],
-    title="News Article Summarizer and Classifier",
-    description="Enter a news article text and get its summary and category."
-)
-
-# Launch the interface
-iface.launch()
+    # Display the summary and classification result
+    st.write("Summary:", summary)
+    st.write("Category:", predicted_label)
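For reference, the app.py that results from this commit would look roughly like the sketch below. The lines between the hunks (the tokenization call, the device setup, and the argmax over the logits) are unchanged by the diff and therefore not shown in the commit, so they are reconstructed here as plausible assumptions rather than taken verbatim from the file; the "your-username/your-model-name" checkpoint is the placeholder that appears in the diff itself.

import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
import torch

# Define the summarization pipeline
summarizer_ntg = pipeline("summarization", model="mrm8488/t5-base-finetuned-summarize-news")

# Load the tokenizer and model for classification
# ("your-username/your-model-name" is the placeholder used in the diff)
tokenizer_bb = AutoTokenizer.from_pretrained("your-username/your-model-name")
model_bb = AutoModelForSequenceClassification.from_pretrained("your-username/your-model-name")

# Streamlit application title
st.title("News Article Summarizer and Classifier")
st.write("Enter a news article text to get its summary and category.")

# Text input for user to enter the news article text
text = st.text_area("Enter the news article text here:")

# Perform summarization and classification when the user clicks the "Classify" button
if st.button("Classify"):
    # Perform text summarization
    summary = summarizer_ntg(text)[0]['summary_text']

    # Tokenize the summarized text (assumed unchanged lines, not shown in the diff)
    inputs = tokenizer_bb(summary, return_tensors="pt", truncation=True, padding=True)

    # Move inputs and model to the available device (partly assumed)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    inputs = {k: v.to(device) for k, v in inputs.items()}
    model_bb.to(device)

    # Perform text classification
    with torch.no_grad():
        outputs = model_bb(**inputs)

    # Map the highest-scoring logit to its label (assumed unchanged lines)
    predicted_label_id = outputs.logits.argmax(dim=-1).item()
    label_mapping = model_bb.config.id2label
    predicted_label = label_mapping[predicted_label_id]

    # Display the summary and classification result
    st.write("Summary:", summary)
    st.write("Category:", predicted_label)

If the Space is configured with the Streamlit SDK, Hugging Face runs this file for you; locally the equivalent would be `streamlit run app.py`.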