"""Gradio app: summarize a news article from a URL with facebook/bart-large-cnn."""

from functools import lru_cache

import gradio as gr
import nltk
from newspaper import Article, Config
from transformers import pipeline

# punkt is needed by newspaper's parsing; quiet=True avoids noisy
# re-download logging on every start.
nltk.download("punkt", quiet=True)

# Pretend to be a desktop browser so news sites don't reject the scraper.
USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0"


@lru_cache(maxsize=1)
def _get_summarizer():
    """Load the summarization pipeline exactly once and reuse it.

    The original code rebuilt the pipeline (a multi-GB model load) on
    every request, making each summary pay the full model-load cost.
    """
    return pipeline("summarization", model="facebook/bart-large-cnn")


def extract_article_summary(url):
    """Download the article at *url* and return a model-generated summary.

    Parameters
    ----------
    url : str
        Address of the news article to fetch.

    Returns
    -------
    str
        The summary text produced by bart-large-cnn.

    Raises
    ------
    ValueError
        If no article text could be extracted from the page.
    (newspaper also raises its ArticleException on download/parse failure.)
    """
    config = Config()
    config.browser_user_agent = USER_AGENT
    config.request_timeout = 10

    article = Article(url, config=config)
    article.download()
    article.parse()

    text = article.text
    if not text:
        # Fail with a clear message instead of crashing deep inside the model.
        raise ValueError(f"No article text could be extracted from {url}")

    summarizer = _get_summarizer()
    # truncation=True keeps long articles within the model's max input
    # length (1024 tokens for bart-large-cnn) instead of raising.
    return summarizer(text, truncation=True)[0]["summary_text"]


sample_url = [
    [
        "https://www.technologyreview.com/2021/07/22/1029973/deepmind-alphafold-protein-folding-biology-disease-drugs-proteome/"
    ],
    [
        "https://www.technologyreview.com/2021/07/21/1029860/disability-rights-employment-discrimination-ai-hiring/"
    ],
    [
        "https://www.technologyreview.com/2021/07/09/1028140/ai-voice-actors-sound-human/"
    ],
]

desc = """
         Let Hugging Face models summarize articles for you. 
         Note: Shorter articles generate faster summaries.
         This summarizer uses bart-large-cnn model by Facebook
         """

demo = gr.Interface(
    extract_article_summary,
    inputs=gr.Textbox(lines=2, label="URL"),
    outputs="text",
    title="News Summarizer",
    theme="huggingface",
    description=desc,
    examples=sample_url,
)

# Guard so importing this module (e.g. for testing) doesn't start the server.
if __name__ == "__main__":
    demo.launch()