Update api.py
api.py CHANGED
@@ -11,7 +11,6 @@ import json
 import logging
 import multiprocessing
 from news_classifier import predict_news_classes
-from config import SCRAPER_MAX_RETRIES
 
 
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
@@ -38,7 +37,7 @@ async def scrape_urls(urls):
 
     results = []
     for url in urls:
-        f = pool.apply_async(scrape_text, [url])
+        f = pool.apply_async(scrape_text, [url]) # asynchronously scraping text
         results.append(f) # appending result to results
 
     scraped_texts = []
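For context, the second hunk uses `multiprocessing.Pool.apply_async` to fan scraping jobs out to worker processes. Below is a minimal, runnable sketch of that pattern under stated assumptions: the real api.py defines `scrape_text` and the `pool` elsewhere (neither appears in this diff), and its `scrape_urls` is declared `async`; the placeholder `scrape_text` and the plain function here are hypothetical simplifications for illustration.

```python
import multiprocessing


def scrape_text(url):
    # Placeholder for the real scraper defined elsewhere in api.py.
    return f"text from {url}"


def scrape_urls(urls):
    with multiprocessing.Pool() as pool:
        results = []
        for url in urls:
            # apply_async returns an AsyncResult immediately;
            # the worker process scrapes in the background.
            f = pool.apply_async(scrape_text, [url])
            results.append(f)  # appending result to results

        # .get() blocks until each worker finishes, preserving input order.
        scraped_texts = [f.get() for f in results]
    return scraped_texts


if __name__ == "__main__":
    print(scrape_urls(["https://example.com", "https://example.org"]))
```

Collecting the `AsyncResult` handles first and calling `.get()` afterwards lets all URLs scrape concurrently instead of one at a time; the `TOKENIZERS_PARALLELISM=false` setting in the first hunk is commonly used to silence Hugging Face tokenizers' fork warnings when combining them with multiprocessing.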