run every hour
- app.py +13 -4
- news_data.py +10 -0
- requirements.txt +2 -1
app.py
CHANGED
@@ -7,14 +7,24 @@ from fastapi.responses import HTMLResponse
 from fastapi import FastAPI, Request, HTTPException
 from pathlib import Path
 from dateutil import parser
-
+from contextlib import asynccontextmanager
+from news_data import scheduler
 from db import Database

 database = Database(Path("./"))
 logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))


-app = FastAPI()
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    print("Startup")
+    scheduler.start()
+    yield
+    scheduler.shutdown()
+    print("Shutdown")
+
+
+app = FastAPI(lifespan=lifespan)

 app.add_middleware(
     CORSMiddleware,
@@ -31,8 +41,7 @@ def format_date(value):
     try:
         date = parser.parse(value)
         return date.strftime(format)
-    except Exception as e:
-        logging.error(e)
+    except Exception:
         return value

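Note on the app.py change: the scheduler is wired into FastAPI's lifespan hook, where everything before the yield runs once at startup and everything after it runs once at shutdown. A minimal standalone sketch of the pattern, with print placeholders standing in for the scheduler calls above:

from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup work goes here; in app.py this is scheduler.start().
    print("Startup")
    yield  # the application serves requests while suspended here
    # Shutdown work goes here; in app.py this is scheduler.shutdown().
    print("Shutdown")


app = FastAPI(lifespan=lifespan)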
news_data.py
CHANGED
@@ -2,6 +2,10 @@ import feedparser
 from pathlib import Path
 import json
 from db import Database
+from apscheduler.schedulers.background import BackgroundScheduler
+from datetime import datetime
+
+scheduler = BackgroundScheduler()

 database = Database(Path("./"))

@@ -12,6 +16,7 @@ def get_feed(feed_url):


 def cache_news():
+    print("Caching news")
     data = []
     for feed in TOP_NEWS_FEEDS:
         url = feed["url"]
@@ -29,6 +34,11 @@ def cache_news():
     database.insert(data)


+scheduler.add_job(
+    cache_news, "interval", hours=1, id="cache_news", next_run_time=datetime.now()
+)
+
+
 TOP_NEWS_FEEDS = [
     {"label": "BBC World News", "url": "http://feeds.bbci.co.uk/news/world/rss.xml"},
     {
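Note on the news_data.py change: BackgroundScheduler runs jobs on background threads in the same process, the "interval" trigger with hours=1 re-runs the job every hour, and next_run_time=datetime.now() schedules an immediate first run instead of waiting a full hour. A minimal self-contained sketch of the same pattern, with a hypothetical tick job standing in for cache_news:

import time
from datetime import datetime

from apscheduler.schedulers.background import BackgroundScheduler


def tick():
    # Hypothetical job body; cache_news plays this role in news_data.py.
    print("tick at", datetime.now())


scheduler = BackgroundScheduler()
# "interval" re-runs the job every hour; next_run_time=datetime.now()
# adds an immediate first run on top of the hourly schedule.
scheduler.add_job(tick, "interval", hours=1, id="tick", next_run_time=datetime.now())

if __name__ == "__main__":
    scheduler.start()
    try:
        time.sleep(10)  # keep the main thread alive long enough to see a run
    finally:
        scheduler.shutdown()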
requirements.txt
CHANGED
@@ -2,4 +2,5 @@ fastapi
 uvicorn
 feedparser
 Jinja2
-python-dateutil
+python-dateutil
+apscheduler
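Note on requirements.txt: python-dateutil is re-listed unchanged (likely just a missing trailing newline on the old last line), and apscheduler is the only new dependency; a plain pip install -r requirements.txt picks it up. None of the entries pin versions, so installs resolve to the latest releases.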