diff --git a/app/data/__init__.py b/app/data/__init__.py
index 60a75dac..c6b15daa 100644
--- a/app/data/__init__.py
+++ b/app/data/__init__.py
@@ -14,8 +14,7 @@
 def data_source(source):
     """
     Retrieves the provided data-source service.
 
-    :returns: The service.
     :rtype: LocationService
     """
-    return DATA_SOURCES.get(source.lower())
+    return DATA_SOURCES.get(source.lower())
\ No newline at end of file
diff --git a/app/main.py b/app/main.py
index b9aff949..8a137cd7 100644
--- a/app/main.py
+++ b/app/main.py
@@ -118,4 +118,4 @@ async def handle_validation_error(
 if __name__ == "__main__":
     uvicorn.run(
         "app.main:APP", host="127.0.0.1", port=SETTINGS.port, log_level="info",
-    )
+    )
\ No newline at end of file
diff --git a/app/routers/v2.py b/app/routers/v2.py
index 31eb408c..ecf49c17 100644
--- a/app/routers/v2.py
+++ b/app/routers/v2.py
@@ -107,4 +107,4 @@ async def sources():
     """
     Retrieves a list of data-sources that are availble to use.
     """
-    return {"sources": list(DATA_SOURCES.keys())}
+    return {"sources": list(DATA_SOURCES.keys())}
\ No newline at end of file
diff --git a/app/services/location/__init__.py b/app/services/location/__init__.py
deleted file mode 100644
index 6d292b54..00000000
--- a/app/services/location/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""app.services.location"""
-from abc import ABC, abstractmethod
-
-
-class LocationService(ABC):
-    """
-    Service for retrieving locations.
-    """
-
-    @abstractmethod
-    async def get_all(self):
-        """
-        Gets and returns all of the locations.
-
-        :returns: The locations.
-        :rtype: List[Location]
-        """
-        raise NotImplementedError
-
-    @abstractmethod
-    async def get(self, id):  # pylint: disable=redefined-builtin,invalid-name
-        """
-        Gets and returns location with the provided id.
-
-        :returns: The location.
-        :rtype: Location
-        """
-        raise NotImplementedError
diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py
index 444ebad6..acc48209 100644
--- a/app/services/location/csbs.py
+++ b/app/services/location/csbs.py
@@ -11,6 +11,8 @@
 from ...location.csbs import CSBSLocation
 from ...utils import httputils
 from . import LocationService
+from .factorylocation import factorylocation
+
 
 
 LOGGER = logging.getLogger("services.location.csbs")
@@ -73,23 +75,9 @@ async def get_locations():
             # Date string without "EDT" at end.
             last_update = " ".join(item["Last Update"].split(" ")[0:2])
+            params = {"index": i, "state": state, "county": county, "item": item, "last_update": last_update}
 
-            # Append to locations.
-            locations.append(
-                CSBSLocation(
-                    # General info.
-                    i,
-                    state,
-                    county,
-                    # Coordinates.
-                    Coordinates(item["Latitude"], item["Longitude"]),
-                    # Last update (parse as ISO).
-                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                    # Statistics.
-                    int(item["Confirmed"] or 0),
-                    int(item["Death"] or 0),
-                )
-            )
+            locations.append(factorylocation.create_location("CSBS", params))
 
     LOGGER.info(f"{data_id} Data normalized")
     # save the results to distributed cache
     # TODO: fix json serialization
diff --git a/app/services/location/factorylocation.py b/app/services/location/factorylocation.py
new file mode 100644
index 00000000..4006ee8c
--- /dev/null
+++ b/app/services/location/factorylocation.py
@@ -0,0 +1,97 @@
+"""app.services.location.factorylocation.py"""
+from datetime import datetime
+
+from ...coordinates import Coordinates
+from ...location import TimelinedLocation
+from ...location.csbs import CSBSLocation
+from ...location.nyt import NYTLocation
+from ...models import Timeline
+
+
+class factorylocation:
+    """
+    Factory for building the location object that matches a data source.
+    """
+
+    @staticmethod
+    def create_location(org_name, params):
+        """
+        Builds and returns a location for the given data source.
+
+        :returns: The location.
+        :rtype: Location
+        """
+        if org_name == "NYT":
+            confirmed_history = params["confirmed_history"]
+            deaths_history = params["deaths_history"]
+            return NYTLocation(
+                id=params["index"],
+                state=params["county_state"][1],
+                county=params["county_state"][0],
+                coordinates=Coordinates(None, None),  # NYT does not provide coordinates
+                last_updated=datetime.utcnow().isoformat() + "Z",  # since last request
+                timelines={
+                    "confirmed": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
+                            for date, amount in confirmed_history.items()
+                        }
+                    ),
+                    "deaths": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
+                            for date, amount in deaths_history.items()
+                        }
+                    ),
+                    "recovered": Timeline(),
+                },
+            )
+        if org_name == "JHU":
+            timelines = params["timelines"]
+            return TimelinedLocation(
+                # General info.
+                params["index"],
+                params["country"],
+                params["province"],
+                # Coordinates.
+                Coordinates(latitude=params["coordinates"]["lat"], longitude=params["coordinates"]["long"]),
+                # Last update.
+                datetime.utcnow().isoformat() + "Z",
+                # Timelines (parse dates as ISO).
+                {
+                    "confirmed": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["confirmed"].items()
+                        }
+                    ),
+                    "deaths": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["deaths"].items()
+                        }
+                    ),
+                    "recovered": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["recovered"].items()
+                        }
+                    ),
+                },
+            )
+        if org_name == "CSBS":
+            item = params["item"]
+            return CSBSLocation(
+                # General info.
+                params["index"],
+                params["state"],
+                params["county"],
+                # Coordinates.
+                Coordinates(item["Latitude"], item["Longitude"]),
+                # Last update (parse as ISO).
+                datetime.strptime(params["last_update"], "%Y-%m-%d %H:%M").isoformat() + "Z",
+                # Statistics.
+                int(item["Confirmed"] or 0),
+                int(item["Death"] or 0),
+            )
+        return None
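For reference, a minimal usage sketch of the new factory as the call sites below drive it. This is not part of the patch; the params values are hypothetical and only illustrate the keys the "CSBS" branch reads:

# Hypothetical usage sketch (not part of the diff): builds a CSBSLocation
# through the factory with the same params keys csbs.py assembles above.
from app.services.location.factorylocation import factorylocation

params = {
    "index": 0,
    "state": "NY",
    "county": "Albany",
    "item": {"Latitude": 42.65, "Longitude": -73.75, "Confirmed": "10", "Death": "1"},
    "last_update": "2020-04-01 12:00",
}
location = factorylocation.create_location("CSBS", params)  # returns a CSBSLocation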
diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py
index ebed3960..843890b0 100644
--- a/app/services/location/jhu.py
+++ b/app/services/location/jhu.py
@@ -16,6 +16,7 @@
 from ...utils import date as date_util
 from ...utils import httputils
 from . import LocationService
+from .factorylocation import factorylocation
 
 LOGGER = logging.getLogger("services.location.jhu")
 PID = os.getpid()
@@ -171,39 +172,9 @@ async def get_locations():
         coordinates = location["coordinates"]
 
         # Create location (supporting timelines) and append.
-        locations.append(
-            TimelinedLocation(
-                # General info.
-                index,
-                location["country"],
-                location["province"],
-                # Coordinates.
-                Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
-                # Last update.
-                datetime.utcnow().isoformat() + "Z",
-                # Timelines (parse dates as ISO).
-                {
-                    "confirmed": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["confirmed"].items()
-                        }
-                    ),
-                    "deaths": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["deaths"].items()
-                        }
-                    ),
-                    "recovered": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["recovered"].items()
-                        }
-                    ),
-                },
-            )
-        )
+        params = {"index": index, "country": location["country"], "province": location["province"],
+                  "coordinates": coordinates, "timelines": timelines}
+        locations.append(factorylocation.create_location("JHU", params))
 
     LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
diff --git a/app/services/location/nyt.py b/app/services/location/nyt.py
index 1f25ec34..4c33e923 100644
--- a/app/services/location/nyt.py
+++ b/app/services/location/nyt.py
@@ -1,17 +1,14 @@
 """app.services.location.nyt.py"""
 import csv
 import logging
-from datetime import datetime
 
 from asyncache import cached
 from cachetools import TTLCache
 
 from ...caches import check_cache, load_cache
-from ...coordinates import Coordinates
-from ...location.nyt import NYTLocation
-from ...models import Timeline
 from ...utils import httputils
 from . import LocationService
+from .factorylocation import factorylocation
 
 LOGGER = logging.getLogger("services.location.nyt")
 
@@ -108,35 +105,11 @@ async def get_locations():
             deaths_list = histories["deaths"]
             deaths_history = {date: int(amount or 0) for date, amount in deaths_list}
 
+            params = {"index": idx, "county_state": county_state, "confirmed_history": confirmed_history, "deaths_history": deaths_history}
+            locations.append(factorylocation.create_location("NYT", params))
-            # Normalize the item and append to locations.
-            locations.append(
-                NYTLocation(
-                    id=idx,
-                    state=county_state[1],
-                    county=county_state[0],
-                    coordinates=Coordinates(None, None),  # NYT does not provide coordinates
-                    last_updated=datetime.utcnow().isoformat() + "Z",  # since last request
-                    timelines={
-                        "confirmed": Timeline(
-                            timeline={
-                                datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
-                                for date, amount in confirmed_history.items()
-                            }
-                        ),
-                        "deaths": Timeline(
-                            timeline={
-                                datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
-                                for date, amount in deaths_history.items()
-                            }
-                        ),
-                        "recovered": Timeline(),
-                    },
-                )
-            )
 
     LOGGER.info(f"{data_id} Data normalized")
 
-    # save the results to distributed cache
-    # TODO: fix json serialization
+
     try:
         await load_cache(data_id, locations)
     except TypeError as type_err:
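As a quick sanity check on the timeline normalization that moved into the factory: the NYT branch turns "%Y-%m-%d" history keys into ISO-8601 timestamps with a trailing "Z". A small standalone example with hypothetical data, mirroring the dict comprehension in factorylocation.create_location:

# Standalone check (hypothetical data, not part of the diff).
from datetime import datetime

deaths_history = {"2020-03-30": 5, "2020-03-31": 7}
timeline = {
    datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
    for date, amount in deaths_history.items()
}
assert timeline == {"2020-03-30T00:00:00Z": 5, "2020-03-31T00:00:00Z": 7}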