Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 10 additions & 7 deletions app/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,21 @@
from ..services.location.nyt import NYTLocationService

# Mapping of services to data-sources.
DATA_SOURCES = {
"jhu": JhuLocationService(),
"csbs": CSBSLocationService(),
"nyt": NYTLocationService(),
}
class DATASOURCES:
    """Registry mapping data-source names to location-service instances.

    Each instance builds its own service mapping in ``__init__``; the
    former class-level ``__data_sources = {}`` is removed because it was
    shared mutable state and, combined with the ``__date_sources`` typo
    below, made every lookup silently return ``None``.
    """

    def __init__(self):
        # BUG FIX: was assigned to the misspelled ``self.__date_sources``,
        # so ``get_data_sources`` always read the empty class dict.
        self.__data_sources = {
            "jhu": JhuLocationService(),
            "csbs": CSBSLocationService(),
            "nyt": NYTLocationService(),
        }

    def get_data_sources(self, source: str):
        """
        Retrieves the provided data-source service.

        :param source: Case-insensitive source name ("jhu", "csbs", "nyt").
        :returns: The service, or ``None`` if the name is unknown.
        :rtype: LocationService
        """
        return self.__data_sources.get(source.lower())
5 changes: 3 additions & 2 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

from .config import get_settings
from .data import data_source
from .data import DATASOURCES
from .routers import V1, V2
from .utils.httputils import setup_client_session, teardown_client_session

Expand Down Expand Up @@ -74,7 +74,8 @@ async def add_datasource(request: Request, call_next):
Attach the data source to the request.state.
"""
    # Retrieve the data source from query param.
source = data_source(request.query_params.get("source", default="jhu"))
source = DATASOURCES()
source.get_data_sources(request.query_params.get("source", default="jhu"))

# Abort with 404 if source cannot be found.
if not source:
Expand Down
7 changes: 7 additions & 0 deletions app/services/location/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,10 @@ async def get(self, id): # pylint: disable=redefined-builtin,invalid-name
:rtype: Location
"""
raise NotImplementedError

class BASE_URLs:
    """Namespace for the external endpoints each data source is fetched from.

    Accessed as plain class attributes (e.g. ``BASE_URLs.jhu``); the class
    is never instantiated, so the former ``str`` base class served no
    purpose and only invited misuse.
    """

    # CSBS county-level CSV feed.
    csbs = "https://facts.csbs.org/covid-19/covid19_county.csv"
    # JHU CSSE time-series directory; callers append the per-category filename.
    jhu = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
    # NYT county-level CSV feed.
    nyt = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"


9 changes: 2 additions & 7 deletions app/services/location/csbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from ...coordinates import Coordinates
from ...location.csbs import CSBSLocation
from ...utils import httputils
from . import LocationService
from . import LocationService, BASE_URLs

LOGGER = logging.getLogger("services.location.csbs")

Expand All @@ -30,11 +30,6 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
locations = await self.get_all()
return locations[loc_id]


# Base URL for fetching data
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"


@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
"""
Expand All @@ -52,7 +47,7 @@ async def get_locations():
locations = cache_results
else:
LOGGER.info(f"{data_id} shared cache empty")
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
async with httputils.CLIENT_SESSION.get(BASE_URLs.csbs) as response:
text = await response.text()

LOGGER.debug(f"{data_id} Data received")
Expand Down
9 changes: 2 additions & 7 deletions app/services/location/jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from ...utils import countries
from ...utils import date as date_util
from ...utils import httputils
from . import LocationService
from . import LocationService, BASE_URLs

LOGGER = logging.getLogger("services.location.jhu")
PID = os.getpid()
Expand All @@ -39,11 +39,6 @@ async def get(self, loc_id): # pylint: disable=arguments-differ

# ---------------------------------------------------------------


# Base URL for fetching category.
BASE_URL = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"


@cached(cache=TTLCache(maxsize=4, ttl=1800))
async def get_category(category):
"""
Expand All @@ -64,7 +59,7 @@ async def get_category(category):
else:
LOGGER.info(f"{data_id} shared cache empty")
# URL to request data from.
url = BASE_URL + "time_series_covid19_%s_global.csv" % category
url = BASE_URLs.jhu + "time_series_covid19_%s_global.csv" % category

# Request the data
LOGGER.info(f"{data_id} Requesting data...")
Expand Down
9 changes: 2 additions & 7 deletions app/services/location/nyt.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from ...location.nyt import NYTLocation
from ...models import Timeline
from ...utils import httputils
from . import LocationService
from . import LocationService, BASE_URLs

LOGGER = logging.getLogger("services.location.nyt")

Expand All @@ -34,11 +34,6 @@ async def get(self, loc_id): # pylint: disable=arguments-differ

# ---------------------------------------------------------------


# Base URL for fetching category.
BASE_URL = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"


def get_grouped_locations_dict(data):
"""
Helper function to group history for locations into one dict.
Expand Down Expand Up @@ -85,7 +80,7 @@ async def get_locations():
locations = cache_results
else:
LOGGER.info(f"{data_id} shared cache empty")
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
async with httputils.CLIENT_SESSION.get(BASE_URLs.nyt) as response:
text = await response.text()

LOGGER.debug(f"{data_id} Data received")
Expand Down