diff --git a/Pipfile b/Pipfile
index 9a0839af..c28a067c 100644
--- a/Pipfile
+++ b/Pipfile
@@ -36,7 +36,7 @@ uvicorn = "*"
 python_version = "3.8"
 
 [scripts]
-dev = "uvicorn app.main:APP --reload"
+dev = "uvicorn app.main:APP --reload --log-level=debug"
 start = "uvicorn app.main:APP"
 fmt = "invoke fmt"
 sort = "invoke sort"
diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py
index d660269c..68bdb01c 100644
--- a/app/services/location/csbs.py
+++ b/app/services/location/csbs.py
@@ -42,14 +42,15 @@ async def get_locations():
     :returns: The locations.
     :rtype: dict
     """
-    LOGGER.info("csbs Requesting data...")
+    data_id = "csbs.locations"
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("csbs Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("csbs CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     locations = []
 
@@ -84,7 +85,7 @@ async def get_locations():
                     int(item["Death"] or 0),
                 )
             )
-    LOGGER.info("csbs Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Return the locations.
     return locations
diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py
index 53aa6ff9..bd247113 100644
--- a/app/services/location/jhu.py
+++ b/app/services/location/jhu.py
@@ -3,6 +3,7 @@
 import logging
 import os
 from datetime import datetime
+from pprint import pformat as pf
 
 from asyncache import cached
 from cachetools import TTLCache
@@ -16,7 +17,7 @@ from . import LocationService
 
 LOGGER = logging.getLogger("services.location.jhu")
 
-
+PID = os.getpid()
 
 class JhuLocationService:
     """
@@ -53,20 +54,21 @@ async def get_category(category):
     """
     # Adhere to category naming standard.
     category = category.lower()
+    data_id = f"jhu.{category}"
 
     # URL to request data from.
     url = BASE_URL + "time_series_covid19_%s_global.csv" % category
 
     # Request the data
-    LOGGER.info(f"pid:{os.getpid()}: jhu Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(url) as response:
         text = await response.text()
 
-    LOGGER.info("jhu Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("jhu CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # The normalized locations.
     locations = []
@@ -99,18 +101,20 @@ async def get_category(category):
                 "latest": int(latest or 0),
             }
         )
-    LOGGER.info("jhu Data normalized")
+    LOGGER.debug(f"{data_id} Data normalized")
 
     # Latest total.
     latest = sum(map(lambda location: location["latest"], locations))
 
     # Return the final data.
-    return {
+    results = {
         "locations": locations,
         "latest": latest,
         "last_updated": datetime.utcnow().isoformat() + "Z",
         "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
     }
+    LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
+    return results
 
 
 @cached(cache=TTLCache(maxsize=1024, ttl=3600))
@@ -121,6 +125,8 @@ async def get_locations():
     :returns: The locations.
     :rtype: List[Location]
     """
+    data_id = "jhu.locations"
+    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
@@ -174,6 +180,7 @@ async def get_locations():
             },
         )
     )
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
diff --git a/app/services/location/nyt.py b/app/services/location/nyt.py
index a6435166..b33f5d3c 100644
--- a/app/services/location/nyt.py
+++ b/app/services/location/nyt.py
@@ -74,16 +74,17 @@ async def get_locations():
     :returns: The complete data for US Counties.
     :rtype: dict
     """
+    data_id = "nyt.locations"
    # Request the data.
-    LOGGER.info("nyt Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("nyt CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # Group together locations (NYT data ordered by dates not location).
     grouped_locations = get_grouped_locations_dict(data)
@@ -125,6 +126,6 @@ async def get_locations():
             },
         )
     )
-    LOGGER.info("nyt Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     return locations
diff --git a/app/utils/countries.py b/app/utils/countries.py
index d239b5ee..9fb4f98a 100644
--- a/app/utils/countries.py
+++ b/app/utils/countries.py
@@ -374,6 +374,7 @@ def country_code(value):
     """
     code = COUNTRY_NAME__COUNTRY_CODE.get(value, DEFAULT_COUNTRY_CODE)
     if code == DEFAULT_COUNTRY_CODE:
-        LOGGER.debug(f"No country code found for '{value}'. Using '{code}'!")
+        # log at sub DEBUG level
+        LOGGER.log(5, f"No country code found for '{value}'. Using '{code}'!")
     return code
 