From a93a996d248c9684274670e3fe17150d9b930a39 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sun, 26 Apr 2020 10:30:19 -0400 Subject: [PATCH 1/5] import datetime as dt avoids confusion between datetime module and datetime object --- app/services/location/jhu.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index 53aa6ff9..991b16dc 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -1,8 +1,8 @@ """app.services.location.jhu.py""" import csv +import datetime as dt import logging import os -from datetime import datetime from asyncache import cached from cachetools import TTLCache @@ -108,7 +108,7 @@ async def get_category(category): return { "locations": locations, "latest": latest, - "last_updated": datetime.utcnow().isoformat() + "Z", + "last_updated": dt.datetime.utcnow().isoformat() + "Z", "source": "https://github.com/ExpDev07/coronavirus-tracker-api", } @@ -155,18 +155,18 @@ async def get_locations(): # Coordinates. Coordinates(coordinates["lat"], coordinates["long"]), # Last update. - datetime.utcnow().isoformat() + "Z", + dt.datetime.utcnow().isoformat() + "Z", # Timelines (parse dates as ISO). 
{ "confirmed": Timeline( { - datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + dt.datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount for date, amount in timelines["confirmed"].items() } ), "deaths": Timeline( { - datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + dt.datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount for date, amount in timelines["deaths"].items() } ), From f2b54cc407f042824d676227c74c88b145b8cd62 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sun, 26 Apr 2020 11:19:24 -0400 Subject: [PATCH 2/5] add & log a data_id string for each cache move some messages to debug level log process id for location get not category get --- app/services/location/csbs.py | 9 +++++---- app/services/location/jhu.py | 19 +++++++++++++------ app/services/location/nyt.py | 9 +++++---- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py index d660269c..68bdb01c 100644 --- a/app/services/location/csbs.py +++ b/app/services/location/csbs.py @@ -42,14 +42,15 @@ async def get_locations(): :returns: The locations. :rtype: dict """ - LOGGER.info("csbs Requesting data...") + data_id = "csbs.locations" + LOGGER.info(f"{data_id} Requesting data...") async with httputils.CLIENT_SESSION.get(BASE_URL) as response: text = await response.text() - LOGGER.info("csbs Data received") + LOGGER.debug(f"{data_id} Data received") data = list(csv.DictReader(text.splitlines())) - LOGGER.info("csbs CSV parsed") + LOGGER.debug(f"{data_id} CSV parsed") locations = [] @@ -84,7 +85,7 @@ async def get_locations(): int(item["Death"] or 0), ) ) - LOGGER.info("csbs Data normalized") + LOGGER.info(f"{data_id} Data normalized") # Return the locations. 
return locations diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index 991b16dc..ab369eb5 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -3,6 +3,7 @@ import datetime as dt import logging import os +from pprint import pformat as pf from asyncache import cached from cachetools import TTLCache @@ -16,7 +17,7 @@ from . import LocationService LOGGER = logging.getLogger("services.location.jhu") - +PID = os.getpid() class JhuLocationService(LocationService): """ @@ -53,20 +54,21 @@ async def get_category(category): """ # Adhere to category naming standard. category = category.lower() + data_id = f"jhu.{category}" # URL to request data from. url = BASE_URL + "time_series_covid19_%s_global.csv" % category # Request the data - LOGGER.info(f"pid:{os.getpid()}: jhu Requesting data...") + LOGGER.info(f"{data_id} Requesting data...") async with httputils.CLIENT_SESSION.get(url) as response: text = await response.text() - LOGGER.info("jhu Data received") + LOGGER.debug(f"{data_id} Data received") # Parse the CSV. data = list(csv.DictReader(text.splitlines())) - LOGGER.info("jhu CSV parsed") + LOGGER.debug(f"{data_id} CSV parsed") # The normalized locations. locations = [] @@ -99,18 +101,20 @@ async def get_category(category): "latest": int(latest or 0), } ) - LOGGER.info("jhu Data normalized") + LOGGER.debug(f"{data_id} Data normalized") # Latest total. latest = sum(map(lambda location: location["latest"], locations)) # Return the final data. - return { + results = { "locations": locations, "latest": latest, "last_updated": dt.datetime.utcnow().isoformat() + "Z", "source": "https://github.com/ExpDev07/coronavirus-tracker-api", } + LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}") + return results @cached(cache=TTLCache(maxsize=1024, ttl=3600)) @@ -121,6 +125,8 @@ async def get_locations(): :returns: The locations. 
:rtype: List[Location] """ + data_id = "jhu.locations" + LOGGER.info(f"pid:{PID}: {data_id} Requesting data...") # Get all of the data categories locations. confirmed = await get_category("confirmed") deaths = await get_category("deaths") @@ -174,6 +180,7 @@ async def get_locations(): }, ) ) + LOGGER.info(f"{data_id} Data normalized") # Finally, return the locations. return locations diff --git a/app/services/location/nyt.py b/app/services/location/nyt.py index a6435166..b33f5d3c 100644 --- a/app/services/location/nyt.py +++ b/app/services/location/nyt.py @@ -74,16 +74,17 @@ async def get_locations(): :returns: The complete data for US Counties. :rtype: dict """ + data_id = "nyt.locations" # Request the data. - LOGGER.info("nyt Requesting data...") + LOGGER.info(f"{data_id} Requesting data...") async with httputils.CLIENT_SESSION.get(BASE_URL) as response: text = await response.text() - LOGGER.info("Data received") + LOGGER.debug(f"{data_id} Data received") # Parse the CSV. data = list(csv.DictReader(text.splitlines())) - LOGGER.info("nyt CSV parsed") + LOGGER.debug(f"{data_id} CSV parsed") # Group together locations (NYT data ordered by dates not location). 
grouped_locations = get_grouped_locations_dict(data) @@ -125,6 +126,6 @@ async def get_locations(): }, ) ) - LOGGER.info("nyt Data normalized") + LOGGER.info(f"{data_id} Data normalized") return locations From 27a0f1ef7c75823ebfc3164fa455bb336111bcdd Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sun, 26 Apr 2020 11:20:53 -0400 Subject: [PATCH 3/5] dev mode should run with debug log level --- Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Pipfile b/Pipfile index 9a0839af..c28a067c 100644 --- a/Pipfile +++ b/Pipfile @@ -36,7 +36,7 @@ uvicorn = "*" python_version = "3.8" [scripts] -dev = "uvicorn app.main:APP --reload" +dev = "uvicorn app.main:APP --reload --log-level=debug" start = "uvicorn app.main:APP" fmt = "invoke fmt" sort = "invoke sort" From cb4bbc082f7d5e86a717dd5a08e6f89cde87a127 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sun, 26 Apr 2020 11:22:02 -0400 Subject: [PATCH 4/5] log missing country code at sub debug level debug is `10` --- app/utils/countries.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/utils/countries.py b/app/utils/countries.py index d239b5ee..9fb4f98a 100644 --- a/app/utils/countries.py +++ b/app/utils/countries.py @@ -374,6 +374,7 @@ def country_code(value): """ code = COUNTRY_NAME__COUNTRY_CODE.get(value, DEFAULT_COUNTRY_CODE) if code == DEFAULT_COUNTRY_CODE: - LOGGER.debug(f"No country code found for '{value}'. Using '{code}'!") + # log at sub DEBUG level + LOGGER.log(5, f"No country code found for '{value}'. 
Using '{code}'!") return code From a02af8a658b1b7b910c28d35794dc801fed1f822 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sun, 26 Apr 2020 11:41:52 -0400 Subject: [PATCH 5/5] rollback datetime import change --- app/services/location/jhu.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index ab369eb5..bd247113 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -1,8 +1,8 @@ """app.services.location.jhu.py""" import csv -import datetime as dt import logging import os +from datetime import datetime from pprint import pformat as pf from asyncache import cached @@ -110,7 +110,7 @@ async def get_category(category): results = { "locations": locations, "latest": latest, - "last_updated": dt.datetime.utcnow().isoformat() + "Z", + "last_updated": datetime.utcnow().isoformat() + "Z", "source": "https://github.com/ExpDev07/coronavirus-tracker-api", } LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}") @@ -161,18 +161,18 @@ async def get_locations(): # Coordinates. Coordinates(coordinates["lat"], coordinates["long"]), # Last update. - dt.datetime.utcnow().isoformat() + "Z", + datetime.utcnow().isoformat() + "Z", # Timelines (parse dates as ISO). { "confirmed": Timeline( { - dt.datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount for date, amount in timelines["confirmed"].items() } ), "deaths": Timeline( { - dt.datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount for date, amount in timelines["deaths"].items() } ),