
Commit 9e8d12b

Improve logging details (#305)
* add & log a data_id string for each cache
* move some messages to debug level
* log process id for location get, not category get
* dev mode should run with debug log level
* log missing country code at sub-DEBUG level (DEBUG is `10`)
1 parent 05e67bd commit 9e8d12b

File tree: 5 files changed (+26, -16 lines)


Pipfile

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ uvicorn = "*"
 python_version = "3.8"
 
 [scripts]
-dev = "uvicorn app.main:APP --reload"
+dev = "uvicorn app.main:APP --reload --log-level=debug"
 start = "uvicorn app.main:APP"
 fmt = "invoke fmt"
 sort = "invoke sort"
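Reviewer note: this flag is what makes the demoted messages below visible in dev. Python's logging drops any record below the configured threshold, so the new LOGGER.debug(...) calls stay silent at the default info level. A minimal sketch of that gate (illustrative, not part of the commit):

import logging

logging.basicConfig(level=logging.INFO)  # production-style threshold
logger = logging.getLogger("services.location.csbs")

logger.debug("csbs.locations CSV parsed")      # DEBUG (10) < INFO (20): dropped
logger.info("csbs.locations Data normalized")  # INFO: emitted

Re-run with level=logging.DEBUG and both lines appear.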

app/services/location/csbs.py

Lines changed: 5 additions & 4 deletions
@@ -42,14 +42,15 @@ async def get_locations():
     :returns: The locations.
     :rtype: dict
     """
-    LOGGER.info("csbs Requesting data...")
+    data_id = "csbs.locations"
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("csbs Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("csbs CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     locations = []
 
@@ -84,7 +85,7 @@ async def get_locations():
                 int(item["Death"] or 0),
             )
         )
-    LOGGER.info("csbs Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Return the locations.
     return locations
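The commit tags every message by repeating an f-string prefix. For reference, the standard library can attach the same tag once via logging.LoggerAdapter; a sketch of that alternative (illustrative only, not what this commit does):

import logging

# The format references %(data_id)s, so only adapter-tagged records fit it.
logging.basicConfig(format="%(data_id)s %(levelname)s %(message)s", level=logging.DEBUG)

base = logging.getLogger("services.location.csbs")
log = logging.LoggerAdapter(base, {"data_id": "csbs.locations"})

log.info("Requesting data...")  # -> csbs.locations INFO Requesting data...
log.debug("CSV parsed")         # -> csbs.locations DEBUG CSV parsed

The plain-prefix approach taken here keeps the tag greppable without touching the shared log format.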

app/services/location/jhu.py

Lines changed: 13 additions & 6 deletions
@@ -3,6 +3,7 @@
 import logging
 import os
 from datetime import datetime
+from pprint import pformat as pf
 
 from asyncache import cached
 from cachetools import TTLCache
@@ -16,7 +17,7 @@
 from . import LocationService
 
 LOGGER = logging.getLogger("services.location.jhu")
-
+PID = os.getpid()
 
 class JhuLocationService(LocationService):
     """
@@ -53,20 +54,21 @@ async def get_category(category):
     """
     # Adhere to category naming standard.
     category = category.lower()
+    data_id = f"jhu.{category}"
 
     # URL to request data from.
     url = BASE_URL + "time_series_covid19_%s_global.csv" % category
 
     # Request the data
-    LOGGER.info(f"pid:{os.getpid()}: jhu Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(url) as response:
         text = await response.text()
 
-    LOGGER.info("jhu Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("jhu CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # The normalized locations.
     locations = []
@@ -99,18 +101,20 @@ async def get_category(category):
                 "latest": int(latest or 0),
             }
         )
-    LOGGER.info("jhu Data normalized")
+    LOGGER.debug(f"{data_id} Data normalized")
 
     # Latest total.
     latest = sum(map(lambda location: location["latest"], locations))
 
     # Return the final data.
-    return {
+    results = {
         "locations": locations,
         "latest": latest,
        "last_updated": datetime.utcnow().isoformat() + "Z",
         "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
     }
+    LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
+    return results
 
 
 @cached(cache=TTLCache(maxsize=1024, ttl=3600))
@@ -121,6 +125,8 @@ async def get_locations():
     :returns: The locations.
     :rtype: List[Location]
     """
+    data_id = "jhu.locations"
+    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
@@ -174,6 +180,7 @@ async def get_locations():
             },
         )
     )
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
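pformat(results, depth=1) keeps the new summary line compact: containers nested deeper than one level are elided instead of dumped in full. A quick illustration with sample data (values made up, not from the repo):

from pprint import pformat as pf

results = {
    "locations": [{"id": 0, "country": "Thailand"}, {"id": 1, "country": "Japan"}],
    "latest": 167,
    "last_updated": "2020-04-11T00:00:00Z",
}
print(pf(results, depth=1))
# {'last_updated': '2020-04-11T00:00:00Z', 'latest': 167, 'locations': [...]}

Note also that PID is captured once at import time, so each worker process reports its own pid, and per the commit message the pid is now logged in the cached get_locations() rather than on every get_category() call.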

app/services/location/nyt.py

Lines changed: 5 additions & 4 deletions
@@ -74,16 +74,17 @@ async def get_locations():
     :returns: The complete data for US Counties.
     :rtype: dict
     """
+    data_id = "nyt.locations"
     # Request the data.
-    LOGGER.info("nyt Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("nyt CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # Group together locations (NYT data ordered by dates not location).
     grouped_locations = get_grouped_locations_dict(data)
@@ -125,6 +126,6 @@ async def get_locations():
             },
         )
     )
-    LOGGER.info("nyt Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     return locations
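Since the data_id tag is plain message text, records for one source can also be routed with a logging.Filter; a small sketch (illustrative, not in the repo):

import logging

class DataIdFilter(logging.Filter):
    """Keep only records whose message starts with the given tag."""

    def __init__(self, data_id):
        super().__init__()
        self.data_id = data_id

    def filter(self, record):
        return record.getMessage().startswith(self.data_id)

handler = logging.StreamHandler()
handler.addFilter(DataIdFilter("nyt.locations"))
logging.basicConfig(level=logging.DEBUG, handlers=[handler])

logging.getLogger("services.location.nyt").info("nyt.locations Data normalized")  # emitted
logging.getLogger("services.location.jhu").info("jhu.locations Data normalized")  # filtered out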

app/utils/countries.py

Lines changed: 2 additions & 1 deletion
@@ -374,6 +374,7 @@ def country_code(value):
     """
     code = COUNTRY_NAME__COUNTRY_CODE.get(value, DEFAULT_COUNTRY_CODE)
     if code == DEFAULT_COUNTRY_CODE:
-        LOGGER.debug(f"No country code found for '{value}'. Using '{code}'!")
+        # log at sub DEBUG level
+        LOGGER.log(5, f"No country code found for '{value}'. Using '{code}'!")
 
     return code
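Level 5 sits below DEBUG (10), so even dev mode's --log-level=debug keeps this very chatty message quiet; it only shows up when logging is explicitly configured below 10. A sketch of opting in (the TRACE name and sample values are illustrative, not defined by this commit):

import logging

TRACE = 5  # numerically below logging.DEBUG, which is 10
logging.addLevelName(TRACE, "TRACE")
logging.basicConfig(level=TRACE)  # opt in below DEBUG

LOGGER = logging.getLogger("utils.countries")
LOGGER.log(TRACE, "No country code found for 'Atlantis'. Using 'XX'!")
# TRACE:utils.countries:No country code found for 'Atlantis'. Using 'XX'!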
