
Commit 0d1ff74

add minimal logging for location services (#290)
1 parent a9f3b73 · commit 0d1ff74

File tree: 3 files changed, +21 −0 lines changed

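All three diffs below add the same minimal pattern: obtain a named logger inside the request coroutine and emit an INFO record at each stage (request sent, response received, CSV parsed, data normalized). A standalone sketch of that pattern, with a hypothetical fetch_csv coroutine standing in for the real aiohttp calls, might look like this:

import csv
import logging


async def get_locations_sketch(fetch_csv):
    """Hypothetical service coroutine; only the logging calls mirror this commit."""
    logger = logging.getLogger("services.location.example")

    logger.info("Requesting data...")
    text = await fetch_csv()  # the real modules await an aiohttp response here
    logger.info("Data received")

    data = list(csv.DictReader(text.splitlines()))
    logger.info("CSV parsed")

    locations = [dict(row) for row in data]  # placeholder for per-service normalization
    logger.info("Data normalized")
    return locations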

app/services/location/csbs.py

Lines changed: 7 additions & 0 deletions
@@ -1,5 +1,6 @@
 """app.services.location.csbs.py"""
 import csv
+import logging
 from datetime import datetime
 
 from asyncache import cached
@@ -39,10 +40,15 @@ async def get_locations():
     :returns: The locations.
     :rtype: dict
     """
+    logger = logging.getLogger("services.location.csbs")
+    logger.info("Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
+    logger.info("Data received")
+
     data = list(csv.DictReader(text.splitlines()))
+    logger.info("CSV parsed")
 
     locations = []
 
@@ -77,6 +83,7 @@ async def get_locations():
                 int(item["Death"] or 0),
             )
         )
+    logger.info("Data normalized")
 
     # Return the locations.
     return locations
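
These logger.info calls are only visible if the application configures logging at INFO level; Python's default "last resort" handler prints WARNING and above. A minimal sketch, assuming configuration happens at the application entry point (this commit does not add any logging configuration):

import logging

# Enable INFO output globally so records from "services.location.csbs"
# (and the other service loggers) are printed. Where this call belongs
# is an assumption; the commit itself does not include it.
logging.basicConfig(level=logging.INFO)

logging.getLogger("services.location.csbs").info("Requesting data...")
# With basicConfig's default format this prints:
# INFO:services.location.csbs:Requesting data...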

app/services/location/jhu.py

Lines changed: 7 additions & 0 deletions
@@ -1,5 +1,6 @@
 """app.services.location.jhu.py"""
 import csv
+import logging
 from datetime import datetime
 
 from asyncache import cached
@@ -47,6 +48,7 @@ async def get_category(category):
     :returns: The data for category.
     :rtype: dict
     """
+    logger = logging.getLogger("services.location.jhu")
 
     # Adhere to category naming standard.
     category = category.lower()
@@ -55,11 +57,15 @@ async def get_category(category):
     url = BASE_URL + "time_series_covid19_%s_global.csv" % category
 
     # Request the data
+    logger.info("Requesting data...")
     async with httputils.CLIENT_SESSION.get(url) as response:
         text = await response.text()
 
+    logger.info("Data received")
+
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
+    logger.info("CSV parsed")
 
     # The normalized locations.
     locations = []
@@ -92,6 +98,7 @@ async def get_category(category):
                 "latest": int(latest or 0),
             }
         )
+    logger.info("Data normalized")
 
     # Latest total.
     latest = sum(map(lambda location: location["latest"], locations))
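
Because the modules use dotted logger names with a shared "services.location" prefix, their records propagate up the logging hierarchy, so a single handler attached to the parent logger sees all of them. A minimal sketch of that setup (the handler and format are assumptions, not part of this commit):

import logging

# One handler on the shared parent logger; children such as
# "services.location.jhu" propagate their records to it by default.
parent = logging.getLogger("services.location")
parent.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(name)s - %(message)s"))
parent.addHandler(handler)

logging.getLogger("services.location.jhu").info("CSV parsed")
# Prints: services.location.jhu - CSV parsed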

app/services/location/nyt.py

Lines changed: 7 additions & 0 deletions
@@ -1,5 +1,6 @@
 """app.services.location.nyt.py"""
 import csv
+import logging
 from datetime import datetime
 
 from asyncache import cached
@@ -71,13 +72,18 @@ async def get_locations():
     :returns: The complete data for US Counties.
     :rtype: dict
     """
+    logger = logging.getLogger("services.location.nyt")
 
     # Request the data.
+    logger.info("Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
+    logger.info("Data received")
+
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
+    logger.info("CSV parsed")
 
     # Group together locations (NYT data ordered by dates not location).
     grouped_locations = get_grouped_locations_dict(data)
@@ -119,5 +125,6 @@ async def get_locations():
             },
         )
     )
+    logger.info("Data normalized")
 
     return locations
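
Since each stage logs a message, a timestamped log format turns the gap between "Requesting data..." and "Data received" into a rough measure of fetch time. A minimal sketch of such a format (the format string is an assumption; this commit does not configure it):

import logging

# Timestamped format so consecutive stage messages double as rough timings.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

log = logging.getLogger("services.location.nyt")
log.info("Requesting data...")
log.info("Data received")
# Illustrative output (timestamps will differ):
# 2020-04-01 12:00:00,000 INFO services.location.nyt: Requesting data...
# 2020-04-01 12:00:01,234 INFO services.location.nyt: Data received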
