Commit caae4f5

csbs to/from Redis
1 parent e1857b2 commit caae4f5


app/services/location/csbs.py

Lines changed: 55 additions & 44 deletions
@@ -6,6 +6,8 @@
 from asyncache import cached
 from cachetools import TTLCache
 
+
+from ...caches import check_cache, load_cache
 from ...coordinates import Coordinates
 from ...location.csbs import CSBSLocation
 from ...utils import httputils
@@ -34,7 +36,7 @@ async def get(self, loc_id):  # pylint: disable=arguments-differ
 BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
 
 
-@cached(cache=TTLCache(maxsize=1, ttl=3600))
+@cached(cache=TTLCache(maxsize=1, ttl=1800))
 async def get_locations():
     """
     Retrieves county locations; locations are cached for 1 hour
@@ -44,50 +46,59 @@ async def get_locations():
     """
     data_id = "csbs.locations"
     LOGGER.info(f"{data_id} Requesting data...")
-    async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-        text = await response.text()
-
-    LOGGER.debug(f"{data_id} Data received")
-
-    data = list(csv.DictReader(text.splitlines()))
-    LOGGER.debug(f"{data_id} CSV parsed")
-
-    locations = []
-
-    for i, item in enumerate(data):
-        # General info.
-        state = item["State Name"]
-        county = item["County Name"]
-
-        # Ensure country is specified.
-        if county in {"Unassigned", "Unknown"}:
-            continue
-
-        # Coordinates.
-        coordinates = Coordinates(
-            item["Latitude"], item["Longitude"]
-        )  # pylint: disable=unused-variable
-
-        # Date string without "EDT" at end.
-        last_update = " ".join(item["Last Update"].split(" ")[0:2])
-
-        # Append to locations.
-        locations.append(
-            CSBSLocation(
-                # General info.
-                i,
-                state,
-                county,
-                # Coordinates.
-                Coordinates(item["Latitude"], item["Longitude"]),
-                # Last update (parse as ISO).
-                datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                # Statistics.
-                int(item["Confirmed"] or 0),
-                int(item["Death"] or 0),
+    # check shared cache
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        locations = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+            text = await response.text()
+
+        LOGGER.debug(f"{data_id} Data received")
+
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")
+
+        locations = []
+
+        for i, item in enumerate(data):
+            # General info.
+            state = item["State Name"]
+            county = item["County Name"]
+
+            # Ensure country is specified.
+            if county in {"Unassigned", "Unknown"}:
+                continue
+
+            # Coordinates.
+            coordinates = Coordinates(
+                item["Latitude"], item["Longitude"]
+            )  # pylint: disable=unused-variable
+
+            # Date string without "EDT" at end.
+            last_update = " ".join(item["Last Update"].split(" ")[0:2])
+
+            # Append to locations.
+            locations.append(
+                CSBSLocation(
+                    # General info.
+                    i,
+                    state,
+                    county,
+                    # Coordinates.
+                    Coordinates(item["Latitude"], item["Longitude"]),
+                    # Last update (parse as ISO).
+                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+                    # Statistics.
+                    int(item["Confirmed"] or 0),
+                    int(item["Death"] or 0),
+                )
             )
-        )
-    LOGGER.info(f"{data_id} Data normalized")
+        LOGGER.info(f"{data_id} Data normalized")
+        # save the results to distributed cache
+        await load_cache(data_id, locations)
 
     # Return the locations.
     return locations
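
The diff depends on two helpers, check_cache and load_cache, imported from ...caches but not shown in this commit. A minimal sketch of what such helpers could look like follows, assuming they wrap an async Redis client and pickle the normalized location list under the data_id key; the redis.asyncio client, the REDIS_URL environment variable, and the CACHE_TTL value are illustrative assumptions, not the repository's actual app/caches.py.

    # Hypothetical sketch of check_cache/load_cache backed by Redis (assumed
    # implementation; the project may use a different client or serialization).
    import os
    import pickle
    from typing import Any, Optional

    import redis.asyncio as redis  # assumes redis-py >= 4.2

    REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")  # assumed env var
    CACHE_TTL = 3600  # assumed shared-cache expiry in seconds

    _client = redis.from_url(REDIS_URL)


    async def check_cache(data_id: str) -> Optional[Any]:
        """Return the cached object stored under data_id, or None if missing/expired."""
        raw = await _client.get(data_id)
        if raw is None:
            return None
        return pickle.loads(raw)


    async def load_cache(data_id: str, data: Any) -> None:
        """Serialize data and store it in Redis under data_id with a TTL."""
        await _client.set(data_id, pickle.dumps(data), ex=CACHE_TTL)

Under that reading, the @cached(cache=TTLCache(maxsize=1, ttl=1800)) decorator still memoizes get_locations() inside a single process for 30 minutes, while the Redis layer lets multiple workers share one fetch of the CSBS CSV.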
