
Commit 0719d24

added adapter design pattern for location services
1 parent 1c7e4ae commit 0719d24

4 files changed: +257 additions, -231 deletions


app/services/location/csbs.py

Lines changed: 67 additions & 67 deletions

@@ -30,73 +30,73 @@ async def get(self, loc_id):  # pylint: disable=arguments-differ
         locations = await self.get_all()
         return locations[loc_id]

+@cached(cache=TTLCache(maxsize=1, ttl=1800))
+async def get_locations():
+    """
+    Retrieves county locations; locations are cached for 1 hour
+
+    :returns: The locations.
+    :rtype: dict
+    """
+    data_id = "csbs.locations"
+    LOGGER.info(f"{data_id} Requesting data...")
+    # check shared cache
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        locations = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+            text = await response.text()
+
+        LOGGER.debug(f"{data_id} Data received")
+
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")
+
+        locations = []
+
+        for i, item in enumerate(data):
+            # General info.
+            state = item["State Name"]
+            county = item["County Name"]
+
+            # Ensure country is specified.
+            if county in {"Unassigned", "Unknown"}:
+                continue
+
+            # Date string without "EDT" at end.
+            last_update = " ".join(item["Last Update"].split(" ")[0:2])
+
+            # Append to locations.
+            locations.append(
+                CSBSLocation(
+                    # General info.
+                    i,
+                    state,
+                    county,
+                    # Coordinates.
+                    Coordinates(item["Latitude"], item["Longitude"]),
+                    # Last update (parse as ISO).
+                    datetime.strptime(
+                        last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+                    # Statistics.
+                    int(item["Confirmed"] or 0),
+                    int(item["Death"] or 0),
+                )
+            )
+        LOGGER.info(f"{data_id} Data normalized")
+        # save the results to distributed cache
+        # TODO: fix json serialization
+        try:
+            await load_cache(data_id, locations)
+        except TypeError as type_err:
+            LOGGER.error(type_err)
+
+    # Return the locations.
+    return locations
+

 # Base URL for fetching data
 BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
-
-
-@cached(cache=TTLCache(maxsize=1, ttl=1800))
-async def get_locations():
-    """
-    Retrieves county locations; locations are cached for 1 hour
-
-    :returns: The locations.
-    :rtype: dict
-    """
-    data_id = "csbs.locations"
-    LOGGER.info(f"{data_id} Requesting data...")
-    # check shared cache
-    cache_results = await check_cache(data_id)
-    if cache_results:
-        LOGGER.info(f"{data_id} using shared cache results")
-        locations = cache_results
-    else:
-        LOGGER.info(f"{data_id} shared cache empty")
-        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-            text = await response.text()
-
-        LOGGER.debug(f"{data_id} Data received")
-
-        data = list(csv.DictReader(text.splitlines()))
-        LOGGER.debug(f"{data_id} CSV parsed")
-
-        locations = []
-
-        for i, item in enumerate(data):
-            # General info.
-            state = item["State Name"]
-            county = item["County Name"]
-
-            # Ensure country is specified.
-            if county in {"Unassigned", "Unknown"}:
-                continue
-
-            # Date string without "EDT" at end.
-            last_update = " ".join(item["Last Update"].split(" ")[0:2])
-
-            # Append to locations.
-            locations.append(
-                CSBSLocation(
-                    # General info.
-                    i,
-                    state,
-                    county,
-                    # Coordinates.
-                    Coordinates(item["Latitude"], item["Longitude"]),
-                    # Last update (parse as ISO).
-                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                    # Statistics.
-                    int(item["Confirmed"] or 0),
-                    int(item["Death"] or 0),
-                )
-            )
-        LOGGER.info(f"{data_id} Data normalized")
-        # save the results to distributed cache
-        # TODO: fix json serialization
-        try:
-            await load_cache(data_id, locations)
-        except TypeError as type_err:
-            LOGGER.error(type_err)
-
-    # Return the locations.
-    return locations
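
Note: the relocated get_locations() keeps two caching layers: an in-process TTLCache (via the @cached decorator, ttl=1800 seconds, i.e. 30 minutes) plus a shared cache consulted through check_cache/load_cache. Below is a minimal, self-contained sketch of the in-process layer only, assuming cachetools is installed; _fetch_locations is a hypothetical stand-in for the CSV download and normalization shown in the diff, not the module's actual helper.

import asyncio

from cachetools import TTLCache

_cache = TTLCache(maxsize=1, ttl=1800)  # one entry, expires after 30 minutes


async def _fetch_locations():
    # Hypothetical stand-in for downloading and parsing the CSBS CSV.
    await asyncio.sleep(0)
    return [{"county": "Example", "confirmed": 0, "deaths": 0}]


async def get_locations_cached():
    # Serve the cached list while the TTL entry is still fresh.
    if "locations" in _cache:
        return _cache["locations"]
    locations = await _fetch_locations()
    _cache["locations"] = locations
    return locations


if __name__ == "__main__":
    print(asyncio.run(get_locations_cached()))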

app/services/location/jhu.py

Lines changed: 88 additions & 86 deletions

@@ -36,6 +36,89 @@ async def get(self, loc_id):  # pylint: disable=arguments-differ
         locations = await self.get_all()
         return locations[loc_id]

+@cached(cache=TTLCache(maxsize=1, ttl=1800))
+async def get_locations():
+    """
+    Retrieves the locations from the categories. The locations are cached for 1 hour.
+
+    :returns: The locations.
+    :rtype: List[Location]
+    """
+    data_id = "jhu.locations"
+    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
+    # Get all of the data categories locations.
+    confirmed = await get_category("confirmed")
+    deaths = await get_category("deaths")
+    recovered = await get_category("recovered")
+
+    locations_confirmed = confirmed["locations"]
+    locations_deaths = deaths["locations"]
+    locations_recovered = recovered["locations"]
+
+    # Final locations to return.
+    locations = []
+    # ***************************************************************************
+    # TODO: This iteration approach assumes the indexes remain the same
+    # and opens us to a CRITICAL ERROR. The removal of a column in the data source
+    # would break the API or SHIFT all the data confirmed, deaths, recovery producting
+    # incorrect data to consumers.
+    # ***************************************************************************
+    # Go through locations.
+    for index, location in enumerate(locations_confirmed):
+        # Get the timelines.
+
+        # TEMP: Fix for merging recovery data. See TODO above for more details.
+        key = (location["country"], location["province"])
+
+        timelines = {
+            "confirmed": location["history"],
+            "deaths": parse_history(key, locations_deaths, index),
+            "recovered": parse_history(key, locations_recovered, index),
+        }
+
+        # Grab coordinates.
+        coordinates = location["coordinates"]
+
+        # Create location (supporting timelines) and append.
+        locations.append(
+            TimelinedLocation(
+                # General info.
+                index,
+                location["country"],
+                location["province"],
+                # Coordinates.
+                Coordinates(
+                    latitude=coordinates["lat"], longitude=coordinates["long"]),
+                # Last update.
+                datetime.utcnow().isoformat() + "Z",
+                # Timelines (parse dates as ISO).
+                {
+                    "confirmed": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["confirmed"].items()
+                        }
+                    ),
+                    "deaths": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["deaths"].items()
+                        }
+                    ),
+                    "recovered": Timeline(
+                        timeline={
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["recovered"].items()
+                        }
+                    ),
+                },
+            )
+        )
+    LOGGER.info(f"{data_id} Data normalized")
+
+    # Finally, return the locations.
+    return locations
+

 # ---------------------------------------------------------------


@@ -82,10 +165,12 @@ async def get_category(category):

     for item in data:
         # Filter out all the dates.
-        dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))
+        dates = dict(
+            filter(lambda element: date_util.is_date(element[0]), item.items()))

         # Make location history from dates.
-        history = {date: int(float(amount or 0)) for date, amount in dates.items()}
+        history = {date: int(float(amount or 0))
+                   for date, amount in dates.items()}

         # Country for this location.
         country = item["Country/Region"]

@@ -101,7 +186,7 @@ async def get_category(category):
                 "country_code": countries.country_code(country),
                 "province": item["Province/State"],
                 # Coordinates.
-                "coordinates": {"lat": item["Lat"], "long": item["Long"],},
+                "coordinates": {"lat": item["Lat"], "long": item["Long"], },
                 # History.
                 "history": history,
                 # Latest statistic.

@@ -127,89 +212,6 @@ async def get_category(category):
     return results


-@cached(cache=TTLCache(maxsize=1, ttl=1800))
-async def get_locations():
-    """
-    Retrieves the locations from the categories. The locations are cached for 1 hour.
-
-    :returns: The locations.
-    :rtype: List[Location]
-    """
-    data_id = "jhu.locations"
-    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
-    # Get all of the data categories locations.
-    confirmed = await get_category("confirmed")
-    deaths = await get_category("deaths")
-    recovered = await get_category("recovered")
-
-    locations_confirmed = confirmed["locations"]
-    locations_deaths = deaths["locations"]
-    locations_recovered = recovered["locations"]
-
-    # Final locations to return.
-    locations = []
-    # ***************************************************************************
-    # TODO: This iteration approach assumes the indexes remain the same
-    # and opens us to a CRITICAL ERROR. The removal of a column in the data source
-    # would break the API or SHIFT all the data confirmed, deaths, recovery producting
-    # incorrect data to consumers.
-    # ***************************************************************************
-    # Go through locations.
-    for index, location in enumerate(locations_confirmed):
-        # Get the timelines.
-
-        # TEMP: Fix for merging recovery data. See TODO above for more details.
-        key = (location["country"], location["province"])
-
-        timelines = {
-            "confirmed": location["history"],
-            "deaths": parse_history(key, locations_deaths, index),
-            "recovered": parse_history(key, locations_recovered, index),
-        }
-
-        # Grab coordinates.
-        coordinates = location["coordinates"]
-
-        # Create location (supporting timelines) and append.
-        locations.append(
-            TimelinedLocation(
-                # General info.
-                index,
-                location["country"],
-                location["province"],
-                # Coordinates.
-                Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
-                # Last update.
-                datetime.utcnow().isoformat() + "Z",
-                # Timelines (parse dates as ISO).
-                {
-                    "confirmed": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["confirmed"].items()
-                        }
-                    ),
-                    "deaths": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["deaths"].items()
-                        }
-                    ),
-                    "recovered": Timeline(
-                        timeline={
-                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
-                            for date, amount in timelines["recovered"].items()
-                        }
-                    ),
-                },
-            )
-        )
-    LOGGER.info(f"{data_id} Data normalized")
-
-    # Finally, return the locations.
-    return locations
-
-
 def parse_history(key: tuple, locations: list, index: int):
     """
     Helper for validating and extracting history content from
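
Note: the TODO in get_locations() flags the risk of merging the confirmed/deaths/recovered lists by positional index; parse_history mitigates this by validating the (country, province) key before trusting the index. Below is a minimal, self-contained sketch of merging purely by key instead; the dict shapes and function names are illustrative, not the module's actual API.

from typing import Dict, List, Tuple

Key = Tuple[str, str]  # (country, province)


def history_by_key(locations: List[dict]) -> Dict[Key, dict]:
    # Index each category's entries by (country, province) instead of list position.
    return {(loc["country"], loc["province"]): loc["history"] for loc in locations}


def merge_timelines(confirmed: List[dict], deaths: List[dict], recovered: List[dict]) -> List[dict]:
    deaths_idx = history_by_key(deaths)
    recovered_idx = history_by_key(recovered)
    merged = []
    for loc in confirmed:
        key = (loc["country"], loc["province"])
        merged.append({
            "key": key,
            "confirmed": loc["history"],
            # A missing key falls back to an empty history instead of shifted data.
            "deaths": deaths_idx.get(key, {}),
            "recovered": recovered_idx.get(key, {}),
        })
    return merged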
Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+from services.location import LocationService
+
+"""
+Implement Adapter method for LocationService
+"""
+
+
+class Adapter (LocationService):
+    # def __init__(self):
+    #     self.locations
+
+    # async def get_locations(self):
+    #     self.locations = await LocationService.get_locations()
+
+    async def get_all(self):
+        # Get the locations.
+        locations = await LocationService.get_locations()
+        return locations
+
+    async def get(self, loc_id):  # pylint: disable=arguments-differ
+        # Get location at the index equal to the provided id.
+        locations = await LocationService.get_locations()
+        return locations[loc_id]
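
Note: the new class adapts a get_locations() coroutine (as csbs.py and jhu.py now expose at module level) to the get_all()/get(loc_id) interface that callers of LocationService expect. Below is a minimal, self-contained sketch of the same idea with the dependency passed in explicitly; the names are illustrative, not the project's API.

import asyncio


async def example_get_locations():
    # Hypothetical stand-in for a source-specific fetcher such as csbs.get_locations().
    return ["location-0", "location-1"]


class LocationServiceAdapter:
    """Adapts a get_locations() coroutine to the get_all()/get() interface."""

    def __init__(self, fetcher):
        self._fetcher = fetcher

    async def get_all(self):
        # Delegate to the wrapped fetcher.
        return await self._fetcher()

    async def get(self, loc_id):
        # Index into the fetched list with the provided id.
        locations = await self._fetcher()
        return locations[loc_id]


if __name__ == "__main__":
    adapter = LocationServiceAdapter(example_get_locations)
    print(asyncio.run(adapter.get(1)))  # -> "location-1"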
