Commit 52ef1cd

Author: codedawi
👍 fixing recovery data in api/v2 jhu
1 parent 375794e · commit 52ef1cd

File tree: 3 files changed, +50 −12 lines

app/services/location/jhu.py

Lines changed: 39 additions & 7 deletions
```diff
@@ -142,22 +142,31 @@ async def get_locations():
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
-    # recovered = await get_category("recovered")
+    recovered = await get_category("recovered")
 
     locations_confirmed = confirmed["locations"]
     locations_deaths = deaths["locations"]
-    # locations_recovered = recovered["locations"]
+    locations_recovered = recovered["locations"]
 
     # Final locations to return.
     locations = []
-
+    # *************************************************************************
+    # TODO: This iteration approach assumes the indexes remain the same and
+    # opens us to a CRITICAL ERROR. The removal of a column in the data source
+    # would break the API or SHIFT all the confirmed, deaths, and recovered
+    # data, producing incorrect data for consumers.
+    # *************************************************************************
     # Go through locations.
     for index, location in enumerate(locations_confirmed):
         # Get the timelines.
+
+        # TEMP: Fix for merging recovery data. See TODO above for more details.
+        key = (location["country"], location["province"])
+
         timelines = {
-            "confirmed": locations_confirmed[index]["history"],
-            "deaths": locations_deaths[index]["history"],
-            # 'recovered' : locations_recovered[index]['history'],
+            "confirmed": location["history"],
+            "deaths": parse_history(key, locations_deaths, index),
+            "recovered": parse_history(key, locations_recovered, index),
         }
 
         # Grab coordinates.
```
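To see the hazard the TODO block describes, consider what a purely positional merge does when one source list loses a row. A minimal sketch with hypothetical rows (not real JHU data):

```python
# Confirmed and deaths rows are expected to line up by index.
locations_confirmed = [
    {"country": "US", "province": "New York", "history": {"3/22/20": 100}},
    {"country": "US", "province": "Washington", "history": {"3/22/20": 50}},
]

# Suppose the deaths source dropped its New York row upstream.
locations_deaths = [
    {"country": "US", "province": "Washington", "history": {"3/22/20": 5}},
]

# A pure index-based merge would attach Washington's deaths to New York.
# The country/province key comparison used by parse_history() detects the
# mismatch at index 0 instead of propagating bad data:
index = 0
key = (locations_confirmed[index]["country"], locations_confirmed[index]["province"])
assert key != (locations_deaths[index]["country"], locations_deaths[index]["province"])
```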
```diff
@@ -188,11 +197,34 @@ async def get_locations():
                             for date, amount in timelines["deaths"].items()
                         }
                     ),
-                    "recovered": Timeline({}),
+                    "recovered": Timeline(
+                        {
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["recovered"].items()
+                        }
+                    ),
                 },
             )
         )
         LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
+
+
+def parse_history(key: tuple, locations: list, index: int):
+    """
+    Helper for validating and extracting history content from the locations
+    data based on index. Validates against the current country/province key
+    to make sure there is no index/column issue.
+
+    TEMP: solution until we implement a more efficient and better approach
+    in the refactor.
+    """
+    location_history = {}
+    try:
+        if key == (locations[index]["country"], locations[index]["province"]):
+            location_history = locations[index]["history"]
+    except (IndexError, KeyError):
+        LOGGER.warning(f"iteration data merge error: {index} {key}")
+
+    return location_history
```
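For reference, the recovered timeline now gets the same date normalization already applied to confirmed and deaths: JHU-style `%m/%d/%y` keys become ISO-8601 timestamps with a trailing `Z`. A standalone sketch with a made-up history dict:

```python
from datetime import datetime

# Hypothetical history keyed by JHU-style dates ("%m/%d/%y").
history = {"3/22/20": 7, "3/23/20": 9}

# Same comprehension shape as the Timeline construction above.
normalized = {
    datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
    for date, amount in history.items()
}

assert normalized == {"2020-03-22T00:00:00Z": 7, "2020-03-23T00:00:00Z": 9}
```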

tests/test_jhu.py

Lines changed: 8 additions & 0 deletions
```diff
@@ -22,3 +22,11 @@ async def test_get_locations(mock_client_session):
     # `jhu.get_locations()` creates id based on confirmed list
     location_confirmed = await jhu.get_category("confirmed")
     assert len(output) == len(location_confirmed["locations"])
+
+    # `jhu.get_locations()` creates id based on deaths list
+    location_deaths = await jhu.get_category("deaths")
+    assert len(output) == len(location_deaths["locations"])
+
+    # `jhu.get_locations()` creates id based on recovered list
+    location_recovered = await jhu.get_category("recovered")
+    assert len(output) == len(location_recovered["locations"])
```

tests/test_routes.py

Lines changed: 3 additions & 5 deletions
```diff
@@ -112,8 +112,7 @@ async def test_v2_locations(self):
         with open(filepath, "r") as file:
             expected_json_output = file.read()
 
-        # TODO: Why is this failing?
-        # assert return_data == json.loads(expected_json_output)
+        assert return_data == json.loads(expected_json_output)
 
     async def test_v2_locations_id(self):
         state = "locations"
@@ -130,8 +129,7 @@ async def test_v2_locations_id(self):
         with open(filepath, "r") as file:
             expected_json_output = file.read()
 
-        # TODO: Why is this failing?
-        # assert return_data == expected_json_output
+        assert return_data == json.loads(expected_json_output)
 
 
 @pytest.mark.asyncio
@@ -183,4 +181,4 @@ async def test_latest(async_api_client, query_params, mock_client_session):
 
     assert response.status_code == 200
     assert response_json["latest"]["confirmed"]
-    assert response_json["latest"]["deaths"]
+    assert response_json["latest"]["deaths"]
```
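Re-enabling these assertions with `json.loads` works because the comparison happens on parsed structures rather than raw strings, so fixture formatting and key order no longer matter. A small illustration with made-up fixture strings:

```python
import json

# Two hypothetical fixtures: identical JSON data, different formatting
# and key order.
compact = '{"confirmed": 10, "deaths": 2}'
pretty = '{\n  "deaths": 2,\n  "confirmed": 10\n}'

# Raw string comparison is brittle...
assert compact != pretty

# ...while comparing parsed objects ignores whitespace and key order.
assert json.loads(compact) == json.loads(pretty)
```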
