Skip to content

Commit f98447e

Browse files
Kilo59 and codedawi authored
partial recovery fix (#31)
* 👍 fixing recovery data in api/v2 jhu * 🚨 fix linting issue * ✅ unittest for parse val history Co-authored-by: codedawi <[email protected]>
1 parent 464453d commit f98447e

File tree

4 files changed

+67
-13
lines changed

4 files changed

+67
-13
lines changed

Procfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
web: gunicorn app.main:APP -w 2 --max-requests 1000 --max-requests-jitter 400 -k uvicorn.workers.UvicornWorker
1+
web: gunicorn app.main:APP -w 3 -k uvicorn.workers.UvicornWorker

app/services/location/jhu.py

Lines changed: 39 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -140,22 +140,31 @@ async def get_locations():
140140
# Get all of the data categories locations.
141141
confirmed = await get_category("confirmed")
142142
deaths = await get_category("deaths")
143-
# recovered = await get_category("recovered")
143+
recovered = await get_category("recovered")
144144

145145
locations_confirmed = confirmed["locations"]
146146
locations_deaths = deaths["locations"]
147-
# locations_recovered = recovered["locations"]
147+
locations_recovered = recovered["locations"]
148148

149149
# Final locations to return.
150150
locations = []
151-
151+
# ***************************************************************************
152+
# TODO: This iteration approach assumes the indexes remain the same
153+
# and opens us to a CRITICAL ERROR. The removal of a column in the data source
154+
# would break the API or SHIFT all the data confirmed, deaths, recovery producing
155+
# incorrect data to consumers.
156+
# ***************************************************************************
152157
# Go through locations.
153158
for index, location in enumerate(locations_confirmed):
154159
# Get the timelines.
160+
161+
# TEMP: Fix for merging recovery data. See TODO above for more details.
162+
key = (location["country"], location["province"])
163+
155164
timelines = {
156-
"confirmed": locations_confirmed[index]["history"],
157-
"deaths": locations_deaths[index]["history"],
158-
# 'recovered' : locations_recovered[index]['history'],
165+
"confirmed": location["history"],
166+
"deaths": parse_history(key, locations_deaths, index),
167+
"recovered": parse_history(key, locations_recovered, index),
159168
}
160169

161170
# Grab coordinates.
@@ -186,11 +195,34 @@ async def get_locations():
186195
for date, amount in timelines["deaths"].items()
187196
}
188197
),
189-
"recovered": Timeline({}),
198+
"recovered": Timeline(
199+
{
200+
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
201+
for date, amount in timelines["recovered"].items()
202+
}
203+
),
190204
},
191205
)
192206
)
193207
LOGGER.info(f"{data_id} Data normalized")
194208

195209
# Finally, return the locations.
196210
return locations
211+
212+
213+
def parse_history(key: tuple, locations: list, index: int):
214+
"""
215+
Helper for validating and extracting history content from
216+
locations data based on index. Validates with the current country/province
217+
key to make sure no index/column issue.
218+
219+
TEMP: solution until we implement a more efficient and better approach in the refactor.
220+
"""
221+
location_history = {}
222+
try:
223+
if key == (locations[index]["country"], locations[index]["province"]):
224+
location_history = locations[index]["history"]
225+
except (IndexError, KeyError):
226+
LOGGER.debug(f"iteration data merge error: {index} {key}")
227+
228+
return location_history

tests/test_jhu.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,3 +22,27 @@ async def test_get_locations(mock_client_session):
2222
# `jhu.get_locations()` creates id based on confirmed list
2323
location_confirmed = await jhu.get_category("confirmed")
2424
assert len(output) == len(location_confirmed["locations"])
25+
26+
# `jhu.get_locations()` creates id based on deaths list
27+
location_deaths = await jhu.get_category("deaths")
28+
assert len(output) == len(location_deaths["locations"])
29+
30+
# `jhu.get_locations()` creates id based on recovered list
31+
location_recovered = await jhu.get_category("recovered")
32+
assert len(output) == len(location_recovered["locations"])
33+
34+
35+
@pytest.mark.parametrize(
36+
"key, locations, index, expected",
37+
[
38+
(("Thailand", "TH"), [{"country": "Thailand", "province": "TH", "history": {"test": "yes"}}], 0, {"test": "yes"}), # Success
39+
(("Deutschland", "DE"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 1, {}), # IndexError
40+
(("US", "NJ"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 0, {}), # Invaid Key Merge
41+
],
42+
)
43+
def test_parse_history(key, locations, index, expected):
44+
"""
45+
Test validating and extracting history content from
46+
locations data based on index.
47+
"""
48+
assert jhu.parse_history(key, locations, index) == expected

tests/test_routes.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -112,8 +112,7 @@ async def test_v2_locations(self):
112112
with open(filepath, "r") as file:
113113
expected_json_output = file.read()
114114

115-
# TODO: Why is this failing?
116-
# assert return_data == json.loads(expected_json_output)
115+
assert return_data == json.loads(expected_json_output)
117116

118117
async def test_v2_locations_id(self):
119118
state = "locations"
@@ -132,8 +131,7 @@ async def test_v2_locations_id(self):
132131
with open(filepath, "r") as file:
133132
expected_json_output = file.read()
134133

135-
# TODO: Why is this failing?
136-
# assert return_data == expected_json_output
134+
assert return_data == json.loads(expected_json_output)
137135

138136

139137
@pytest.mark.asyncio
@@ -187,4 +185,4 @@ async def test_latest(async_api_client, query_params, mock_client_session):
187185

188186
assert response.status_code == 200
189187
assert response_json["latest"]["confirmed"]
190-
assert response_json["latest"]["deaths"]
188+
assert response_json["latest"]["deaths"]

0 commit comments

Comments
 (0)