2 changes: 1 addition & 1 deletion Procfile
@@ -1 +1 @@
web: gunicorn app.main:APP -w 2 --max-requests 1000 --max-requests-jitter 400 -k uvicorn.workers.UvicornWorker
web: gunicorn app.main:APP -w 3 -k uvicorn.workers.UvicornWorker
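The new Procfile drops the `--max-requests`/`--max-requests-jitter` worker-recycling flags and raises the worker count from 2 to 3; `-k uvicorn.workers.UvicornWorker` keeps the ASGI worker class. If these settings ever need to move out of the Procfile, gunicorn can also read them from a Python config file. A minimal sketch, assuming a `gunicorn.conf.py` next to the app (gunicorn reads that filename from the working directory by default):

```python
# gunicorn.conf.py -- hypothetical equivalent of the new Procfile entry.
workers = 3                                     # same as -w 3
worker_class = "uvicorn.workers.UvicornWorker"  # same as -k uvicorn.workers.UvicornWorker
```

With that file in place, the Procfile entry could shrink to `web: gunicorn app.main:APP`.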
46 changes: 39 additions & 7 deletions app/services/location/jhu.py
@@ -140,22 +140,31 @@ async def get_locations():
# Get all of the data categories locations.
confirmed = await get_category("confirmed")
deaths = await get_category("deaths")
# recovered = await get_category("recovered")
recovered = await get_category("recovered")

locations_confirmed = confirmed["locations"]
locations_deaths = deaths["locations"]
# locations_recovered = recovered["locations"]
locations_recovered = recovered["locations"]

# Final locations to return.
locations = []

# ***************************************************************************
# TODO: This iteration approach assumes the indexes remain the same
# and opens us to a CRITICAL ERROR. The removal of a column in the data source
# would break the API or SHIFT all of the confirmed, deaths, and recovered
# data, producing incorrect data for consumers.
# ***************************************************************************
# Go through locations.
for index, location in enumerate(locations_confirmed):
# Get the timelines.

# TEMP: Fix for merging recovery data. See TODO above for more details.
key = (location["country"], location["province"])

timelines = {
"confirmed": locations_confirmed[index]["history"],
"deaths": locations_deaths[index]["history"],
# 'recovered' : locations_recovered[index]['history'],
"confirmed": location["history"],
"deaths": parse_history(key, locations_deaths, index),
"recovered": parse_history(key, locations_recovered, index),
}

# Grab coordinates.
@@ -186,11 +195,34 @@ async def get_locations():
for date, amount in timelines["deaths"].items()
}
),
"recovered": Timeline({}),
"recovered": Timeline(
{
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
for date, amount in timelines["recovered"].items()
}
),
},
)
)
LOGGER.info(f"{data_id} Data normalized")

# Finally, return the locations.
return locations


def parse_history(key: tuple, locations: list, index: int) -> dict:
"""
Helper for validating and extracting history content from
locations data based on index. Validates against the current country/province
key to guard against index/column mismatches.

TEMP: stopgap until a more efficient approach lands in the refactor.
"""
location_history = {}
try:
if key == (locations[index]["country"], locations[index]["province"]):
location_history = locations[index]["history"]
except (IndexError, KeyError):
LOGGER.debug(f"iteration data merge error: {index} {key}")

return location_history
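The TODO above names the core risk: the three category lists are merged positionally, so a dropped or reordered column upstream would silently attach the wrong history to a location. `parse_history` guards against that by returning an empty history whenever the record at `index` does not carry the expected `(country, province)` key. A minimal sketch of the index-free merge the refactor could move to — the `(country, province)` key shape and the `"history"` field come from this diff, while the function name and structure are illustrative:

```python
def merge_histories(confirmed: list, deaths: list, recovered: list) -> dict:
    """Group every category's history by (country, province) instead of by index.

    Sketch only: with this shape, a dropped or reordered upstream column
    degrades to an empty history for that category rather than shifting
    data between locations.
    """

    def by_key(locations: list) -> dict:
        # Index the raw records by their natural key.
        return {(loc["country"], loc["province"]): loc["history"] for loc in locations}

    deaths_by_key = by_key(deaths)
    recovered_by_key = by_key(recovered)

    return {
        (loc["country"], loc["province"]): {
            "confirmed": loc["history"],
            "deaths": deaths_by_key.get((loc["country"], loc["province"]), {}),
            "recovered": recovered_by_key.get((loc["country"], loc["province"]), {}),
        }
        for loc in confirmed
    }
```

The same diff also fills in the previously empty recovered `Timeline`, normalizing JHU's `%m/%d/%y` dates to ISO 8601 (e.g. `"3/22/20"` becomes `"2020-03-22T00:00:00Z"`).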
24 changes: 24 additions & 0 deletions tests/test_jhu.py
@@ -22,3 +22,27 @@ async def test_get_locations(mock_client_session):
# `jhu.get_locations()` creates id based on confirmed list
location_confirmed = await jhu.get_category("confirmed")
assert len(output) == len(location_confirmed["locations"])

# `jhu.get_locations()` creates id based on deaths list
location_deaths = await jhu.get_category("deaths")
assert len(output) == len(location_deaths["locations"])

# `jhu.get_locations()` creates id based on recovered list
location_recovered = await jhu.get_category("recovered")
assert len(output) == len(location_recovered["locations"])


@pytest.mark.parametrize(
"key, locations, index, expected",
[
(("Thailand", "TH"), [{"country": "Thailand", "province": "TH", "history": {"test": "yes"}}], 0, {"test": "yes"}), # Success
(("Deutschland", "DE"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 1, {}), # IndexError
(("US", "NJ"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 0, {}), # Invaid Key Merge
],
)
def test_parse_history(key, locations, index, expected):
"""
Test validating and extracting history content from
locations data based on index.
"""
assert jhu.parse_history(key, locations, index) == expected
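The three parametrized cases cover the happy path, the `IndexError` branch, and a key mismatch. The `KeyError` branch that `parse_history` also catches is not exercised; a hypothetical fourth case could close that gap:

```python
# Hypothetical extra parametrize case: a record with no "country"/"province"
# keys drives parse_history through its KeyError branch and yields {}.
(("US", "NJ"), [{"history": {"test": "no"}}], 0, {}),  # KeyError
```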
8 changes: 3 additions & 5 deletions tests/test_routes.py
@@ -112,8 +112,7 @@ async def test_v2_locations(self):
with open(filepath, "r") as file:
expected_json_output = file.read()

# TODO: Why is this failing?
# assert return_data == json.loads(expected_json_output)
assert return_data == json.loads(expected_json_output)

async def test_v2_locations_id(self):
state = "locations"
@@ -132,8 +131,7 @@ async def test_v2_locations_id(self):
with open(filepath, "r") as file:
expected_json_output = file.read()

# TODO: Why is this failing?
# assert return_data == expected_json_output
assert return_data == json.loads(expected_json_output)


@pytest.mark.asyncio
@@ -187,4 +185,4 @@ async def test_latest(async_api_client, query_params, mock_client_session):

assert response.status_code == 200
assert response_json["latest"]["confirmed"]
assert response_json["latest"]["deaths"]
assert response_json["latest"]["deaths"]
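The two re-enabled assertions answer the old "Why is this failing?" TODOs: `return_data` is a parsed response body, so comparing it to the raw fixture file contents (a string) could never succeed, and comparing serialized strings is fragile against key order and whitespace. Parsing the expectation with `json.loads` makes the comparison structural. A self-contained illustration of the difference:

```python
import json

raw = '{\n  "confirmed": 10,\n  "deaths": 1\n}'  # pretty-printed fixture file contents
parsed = {"deaths": 1, "confirmed": 10}          # parsed response, different key order

assert parsed != raw               # dict vs. str: never equal
assert parsed == json.loads(raw)   # structural comparison ignores order and whitespace
```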