724 changes: 367 additions & 357 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Procfile
@@ -1 +1 @@
-web: gunicorn app.main:APP -w 2 --max-requests 1000 --max-requests-jitter 400 -k uvicorn.workers.UvicornWorker
+web: gunicorn app.main:APP -w 2 -k uvicorn.workers.UvicornWorker
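Note: the removed `--max-requests 1000 --max-requests-jitter 400` flags made gunicorn recycle each worker after roughly a thousand requests (the jitter staggers restarts so workers do not cycle in lockstep), a common guard against slow memory leaks. Without them, workers now run for the life of the process.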
2 changes: 1 addition & 1 deletion app/main.py
@@ -34,7 +34,7 @@
"API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak."
" Project page: https://github.com/ExpDev07/coronavirus-tracker-api."
),
version="2.0.3",
version="2.0.4",
docs_url="/",
redoc_url="/docs",
on_startup=[setup_client_session],
51 changes: 41 additions & 10 deletions app/services/location/jhu.py
@@ -9,9 +9,8 @@
 from cachetools import TTLCache
 
 from ...caches import check_cache, load_cache
-from ...coordinates import Coordinates
 from ...location import TimelinedLocation
-from ...models import Timeline
+from ...models import Coordinates, Timeline
 from ...utils import countries
 from ...utils import date as date_util
 from ...utils import httputils
@@ -140,22 +139,31 @@ async def get_locations():
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
-    # recovered = await get_category("recovered")
+    recovered = await get_category("recovered")
 
     locations_confirmed = confirmed["locations"]
     locations_deaths = deaths["locations"]
-    # locations_recovered = recovered["locations"]
+    locations_recovered = recovered["locations"]
 
     # Final locations to return.
     locations = []
 
+    # ***************************************************************************
+    # TODO: This iteration approach assumes the indexes remain the same
+    # and opens us to a CRITICAL ERROR. The removal of a column in the data source
+    # would break the API or SHIFT all of the confirmed, deaths, and recovered
+    # data, producing incorrect results for consumers.
+    # ***************************************************************************
     # Go through locations.
     for index, location in enumerate(locations_confirmed):
         # Get the timelines.
 
+        # TEMP: Fix for merging recovery data. See TODO above for more details.
+        key = (location["country"], location["province"])
 
         timelines = {
-            "confirmed": locations_confirmed[index]["history"],
-            "deaths": locations_deaths[index]["history"],
-            # 'recovered' : locations_recovered[index]['history'],
+            "confirmed": location["history"],
+            "deaths": parse_history(key, locations_deaths, index),
+            "recovered": parse_history(key, locations_recovered, index),
         }
 
         # Grab coordinates.
@@ -169,7 +177,7 @@ async def get_locations():
location["country"],
location["province"],
# Coordinates.
Coordinates(coordinates["lat"], coordinates["long"]),
Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
# Last update.
datetime.utcnow().isoformat() + "Z",
# Timelines (parse dates as ISO).
@@ -186,11 +194,34 @@ async def get_locations():
                             for date, amount in timelines["deaths"].items()
                         }
                     ),
-                    "recovered": Timeline(),
+                    "recovered": Timeline(
+                        {
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["recovered"].items()
+                        }
+                    ),
                 },
             )
         )
     LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
+
+
+def parse_history(key: tuple, locations: list, index: int):
+    """
+    Helper for validating and extracting history content from
+    locations data based on index. Validates against the current country/province
+    key to make sure there is no index/column mismatch.
+
+    TEMP: stopgap until a more efficient and better approach lands in the refactor.
+    """
+    location_history = {}
+    try:
+        if key == (locations[index]["country"], locations[index]["province"]):
+            location_history = locations[index]["history"]
+    except (IndexError, KeyError):
+        LOGGER.debug(f"iteration data merge error: {index} {key}")
+
+    return location_history
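To see why the key check in `parse_history` matters, here is a self-contained sketch of the failure mode the TODO above describes. The sample rows are invented for illustration (not real JHU feed data), and `parse_history` is copied from the diff above: when one category list is missing a row, positional indexing would silently pair the wrong locations, while the country/province key check returns an empty history instead of wrong numbers.

```python
import logging

LOGGER = logging.getLogger(__name__)


def parse_history(key: tuple, locations: list, index: int):
    """Copy of the helper above: return the history at `index` only if its key matches."""
    location_history = {}
    try:
        if key == (locations[index]["country"], locations[index]["province"]):
            location_history = locations[index]["history"]
    except (IndexError, KeyError):
        LOGGER.debug(f"iteration data merge error: {index} {key}")

    return location_history


# Invented example: the "recovered" list is missing the Albania row,
# so everything after Afghanistan is shifted up by one position.
confirmed = [
    {"country": "Afghanistan", "province": "", "history": {"3/20/20": 24}},
    {"country": "Albania", "province": "", "history": {"3/20/20": 70}},
    {"country": "Algeria", "province": "", "history": {"3/20/20": 90}},
]
recovered = [
    {"country": "Afghanistan", "province": "", "history": {"3/20/20": 1}},
    {"country": "Algeria", "province": "", "history": {"3/20/20": 32}},
]

for index, location in enumerate(confirmed):
    key = (location["country"], location["province"])
    # A naive positional merge would hand Albania the Algeria history;
    # the key check yields {} for both shifted rows instead.
    print(key, parse_history(key, recovered, index))

# ('Afghanistan', '') {'3/20/20': 1}
# ('Albania', '') {}
# ('Algeria', '') {}
```

The trade-off is visible in the last line: Algeria's recovered history is dropped rather than looked up by key, which is why the docstring calls this a stopgap until the refactor.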
48 changes: 24 additions & 24 deletions requirements-dev.txt
@@ -1,49 +1,49 @@
 -i https://pypi.org/simple
-appdirs==1.4.3
-astroid==2.4.1
+appdirs==1.4.4
+astroid==2.4.2
 async-asgi-testclient==1.4.4
 async-generator==1.10
 asyncmock==0.4.2
 attrs==19.3.0
 bandit==1.6.2
 black==19.10b0
-certifi==2020.4.5.1
+certifi==2020.6.20
 chardet==3.0.4
 click==7.1.2
-coverage==5.1
-coveralls==2.0.0
+coverage==5.2.1
+coveralls==2.1.1
 docopt==0.6.2
 gitdb==4.0.5
-gitpython==3.1.2
-idna==2.9
-importlib-metadata==1.6.0 ; python_version < '3.8'
+gitpython==3.1.7
+idna==2.10
+importlib-metadata==1.7.0 ; python_version < '3.8'
+iniconfig==1.0.1
 invoke==1.4.1
 isort==4.3.21
 lazy-object-proxy==1.4.3
 mccabe==0.6.1
 mock==4.0.2
-more-itertools==8.2.0
-multidict==4.7.5
-packaging==20.3
+more-itertools==8.4.0
+multidict==4.7.6
+packaging==20.4
 pathspec==0.8.0
 pbr==5.4.5
 pluggy==0.13.1
-py==1.8.1
-pylint==2.5.2
+py==1.9.0
+pylint==2.5.3
 pyparsing==2.4.7
-pytest-asyncio==0.12.0
-pytest-cov==2.8.1
-pytest==5.4.2
+pytest-asyncio==0.14.0
+pytest-cov==2.10.0
+pytest==6.0.1
 pyyaml==5.3.1
-regex==2020.5.7
-requests==2.23.0
-responses==0.10.14
-six==1.14.0
+regex==2020.7.14
+requests==2.24.0
+responses==0.10.15
+six==1.15.0
 smmap==3.0.4
-stevedore==1.32.0
-toml==0.10.0
+stevedore==3.2.0
+toml==0.10.1
 typed-ast==1.4.1
-urllib3[secure]==1.25.9 ; python_version >= '3.5'
-wcwidth==0.1.9
+urllib3[secure]==1.25.10 ; python_version >= '3.5'
 wrapt==1.12.1
 zipp==3.1.0
40 changes: 20 additions & 20 deletions requirements.txt
@@ -3,39 +3,39 @@ aiocache[redis]==0.11.1
 aiofiles==0.5.0
 aiohttp==3.6.2
 aioredis==1.3.1
-asgiref==3.2.7 ; python_version >= '3.5'
+asgiref==3.2.10 ; python_version >= '3.5'
 async-timeout==3.0.1
 asyncache==0.1.1
 attrs==19.3.0
-cachetools==4.1.0
-certifi==2020.4.5.1
-cffi==1.14.0
+cachetools==4.1.1
+certifi==2020.6.20
+cffi==1.14.1
 chardet==3.0.4
 click==7.1.2
-cryptography==2.9.2
+cryptography==3.0
 dataclasses==0.6 ; python_version < '3.7'
-fastapi==0.54.1
+fastapi==0.60.1
 gunicorn==20.0.4
 h11==0.9.0
-hiredis==1.0.1
+hiredis==1.1.0
 httptools==0.1.1 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'
 idna-ssl==1.1.0 ; python_version < '3.7'
-idna==2.9
-multidict==4.7.5
-psutil==5.7.0
+idna==2.10
+multidict==4.7.6
+psutil==5.7.2
 pycparser==2.20
-pydantic[dotenv]==1.5.1
+pydantic[dotenv]==1.6.1
 pyopenssl==19.1.0
 python-dateutil==2.8.1
-python-dotenv==0.13.0
-requests==2.23.0
-scout-apm==2.14.1
-sentry-sdk==0.14.3
-six==1.14.0
-starlette==0.13.2
-urllib3[secure]==1.25.9 ; python_version >= '3.5'
-uvicorn==0.11.5
+python-dotenv==0.14.0
+requests==2.24.0
+scout-apm==2.15.2
+sentry-sdk==0.16.2
+six==1.15.0
+starlette==0.13.6
+urllib3[secure]==1.25.10 ; python_version >= '3.5'
+uvicorn==0.11.8
 uvloop==0.14.0 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'
 websockets==8.1
 wrapt==1.12.1
-yarl==1.4.2
+yarl==1.5.1
39 changes: 39 additions & 0 deletions tests/test_jhu.py
@@ -22,3 +22,42 @@ async def test_get_locations(mock_client_session):
     # `jhu.get_locations()` creates ids based on the confirmed list
     location_confirmed = await jhu.get_category("confirmed")
     assert len(output) == len(location_confirmed["locations"])
+
+    # `jhu.get_locations()` creates ids based on the deaths list
+    location_deaths = await jhu.get_category("deaths")
+    assert len(output) == len(location_deaths["locations"])
+
+    # `jhu.get_locations()` creates ids based on the recovered list
+    location_recovered = await jhu.get_category("recovered")
+    assert len(output) == len(location_recovered["locations"])
+
+
+@pytest.mark.parametrize(
+    "key, locations, index, expected",
+    [
+        (
+            ("Thailand", "TH"),
+            [{"country": "Thailand", "province": "TH", "history": {"test": "yes"}}],
+            0,
+            {"test": "yes"},
+        ),  # Success
+        (
+            ("Deutschland", "DE"),
+            [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}],
+            1,
+            {},
+        ),  # IndexError
+        (
+            ("US", "NJ"),
+            [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}],
+            0,
+            {},
+        ),  # Invalid key merge
+    ],
+)
+def test_parse_history(key, locations, index, expected):
+    """
+    Test validating and extracting history content from
+    locations data based on index.
+    """
+    assert jhu.parse_history(key, locations, index) == expected
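The three cases map one-to-one onto `parse_history`'s branches: a matching key, an out-of-range index, and a mismatched key. They can be run in isolation with `pytest tests/test_jhu.py::test_parse_history`.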
6 changes: 2 additions & 4 deletions tests/test_routes.py
@@ -112,8 +112,7 @@ async def test_v2_locations(self):
         with open(filepath, "r") as file:
             expected_json_output = file.read()
 
-        # TODO: Why is this failing?
-        # assert return_data == json.loads(expected_json_output)
+        assert return_data == json.loads(expected_json_output)
 
     async def test_v2_locations_id(self):
         state = "locations"
@@ -132,8 +131,7 @@ async def test_v2_locations_id(self):
         with open(filepath, "r") as file:
             expected_json_output = file.read()
 
-        # TODO: Why is this failing?
-        # assert return_data == expected_json_output
+        assert return_data == json.loads(expected_json_output)
 
 
 @pytest.mark.asyncio