Skip to content

Commit d4088c6

Browse files
authored
Merge branch 'master' into refactor#28
2 parents aa8b51d + 3a2975f commit d4088c6

File tree

8 files changed

+495
-417
lines changed

8 files changed

+495
-417
lines changed

Pipfile.lock

Lines changed: 367 additions & 357 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Procfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
web: gunicorn app.main:APP -w 2 --max-requests 1000 --max-requests-jitter 400 -k uvicorn.workers.UvicornWorker
1+
web: gunicorn app.main:APP -w 2 -k uvicorn.workers.UvicornWorker

app/main.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
"API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak."
3535
" Project page: https://github.com/ExpDev07/coronavirus-tracker-api."
3636
),
37-
version="2.0.3",
37+
version="2.0.4",
3838
docs_url="/",
3939
redoc_url="/docs",
4040
on_startup=[setup_client_session],

app/services/location/jhu.py

Lines changed: 41 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,8 @@
99
from cachetools import TTLCache
1010

1111
from ...caches import check_cache, load_cache
12-
from ...coordinates import Coordinates
1312
from ...location import TimelinedLocation
14-
from ...models import Timeline
13+
from ...models import Coordinates, Timeline
1514
from ...utils import countries
1615
from ...utils import date as date_util
1716
from ...utils import httputils
@@ -140,22 +139,31 @@ async def get_locations():
140139
# Get all of the data categories locations.
141140
confirmed = await get_category("confirmed")
142141
deaths = await get_category("deaths")
143-
# recovered = await get_category("recovered")
142+
recovered = await get_category("recovered")
144143

145144
locations_confirmed = confirmed["locations"]
146145
locations_deaths = deaths["locations"]
147-
# locations_recovered = recovered["locations"]
146+
locations_recovered = recovered["locations"]
148147

149148
# Final locations to return.
150149
locations = []
151-
150+
# ***************************************************************************
151+
# TODO: This iteration approach assumes the indexes remain the same
152+
# and opens us to a CRITICAL ERROR. The removal of a column in the data source
153+
would break the API or SHIFT all the data confirmed, deaths, recovery producing
154+
# incorrect data to consumers.
155+
# ***************************************************************************
152156
# Go through locations.
153157
for index, location in enumerate(locations_confirmed):
154158
# Get the timelines.
159+
160+
# TEMP: Fix for merging recovery data. See TODO above for more details.
161+
key = (location["country"], location["province"])
162+
155163
timelines = {
156-
"confirmed": locations_confirmed[index]["history"],
157-
"deaths": locations_deaths[index]["history"],
158-
# 'recovered' : locations_recovered[index]['history'],
164+
"confirmed": location["history"],
165+
"deaths": parse_history(key, locations_deaths, index),
166+
"recovered": parse_history(key, locations_recovered, index),
159167
}
160168

161169
# Grab coordinates.
@@ -169,7 +177,7 @@ async def get_locations():
169177
location["country"],
170178
location["province"],
171179
# Coordinates.
172-
Coordinates(coordinates["lat"], coordinates["long"]),
180+
Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
173181
# Last update.
174182
datetime.utcnow().isoformat() + "Z",
175183
# Timelines (parse dates as ISO).
@@ -186,11 +194,34 @@ async def get_locations():
186194
for date, amount in timelines["deaths"].items()
187195
}
188196
),
189-
"recovered": Timeline(),
197+
"recovered": Timeline(
198+
{
199+
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
200+
for date, amount in timelines["recovered"].items()
201+
}
202+
),
190203
},
191204
)
192205
)
193206
LOGGER.info(f"{data_id} Data normalized")
194207

195208
# Finally, return the locations.
196209
return locations
210+
211+
212+
def parse_history(key: tuple, locations: list, index: int):
213+
"""
214+
Helper for validating and extracting history content from
215+
locations data based on index. Validates with the current country/province
216+
key to make sure there is no index/column issue.
217+
218+
TEMP: solution until a more efficient and better approach is implemented in the refactor.
219+
"""
220+
location_history = {}
221+
try:
222+
if key == (locations[index]["country"], locations[index]["province"]):
223+
location_history = locations[index]["history"]
224+
except (IndexError, KeyError):
225+
LOGGER.debug(f"iteration data merge error: {index} {key}")
226+
227+
return location_history

requirements-dev.txt

Lines changed: 24 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,49 @@
11
-i https://pypi.org/simple
2-
appdirs==1.4.3
3-
astroid==2.4.1
2+
appdirs==1.4.4
3+
astroid==2.4.2
44
async-asgi-testclient==1.4.4
55
async-generator==1.10
66
asyncmock==0.4.2
77
attrs==19.3.0
88
bandit==1.6.2
99
black==19.10b0
10-
certifi==2020.4.5.1
10+
certifi==2020.6.20
1111
chardet==3.0.4
1212
click==7.1.2
13-
coverage==5.1
14-
coveralls==2.0.0
13+
coverage==5.2.1
14+
coveralls==2.1.1
1515
docopt==0.6.2
1616
gitdb==4.0.5
17-
gitpython==3.1.2
18-
idna==2.9
19-
importlib-metadata==1.6.0 ; python_version < '3.8'
17+
gitpython==3.1.7
18+
idna==2.10
19+
importlib-metadata==1.7.0 ; python_version < '3.8'
20+
iniconfig==1.0.1
2021
invoke==1.4.1
2122
isort==4.3.21
2223
lazy-object-proxy==1.4.3
2324
mccabe==0.6.1
2425
mock==4.0.2
25-
more-itertools==8.2.0
26-
multidict==4.7.5
27-
packaging==20.3
26+
more-itertools==8.4.0
27+
multidict==4.7.6
28+
packaging==20.4
2829
pathspec==0.8.0
2930
pbr==5.4.5
3031
pluggy==0.13.1
31-
py==1.8.1
32-
pylint==2.5.2
32+
py==1.9.0
33+
pylint==2.5.3
3334
pyparsing==2.4.7
34-
pytest-asyncio==0.12.0
35-
pytest-cov==2.8.1
36-
pytest==5.4.2
35+
pytest-asyncio==0.14.0
36+
pytest-cov==2.10.0
37+
pytest==6.0.1
3738
pyyaml==5.3.1
38-
regex==2020.5.7
39-
requests==2.23.0
40-
responses==0.10.14
41-
six==1.14.0
39+
regex==2020.7.14
40+
requests==2.24.0
41+
responses==0.10.15
42+
six==1.15.0
4243
smmap==3.0.4
43-
stevedore==1.32.0
44-
toml==0.10.0
44+
stevedore==3.2.0
45+
toml==0.10.1
4546
typed-ast==1.4.1
46-
urllib3[secure]==1.25.9 ; python_version >= '3.5'
47-
wcwidth==0.1.9
47+
urllib3[secure]==1.25.10 ; python_version >= '3.5'
4848
wrapt==1.12.1
4949
zipp==3.1.0

requirements.txt

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -3,39 +3,39 @@ aiocache[redis]==0.11.1
33
aiofiles==0.5.0
44
aiohttp==3.6.2
55
aioredis==1.3.1
6-
asgiref==3.2.7 ; python_version >= '3.5'
6+
asgiref==3.2.10 ; python_version >= '3.5'
77
async-timeout==3.0.1
88
asyncache==0.1.1
99
attrs==19.3.0
10-
cachetools==4.1.0
11-
certifi==2020.4.5.1
12-
cffi==1.14.0
10+
cachetools==4.1.1
11+
certifi==2020.6.20
12+
cffi==1.14.1
1313
chardet==3.0.4
1414
click==7.1.2
15-
cryptography==2.9.2
15+
cryptography==3.0
1616
dataclasses==0.6 ; python_version < '3.7'
17-
fastapi==0.54.1
17+
fastapi==0.60.1
1818
gunicorn==20.0.4
1919
h11==0.9.0
20-
hiredis==1.0.1
20+
hiredis==1.1.0
2121
httptools==0.1.1 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'
2222
idna-ssl==1.1.0 ; python_version < '3.7'
23-
idna==2.9
24-
multidict==4.7.5
25-
psutil==5.7.0
23+
idna==2.10
24+
multidict==4.7.6
25+
psutil==5.7.2
2626
pycparser==2.20
27-
pydantic[dotenv]==1.5.1
27+
pydantic[dotenv]==1.6.1
2828
pyopenssl==19.1.0
2929
python-dateutil==2.8.1
30-
python-dotenv==0.13.0
31-
requests==2.23.0
32-
scout-apm==2.14.1
33-
sentry-sdk==0.14.3
34-
six==1.14.0
35-
starlette==0.13.2
36-
urllib3[secure]==1.25.9 ; python_version >= '3.5'
37-
uvicorn==0.11.5
30+
python-dotenv==0.14.0
31+
requests==2.24.0
32+
scout-apm==2.15.2
33+
sentry-sdk==0.16.2
34+
six==1.15.0
35+
starlette==0.13.6
36+
urllib3[secure]==1.25.10 ; python_version >= '3.5'
37+
uvicorn==0.11.8
3838
uvloop==0.14.0 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'
3939
websockets==8.1
4040
wrapt==1.12.1
41-
yarl==1.4.2
41+
yarl==1.5.1

tests/test_jhu.py

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,3 +22,42 @@ async def test_get_locations(mock_client_session):
2222
# `jhu.get_locations()` creates id based on confirmed list
2323
location_confirmed = await jhu.get_category("confirmed")
2424
assert len(output) == len(location_confirmed["locations"])
25+
26+
# `jhu.get_locations()` creates id based on deaths list
27+
location_deaths = await jhu.get_category("deaths")
28+
assert len(output) == len(location_deaths["locations"])
29+
30+
# `jhu.get_locations()` creates id based on recovered list
31+
location_recovered = await jhu.get_category("recovered")
32+
assert len(output) == len(location_recovered["locations"])
33+
34+
35+
@pytest.mark.parametrize(
36+
"key, locations, index, expected",
37+
[
38+
(
39+
("Thailand", "TH"),
40+
[{"country": "Thailand", "province": "TH", "history": {"test": "yes"}}],
41+
0,
42+
{"test": "yes"},
43+
), # Success
44+
(
45+
("Deutschland", "DE"),
46+
[{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}],
47+
1,
48+
{},
49+
), # IndexError
50+
(
51+
("US", "NJ"),
52+
[{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}],
53+
0,
54+
{},
55+
), # Invalid Key Merge
56+
],
57+
)
58+
def test_parse_history(key, locations, index, expected):
59+
"""
60+
Test validating and extracting history content from
61+
locations data based on index.
62+
"""
63+
assert jhu.parse_history(key, locations, index) == expected

tests/test_routes.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -112,8 +112,7 @@ async def test_v2_locations(self):
112112
with open(filepath, "r") as file:
113113
expected_json_output = file.read()
114114

115-
# TODO: Why is this failing?
116-
# assert return_data == json.loads(expected_json_output)
115+
assert return_data == json.loads(expected_json_output)
117116

118117
async def test_v2_locations_id(self):
119118
state = "locations"
@@ -132,8 +131,7 @@ async def test_v2_locations_id(self):
132131
with open(filepath, "r") as file:
133132
expected_json_output = file.read()
134133

135-
# TODO: Why is this failing?
136-
# assert return_data == expected_json_output
134+
assert return_data == json.loads(expected_json_output)
137135

138136

139137
@pytest.mark.asyncio

0 commit comments

Comments
 (0)