diff --git a/.gitignore b/.gitignore index efd5545c..9c41818c 100644 --- a/.gitignore +++ b/.gitignore @@ -66,4 +66,7 @@ docs/_build/ target/ # OSX Stuff -.DS_Store \ No newline at end of file +.DS_Store + +# IntelliJ/Pycharm +.idea/ diff --git a/Pipfile b/Pipfile index 2e5b9a1f..b337c22a 100644 --- a/Pipfile +++ b/Pipfile @@ -4,19 +4,28 @@ url = "https://pypi.org/simple" verify_ssl = true [dev-packages] +async-asgi-testclient = "*" +async_generator = "*" +asyncmock = "*" bandit = "*" black = "==19.10b0" coveralls = "*" +importlib-metadata = {version="*", markers="python_version<'3.8'"} invoke = "*" isort = "*" pylint = "*" pytest = "*" +pytest-asyncio = "*" pytest-cov = "*" [packages] +aiohttp = "*" +asyncache = "*" cachetools = "*" +dataclasses = {version="*", markers="python_version<'3.7'"} fastapi = "*" gunicorn = "*" +idna_ssl = {version="*", markers="python_version<'3.7'"} python-dateutil = "*" python-dotenv = "*" requests = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 6d4da039..a699f880 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "6135cc5a7f50377967629ba129a9747170d933706e42b8b74e29a5d4713aa4e0" + "sha256": "1911b081cecdda482b2a9c7c03ebba985c447846506b607df01563600c23126b" }, "pipfile-spec": 6, "requires": { @@ -16,6 +16,45 @@ ] }, "default": { + "aiohttp": { + "hashes": [ + "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e", + "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326", + "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a", + "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654", + "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a", + "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4", + "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17", + "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec", + "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd", + "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48", + "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59", + "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965" + ], + "index": "pypi", + "version": "==3.6.2" + }, + "async-timeout": { + "hashes": [ + "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", + "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" + ], + "version": "==3.0.1" + }, + "asyncache": { + "hashes": [ + "sha256:c741b3ccef2c5291b3da05d97bab3cc8d50f2ac8efd7fd79d47e3d7b6a3774de" + ], + "index": "pypi", + "version": "==0.1.1" + }, + "attrs": { + "hashes": [ + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + ], + "version": "==19.3.0" + }, "cachetools": { "hashes": [ "sha256:9a52dd97a85f257f4e4127f15818e71a0c7899f121b34591fcc1173ea79a0198", @@ -45,13 +84,22 @@ ], "version": "==7.1.1" }, + "dataclasses": { + "hashes": [ + "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f", + "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84" + ], + "index": "pypi", + "markers": "python_version < '3.7'", + "version": "==0.6" + }, "fastapi": { "hashes": [ - "sha256:c2d572370153a6b74d62a73252d75934e2bfdbb0f620fecfd489b5d4789f5c48", - "sha256:c478bc513d192f6776fd3f0355b7ff5414e94ed842677294c06e348105aaa237" + 
"sha256:a5cb9100d5f2b5dd82addbc2cdf8009258bce45b03ba21d3f5eecc88c7b5a716", + "sha256:cf26d47ede6bc6e179df951312f55fea7d4005dd53370245e216436ca4e22f22" ], "index": "pypi", - "version": "==0.53.1" + "version": "==0.53.2" }, "gunicorn": { "hashes": [ @@ -93,6 +141,36 @@ ], "version": "==2.9" }, + "idna-ssl": { + "hashes": [ + "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c" + ], + "index": "pypi", + "markers": "python_version < '3.7'", + "version": "==1.1.0" + }, + "multidict": { + "hashes": [ + "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1", + "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35", + "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928", + "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969", + "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e", + "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78", + "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1", + "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136", + "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8", + "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2", + "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e", + "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4", + "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5", + "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd", + "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab", + "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20", + "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3" + ], + "version": "==4.7.5" + }, "pydantic": { "hashes": [ "sha256:012c422859bac2e03ab3151ea6624fecf0e249486be7eb8c6ee69c91740c6752", @@ -206,6 +284,28 @@ "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b" ], "version": "==8.1" + }, + "yarl": { + "hashes": [ + "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce", + "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6", + "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce", + "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae", + "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d", + "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f", + "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b", + "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b", + "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb", + "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462", + "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea", + "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70", + "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1", + "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a", + "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b", + "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080", + "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2" + ], + "version": "==1.4.2" } }, "develop": { @@ -223,6 +323,29 @@ ], "version": "==2.3.3" }, + "async-asgi-testclient": { + "hashes": 
[ + "sha256:e961c61123eca6dc30c4f67df7fe8a3f695ca9c8b013d97272b930d6d5af4509" + ], + "index": "pypi", + "version": "==1.4.4" + }, + "async-generator": { + "hashes": [ + "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", + "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144" + ], + "index": "pypi", + "version": "==1.10" + }, + "asyncmock": { + "hashes": [ + "sha256:c251889d542e98fe5f7ece2b5b8643b7d62b50a5657d34a4cbce8a1d5170d750", + "sha256:fd8bc4e7813251a8959d1140924ccba3adbbc7af885dba7047c67f73c0b664b1" + ], + "index": "pypi", + "version": "==0.4.2" + }, "attrs": { "hashes": [ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", @@ -338,6 +461,15 @@ ], "version": "==2.9" }, + "importlib-metadata": { + "hashes": [ + "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f", + "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e" + ], + "index": "pypi", + "markers": "python_version < '3.8'", + "version": "==1.6.0" + }, "invoke": { "hashes": [ "sha256:87b3ef9d72a1667e104f89b159eaf8a514dbf2f3576885b2bbdefe74c3fb2132", @@ -388,6 +520,13 @@ ], "version": "==0.6.1" }, + "mock": { + "hashes": [ + "sha256:3f9b2c0196c60d21838f307f5825a7b86b678cedc58ab9e50a8988187b4d81e0", + "sha256:dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72" + ], + "version": "==4.0.2" + }, "more-itertools": { "hashes": [ "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", @@ -395,6 +534,28 @@ ], "version": "==8.2.0" }, + "multidict": { + "hashes": [ + "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1", + "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35", + "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928", + "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969", + "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e", + "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78", + "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1", + "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136", + "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8", + "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2", + "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e", + "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4", + "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5", + "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd", + "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab", + "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20", + "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3" + ], + "version": "==4.7.5" + }, "packaging": { "hashes": [ "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3", @@ -453,6 +614,14 @@ "index": "pypi", "version": "==5.4.1" }, + "pytest-asyncio": { + "hashes": [ + "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf", + "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b" + ], + "index": "pypi", + "version": "==0.10.0" + }, "pytest-cov": { "hashes": [ "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", @@ -584,6 +753,13 @@ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1" ], "version": "==1.11.2" + }, 
+ "zipp": { + "hashes": [ + "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", + "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" + ], + "version": "==3.1.0" } } } diff --git a/app/main.py b/app/main.py index 75805ebc..0018f8bf 100644 --- a/app/main.py +++ b/app/main.py @@ -13,6 +13,7 @@ from .data import data_source from .router.v1 import V1 from .router.v2 import V2 +from .utils.httputils import setup_client_session, teardown_client_session # ############ # FastAPI App @@ -28,6 +29,8 @@ version="2.0.1", docs_url="/", redoc_url="/docs", + on_startup=[setup_client_session], + on_shutdown=[teardown_client_session], ) # ##################### diff --git a/app/router/v1/all.py b/app/router/v1/all.py index b26fe25b..91b9e826 100644 --- a/app/router/v1/all.py +++ b/app/router/v1/all.py @@ -4,11 +4,11 @@ @V1.get("/all") -def all(): # pylint: disable=redefined-builtin +async def all(): # pylint: disable=redefined-builtin """Get all the categories.""" - confirmed = get_category("confirmed") - deaths = get_category("deaths") - recovered = get_category("recovered") + confirmed = await get_category("confirmed") + deaths = await get_category("deaths") + recovered = await get_category("recovered") return { # Data. diff --git a/app/router/v1/confirmed.py b/app/router/v1/confirmed.py index f3b97523..13365e32 100644 --- a/app/router/v1/confirmed.py +++ b/app/router/v1/confirmed.py @@ -4,6 +4,8 @@ @V1.get("/confirmed") -def confirmed(): +async def confirmed(): """Confirmed cases.""" - return get_category("confirmed") + confirmed_data = await get_category("confirmed") + + return confirmed_data diff --git a/app/router/v1/deaths.py b/app/router/v1/deaths.py index 65ed0967..fb45498c 100644 --- a/app/router/v1/deaths.py +++ b/app/router/v1/deaths.py @@ -4,6 +4,8 @@ @V1.get("/deaths") -def deaths(): +async def deaths(): """Total deaths.""" - return get_category("deaths") + deaths_data = await get_category("deaths") + + return deaths_data diff --git a/app/router/v1/recovered.py b/app/router/v1/recovered.py index 254823ed..3a3a85b7 100644 --- a/app/router/v1/recovered.py +++ b/app/router/v1/recovered.py @@ -4,6 +4,8 @@ @V1.get("/recovered") -def recovered(): +async def recovered(): """Recovered cases.""" - return get_category("recovered") + recovered_data = await get_category("recovered") + + return recovered_data diff --git a/app/router/v2/latest.py b/app/router/v2/latest.py index 071c3a22..105b16fe 100644 --- a/app/router/v2/latest.py +++ b/app/router/v2/latest.py @@ -7,11 +7,11 @@ @V2.get("/latest", response_model=Latest) -def get_latest(request: Request, source: Sources = "jhu"): # pylint: disable=unused-argument +async def get_latest(request: Request, source: Sources = "jhu"): # pylint: disable=unused-argument """ Getting latest amount of total confirmed cases, deaths, and recoveries. 
""" - locations = request.state.source.get_all() + locations = await request.state.source.get_all() return { "latest": { "confirmed": sum(map(lambda location: location.confirmed, locations)), diff --git a/app/router/v2/locations.py b/app/router/v2/locations.py index 815b1eb8..649f9c9e 100644 --- a/app/router/v2/locations.py +++ b/app/router/v2/locations.py @@ -9,7 +9,7 @@ # pylint: disable=unused-argument,too-many-arguments,redefined-builtin @V2.get("/locations", response_model=Locations, response_model_exclude_unset=True) -def get_locations( +async def get_locations( request: Request, source: Sources = "jhu", country_code: str = None, @@ -28,7 +28,7 @@ def get_locations( params.pop("timelines", None) # Retrieve all the locations. - locations = request.state.source.get_all() + locations = await request.state.source.get_all() # Attempt to filter out locations with properties matching the provided query params. for key, value in params.items(): @@ -57,8 +57,9 @@ def get_locations( # pylint: disable=invalid-name @V2.get("/locations/{id}", response_model=Location) -def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True): +async def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True): """ Getting specific location by id. """ - return {"location": request.state.source.get(id).serialize(timelines)} + location = await request.state.source.get(id) + return {"location": location.serialize(timelines)} diff --git a/app/services/location/__init__.py b/app/services/location/__init__.py index 404e9f7e..6d292b54 100644 --- a/app/services/location/__init__.py +++ b/app/services/location/__init__.py @@ -8,7 +8,7 @@ class LocationService(ABC): """ @abstractmethod - def get_all(self): + async def get_all(self): """ Gets and returns all of the locations. @@ -18,7 +18,7 @@ def get_all(self): raise NotImplementedError @abstractmethod - def get(self, id): # pylint: disable=redefined-builtin,invalid-name + async def get(self, id): # pylint: disable=redefined-builtin,invalid-name """ Gets and returns location with the provided id. diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py index 84654963..dbd8d82d 100644 --- a/app/services/location/csbs.py +++ b/app/services/location/csbs.py @@ -2,25 +2,29 @@ import csv from datetime import datetime -import requests -from cachetools import TTLCache, cached +from asyncache import cached +from cachetools import TTLCache from ...coordinates import Coordinates from ...location.csbs import CSBSLocation +from ...utils import httputils from . import LocationService class CSBSLocationService(LocationService): """ - Servive for retrieving locations from csbs + Service for retrieving locations from csbs """ - def get_all(self): - # Get the locations - return get_locations() + async def get_all(self): + # Get the locations. + locations = await get_locations() + return locations - def get(self, loc_id): # pylint: disable=arguments-differ - return self.get_all()[loc_id] + async def get(self, loc_id): # pylint: disable=arguments-differ + # Get location at the index equal to the provided id. + locations = await self.get_all() + return locations[loc_id] # Base URL for fetching data @@ -28,15 +32,15 @@ def get(self, loc_id): # pylint: disable=arguments-differ @cached(cache=TTLCache(maxsize=1, ttl=3600)) -def get_locations(): +async def get_locations(): """ Retrieves county locations; locations are cached for 1 hour :returns: The locations. 
:rtype: dict """ - request = requests.get(BASE_URL) - text = request.text + async with httputils.CLIENT_SESSION.get(BASE_URL) as response: + text = await response.text() data = list(csv.DictReader(text.splitlines())) diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index 0f02409f..316de367 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -2,14 +2,15 @@ import csv from datetime import datetime -import requests -from cachetools import TTLCache, cached +from asyncache import cached +from cachetools import TTLCache from ...coordinates import Coordinates from ...location import TimelinedLocation from ...timeline import Timeline from ...utils import countries from ...utils import date as date_util +from ...utils import httputils from . import LocationService @@ -18,13 +19,15 @@ class JhuLocationService(LocationService): Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19). """ - def get_all(self): + async def get_all(self): # Get the locations. - return get_locations() + locations = await get_locations() + return locations - def get(self, loc_id): # pylint: disable=arguments-differ + async def get(self, loc_id): # pylint: disable=arguments-differ # Get location at the index equal to provided id. - return self.get_all()[loc_id] + locations = await self.get_all() + return locations[loc_id] # --------------------------------------------------------------- @@ -37,7 +40,7 @@ def get(self, loc_id): # pylint: disable=arguments-differ @cached(cache=TTLCache(maxsize=1024, ttl=3600)) -def get_category(category): +async def get_category(category): """ Retrieves the data for the provided category. The data is cached for 1 hour. @@ -52,8 +55,8 @@ def get_category(category): url = BASE_URL + "time_series_covid19_%s_global.csv" % category # Request the data - request = requests.get(url) - text = request.text + async with httputils.CLIENT_SESSION.get(url) as response: + text = await response.text() # Parse the CSV. data = list(csv.DictReader(text.splitlines())) @@ -103,7 +106,7 @@ def get_category(category): @cached(cache=TTLCache(maxsize=1024, ttl=3600)) -def get_locations(): +async def get_locations(): """ Retrieves the locations from the categories. The locations are cached for 1 hour. @@ -111,20 +114,24 @@ def get_locations(): :rtype: List[Location] """ # Get all of the data categories locations. - confirmed = get_category("confirmed")["locations"] - deaths = get_category("deaths")["locations"] - # recovered = get_category('recovered')['locations'] + confirmed = await get_category("confirmed") + deaths = await get_category("deaths") + # recovered = await get_category("recovered") + + locations_confirmed = confirmed["locations"] + locations_deaths = deaths["locations"] + # locations_recovered = recovered["locations"] # Final locations to return. locations = [] # Go through locations. - for index, location in enumerate(confirmed): + for index, location in enumerate(locations_confirmed): # Get the timelines. timelines = { - "confirmed": confirmed[index]["history"], - "deaths": deaths[index]["history"], - # 'recovered' : recovered[index]['history'], + "confirmed": locations_confirmed[index]["history"], + "deaths": locations_deaths[index]["history"], + # 'recovered' : locations_recovered[index]['history'], } # Grab coordinates. 
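Both location services now follow the same fetch-and-cache shape: an async, TTL-cached fetcher that reads CSV text through the shared aiohttp session. A minimal sketch of that pattern, assuming the `httputils.CLIENT_SESSION` introduced in the new module below and a hypothetical `fetch_csv` helper (not part of this patch):

# Illustrative sketch only -- `fetch_csv` is a hypothetical name; the real call sites
# are jhu.get_category() and csbs.get_locations() as changed above.
import csv

from asyncache import cached
from cachetools import TTLCache

from app.utils import httputils  # holds the application-global aiohttp.ClientSession


@cached(cache=TTLCache(maxsize=1, ttl=3600))  # cache coroutine results for one hour
async def fetch_csv(url):
    # Reuse the shared session instead of opening a new connection pool per request.
    async with httputils.CLIENT_SESSION.get(url) as response:
        text = await response.text()
    return list(csv.DictReader(text.splitlines()))
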
diff --git a/app/utils/httputils.py b/app/utils/httputils.py new file mode 100644 index 00000000..a0793170 --- /dev/null +++ b/app/utils/httputils.py @@ -0,0 +1,30 @@ +"""app.utils.httputils.py""" +import logging + +from aiohttp import ClientSession + +# Singleton aiohttp.ClientSession instance. +CLIENT_SESSION: ClientSession + + +LOGGER = logging.getLogger(__name__) + + +async def setup_client_session(): + """Set up the application-global aiohttp.ClientSession instance. + + aiohttp recommends that only one ClientSession exist for the lifetime of an application. + See: https://docs.aiohttp.org/en/stable/client_quickstart.html#make-a-request + + """ + global CLIENT_SESSION # pylint: disable=global-statement + LOGGER.info("Setting up global aiohttp.ClientSession.") + CLIENT_SESSION = ClientSession() + + +async def teardown_client_session(): + """Close the application-global aiohttp.ClientSession. + """ + global CLIENT_SESSION # pylint: disable=global-statement + LOGGER.info("Closing global aiohttp.ClientSession.") + await CLIENT_SESSION.close() diff --git a/tests/conftest.py b/tests/conftest.py index a9811d22..b6399fec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,16 +3,134 @@ Global conftest file for shared pytest fixtures """ +import datetime +import os + import pytest +from async_asgi_testclient import TestClient as AsyncTestClient from fastapi.testclient import TestClient from app.main import APP +from app.utils import httputils + +try: + from unittest.mock import AsyncMock +except ImportError: + # Python 3.7 backwards compat + from asyncmock import AsyncMock + +try: + from contextlib import asynccontextmanager +except ImportError: + # Python 3.6 backwards compat + from async_generator import asynccontextmanager @pytest.fixture def api_client(): """ - Returns a TestClient. + Returns a fastapi.testclient.TestClient. The test client uses the requests library for making http requests. """ return TestClient(APP) + + +@pytest.fixture +async def async_api_client(): + """ + Returns an async_asgi_testclient.TestClient. + """ + return AsyncTestClient(APP) + + +class DateTimeStrpTime: + """Returns instance of `DateTimeStrpTime` + when calling `app.services.location.jhu.datetime.trptime(date, '%m/%d/%y').isoformat()`. + """ + + def __init__(self, date, strformat): + self.date = date + self.strformat = strformat + + def isoformat(self): + return datetime.datetime.strptime(self.date, self.strformat).isoformat() + + +class FakeRequestsGetResponse: + """Fake instance of a response from `aiohttp.ClientSession.get`. + """ + + def __init__(self, url, filename, state): + self.url = url + self.filename = filename + self.state = state + + async def text(self): + return self.read_file(self.state) + + def read_file(self, state): + """ + Mock HTTP GET-method and return text from file + """ + state = state.lower() + + # Determine filepath. + filepath = os.path.join(os.path.dirname(__file__), "example_data/{}.csv".format(state)) + + # Return fake response. + print("Try to read {}".format(filepath)) + with open(filepath, "r") as file: + return file.read() + + +@pytest.fixture(scope="class") +def mock_client_session_class(request): + """Class fixture to expose an AsyncMock to unittest.TestCase subclasses. 
+ + See: https://docs.pytest.org/en/5.4.1/unittest.html#mixing-pytest-fixtures-into-unittest-testcase-subclasses-using-marks + """ + + httputils.CLIENT_SESSION = request.cls.mock_client_session = AsyncMock() + httputils.CLIENT_SESSION.get = mocked_session_get + try: + yield + finally: + del httputils.CLIENT_SESSION + + +@pytest.fixture +async def mock_client_session(): + """Context manager fixture that replaces the global client_session with an AsyncMock + instance. + """ + + httputils.CLIENT_SESSION = AsyncMock() + httputils.CLIENT_SESSION.get = mocked_session_get + try: + yield httputils.CLIENT_SESSION + finally: + del httputils.CLIENT_SESSION + + +@asynccontextmanager +async def mocked_session_get(*args, **kwargs): + """Mock response from client_session.get. + """ + + url = args[0] + filename = url.split("/")[-1] + + # clean up for id token (e.g. Deaths) + state = filename.split("-")[-1].replace(".csv", "").lower().capitalize() + + yield FakeRequestsGetResponse(url, filename, state) + + +def mocked_strptime_isoformat(*args, **kwargs): + """Mock return value from datetime.strptime().isoformat(). + """ + + date = args[0] + strformat = args[1] + + return DateTimeStrpTime(date, strformat) diff --git a/tests/example_data/sample_covid19_county.csv b/tests/example_data/covid19_county.csv similarity index 100% rename from tests/example_data/sample_covid19_county.csv rename to tests/example_data/covid19_county.csv diff --git a/tests/test_csbs.py b/tests/test_csbs.py index 64852102..828a5b65 100644 --- a/tests/test_csbs.py +++ b/tests/test_csbs.py @@ -1,9 +1,5 @@ -import datetime -from unittest import mock - import pytest -import app from app.services.location import csbs @@ -21,7 +17,7 @@ def read_file(self): """ Mock HTTP GET-method and return text from file """ - filepath = "tests/example_data/sample_covid19_county.csv" + filepath = "tests/example_data/covid19_county.csv" print("Try to read {}".format(filepath)) with open(filepath, "r") as file: return file.read() @@ -29,9 +25,10 @@ def read_file(self): return FakeRequestsGetResponse() -@mock.patch("app.services.location.csbs.requests.get", side_effect=mocked_csbs_requests_get) -def test_get_locations(mock_request_get): - data = csbs.get_locations() +@pytest.mark.asyncio +async def test_get_locations(mock_client_session): + data = await csbs.get_locations() + assert isinstance(data, list) # check to see that Unknown/Unassigned has been filtered diff --git a/tests/test_httputils.py b/tests/test_httputils.py new file mode 100644 index 00000000..547f3725 --- /dev/null +++ b/tests/test_httputils.py @@ -0,0 +1,19 @@ +import pytest + +from app.utils import httputils + + +@pytest.mark.asyncio +async def test_setup_teardown_client_session(): + with pytest.raises(AttributeError): + # Ensure client_session is undefined prior to setup + httputils.CLIENT_SESSION + + await httputils.setup_client_session() + + assert httputils.CLIENT_SESSION + + await httputils.teardown_client_session() + assert httputils.CLIENT_SESSION.closed + + del httputils.CLIENT_SESSION diff --git a/tests/test_jhu.py b/tests/test_jhu.py index f9c214a6..3790218d 100644 --- a/tests/test_jhu.py +++ b/tests/test_jhu.py @@ -1,86 +1,24 @@ -import datetime from unittest import mock import pytest -import app from app import location from app.services.location import jhu -from app.utils import date +from tests.conftest import mocked_strptime_isoformat DATETIME_STRING = "2020-03-17T10:23:22.505550" -def mocked_requests_get(*args, **kwargs): - class FakeRequestsGetResponse: - """ - Returns 
instance of `FakeRequestsGetResponse` - when calling `app.services.location.jhu.requests.get()` - """ +@pytest.mark.asyncio +async def test_get_locations(mock_client_session): + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + output = await jhu.get_locations() - def __init__(self, url, filename, state): - self.url = url - self.filename = filename - self.state = state - self.text = self.read_file(self.state) - - def read_file(self, state): - """ - Mock HTTP GET-method and return text from file - """ - state = state.lower() - - # Determine filepath. - filepath = "tests/example_data/{}.csv".format(state) - - # Return fake response. - print("Try to read {}".format(filepath)) - with open(filepath, "r") as file: - return file.read() - - # get url from `request.get` - url = args[0] - - # get filename from url - filename = url.split("/")[-1] - - # clean up for id token (e.g. Deaths) - state = filename.split("-")[-1].replace(".csv", "").lower().capitalize() - - return FakeRequestsGetResponse(url, filename, state) - - -def mocked_strptime_isoformat(*args, **kwargs): - class DateTimeStrpTime: - """ - Returns instance of `DateTimeStrpTime` - when calling `app.services.location.jhu.datetime.trptime(date, '%m/%d/%y').isoformat()` - """ - - def __init__(self, date, strformat): - self.date = date - self.strformat = strformat - - def isoformat(self): - return datetime.datetime.strptime(self.date, self.strformat).isoformat() - - date = args[0] - strformat = args[1] - - return DateTimeStrpTime(date, strformat) - - -@mock.patch("app.services.location.jhu.datetime") -@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get) -def test_get_locations(mock_request_get, mock_datetime): - # mock app.services.location.jhu.datetime.utcnow().isoformat() - mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING - mock_datetime.strptime.side_effect = mocked_strptime_isoformat - - output = jhu.get_locations() assert isinstance(output, list) assert isinstance(output[0], location.Location) # `jhu.get_locations()` creates id based on confirmed list - location_confirmed = jhu.get_category("confirmed") + location_confirmed = await jhu.get_category("confirmed") assert len(output) == len(location_confirmed["locations"]) diff --git a/tests/test_routes.py b/tests/test_routes.py index 48d804e5..605ce2c0 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -4,17 +4,16 @@ from unittest import mock import pytest -from fastapi.testclient import TestClient +from async_asgi_testclient import TestClient -# import app -# from app import services from app.main import APP -from .test_jhu import DATETIME_STRING, mocked_requests_get, mocked_strptime_isoformat +from .conftest import mocked_strptime_isoformat +from .test_jhu import DATETIME_STRING -@mock.patch("app.services.location.jhu.datetime") -@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get) +@pytest.mark.usefixtures("mock_client_session_class") +@pytest.mark.asyncio class FlaskRoutesTest(unittest.TestCase): """ Need to mock some objects to control testing data locally @@ -32,89 +31,110 @@ def read_file_v1(self, state): expected_json_output = file.read() return expected_json_output - def test_root_api(self, mock_request_get, mock_datetime): + async def test_root_api(self): """Validate that / returns a 200 and is not a redirect.""" 
- response = self.asgi_client.get("/") + response = await self.asgi_client.get("/") assert response.status_code == 200 assert not response.is_redirect - def test_v1_confirmed(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = self.date - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v1_confirmed(self): state = "confirmed" expected_json_output = self.read_file_v1(state=state) - return_data = self.asgi_client.get("/{}".format(state)).json() + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = self.date + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/{}".format(state)) + + return_data = response.json() assert return_data == json.loads(expected_json_output) - def test_v1_deaths(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = self.date - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v1_deaths(self): state = "deaths" expected_json_output = self.read_file_v1(state=state) - return_data = self.asgi_client.get("/{}".format(state)).json() + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = self.date + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/{}".format(state)) + + return_data = response.json() assert return_data == json.loads(expected_json_output) - def test_v1_recovered(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = self.date - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v1_recovered(self): state = "recovered" expected_json_output = self.read_file_v1(state=state) - return_data = self.asgi_client.get("/{}".format(state)).json() + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = self.date + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/{}".format(state)) + + return_data = response.json() assert return_data == json.loads(expected_json_output) - def test_v1_all(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = self.date - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v1_all(self): state = "all" expected_json_output = self.read_file_v1(state=state) - return_data = self.asgi_client.get("/{}".format(state)).json() + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = self.date + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/{}".format(state)) + + return_data = response.json() assert return_data == json.loads(expected_json_output) - def test_v2_latest(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v2_latest(self): state = "latest" - return_data = self.asgi_client.get(f"/v2/{state}").json() - check_dict = {"latest": {"confirmed": 1940, "deaths": 1940, "recovered": 0}} + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + 
mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get(f"/v2/{state}") + return_data = response.json() + check_dict = {"latest": {"confirmed": 1940, "deaths": 1940, "recovered": 0}} assert return_data == check_dict - def test_v2_locations(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING - mock_datetime.strptime.side_effect = mocked_strptime_isoformat + async def test_v2_locations(self): state = "locations" - return_data = self.asgi_client.get("/v2/{}".format(state)).json() + + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/v2/{}".format(state)) + + return_data = response.json() filepath = "tests/expected_output/v2_{state}.json".format(state=state) with open(filepath, "r") as file: expected_json_output = file.read() + # TODO: Why is this failing? # assert return_data == json.loads(expected_json_output) - def test_v2_locations_id(self, mock_request_get, mock_datetime): - mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING - mock_datetime.strptime.side_effect = mocked_strptime_isoformat - + async def test_v2_locations_id(self): state = "locations" test_id = 1 - return_data = self.asgi_client.get("/v2/{}/{}".format(state, test_id)).json() + + with mock.patch("app.services.location.jhu.datetime") as mock_datetime: + mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING + mock_datetime.strptime.side_effect = mocked_strptime_isoformat + response = await self.asgi_client.get("/v2/{}/{}".format(state, test_id)) + + return_data = response.json() filepath = "tests/expected_output/v2_{state}_id_{test_id}.json".format(state=state, test_id=test_id) with open(filepath, "r") as file: expected_json_output = file.read() + # TODO: Why is this failing? # assert return_data == expected_json_output - def tearDown(self): - pass - +@pytest.mark.asyncio @pytest.mark.parametrize( "query_params,expected_status", [ @@ -128,13 +148,15 @@ def tearDown(self): ({"source": "jhu", "country_code": "US"}, 404), ], ) -def test_locations_status_code(api_client, query_params, expected_status): - response = api_client.get("/v2/locations", params=query_params) +async def test_locations_status_code(async_api_client, query_params, expected_status, mock_client_session): + response = await async_api_client.get("/v2/locations", query_string=query_params) + print(f"GET {response.url}\n{response}") print(f"\tjson:\n{pf(response.json())[:1000]}\n\t...") assert response.status_code == expected_status +@pytest.mark.asyncio @pytest.mark.parametrize( "query_params", [ @@ -146,8 +168,9 @@ def test_locations_status_code(api_client, query_params, expected_status): {"source": "jhu", "timelines": True}, ], ) -def test_latest(api_client, query_params): - response = api_client.get("/v2/latest", params=query_params) +async def test_latest(async_api_client, query_params, mock_client_session): + response = await async_api_client.get("/v2/latest", query_string=query_params) + print(f"GET {response.url}\n{response}") response_json = response.json()
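
Taken together, a hedged sketch of how the new pieces interact (names reused from this diff; other FastAPI keyword arguments elided): the app registers the session lifecycle hooks, the async endpoints await the cached fetchers, and the tests swap the session out so no network access happens.

# Illustrative sketch only -- mirrors the wiring added to app/main.py above.
from fastapi import FastAPI

from app.utils.httputils import setup_client_session, teardown_client_session

APP = FastAPI(
    docs_url="/",
    redoc_url="/docs",
    on_startup=[setup_client_session],      # creates the global aiohttp.ClientSession
    on_shutdown=[teardown_client_session],  # closes it when the ASGI server shuts down
)

In the test suite, the `mock_client_session` fixture replaces `httputils.CLIENT_SESSION` with an `AsyncMock` whose `get` is an async context manager yielding canned CSV responses from tests/example_data/, so the same endpoints can be exercised offline.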