diff --git a/.env.example b/.env.example index cb380afb..e35ce549 100644 --- a/.env.example +++ b/.env.example @@ -1,2 +1,3 @@ # Port to serve app on. -PORT = 5000 \ No newline at end of file +PORT = 5000 +LOCAL_REDIS_URL = redis://localhost:6379 diff --git a/Pipfile b/Pipfile index c28a067c..50d0a8a2 100644 --- a/Pipfile +++ b/Pipfile @@ -20,6 +20,8 @@ pytest-cov = "*" responses = "*" [packages] +aiocache = {extras = ["redis"],version = "*"} +aiofiles = "*" aiohttp = "*" asyncache = "*" cachetools = "*" @@ -27,8 +29,8 @@ dataclasses = {version = "*",markers = "python_version<'3.7'"} fastapi = "*" gunicorn = "*" idna_ssl = {version = "*",markers = "python_version<'3.7'"} +pydantic = {extras = ["dotenv"],version = "*"} python-dateutil = "*" -python-dotenv = "*" requests = "*" uvicorn = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 9ac79d0f..43e27b0e 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9c469c96db1ae3a7e4c239d3a9c7028ecf49a0ab5e3ea50aed304ea2ab1a113e" + "sha256": "596c0a497d4f2cfa9e3a3e8b38b2cf018ab3b6d9a26f04a949ced6b025e05f62" }, "pipfile-spec": 6, "requires": { @@ -16,6 +16,25 @@ ] }, "default": { + "aiocache": { + "extras": [ + "redis" + ], + "hashes": [ + "sha256:e55c7caaa5753794fd301c3a2e592737fa1d036db9f8d04ae154facdfb48a157", + "sha256:f2ebe0b05cec45782e7b5ea0bb74640f157dd4bb1028b4565364dda9fe33be7f" + ], + "index": "pypi", + "version": "==0.11.1" + }, + "aiofiles": { + "hashes": [ + "sha256:377fdf7815cc611870c59cbd07b68b180841d2a2b79812d8c218be02448c2acb", + "sha256:98e6bcfd1b50f97db4980e182ddd509b7cc35909e903a8fe50d8849e02d815af" + ], + "index": "pypi", + "version": "==0.5.0" + }, "aiohttp": { "hashes": [ "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e", @@ -34,6 +53,13 @@ "index": "pypi", "version": "==3.6.2" }, + "aioredis": { + "hashes": [ + "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a", + "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3" + ], + "version": "==1.3.1" + }, "async-timeout": { "hashes": [ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", @@ -79,10 +105,10 @@ }, "click": { "hashes": [ - "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc", - "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a" + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "version": "==7.1.1" + "version": "==7.1.2" }, "dataclasses": { "hashes": [ @@ -116,6 +142,51 @@ ], "version": "==0.9.0" }, + "hiredis": { + "hashes": [ + "sha256:01b577f84c20ecc9c07fc4c184231b08e3c3942de096fa99978e053de231c423", + "sha256:01ff0900134166961c9e339df77c33b72f7edc5cb41739f0babcd9faa345926e", + "sha256:03ed34a13316d0c34213c4fd46e0fa3a5299073f4d4f08e93fed8c2108b399b3", + "sha256:040436e91df5143aff9e0debb49530d0b17a6bd52200ce568621c31ef581b10d", + "sha256:091eb38fbf968d1c5b703e412bbbd25f43a7967d8400842cee33a5a07b33c27b", + "sha256:102f9b9dc6ed57feb3a7c9bdf7e71cb7c278fe8df1edfcfe896bc3e0c2be9447", + "sha256:2b4b392c7e3082860c8371fab3ae762139090f9115819e12d9f56060f9ede05d", + "sha256:2c9cc0b986397b833073f466e6b9e9c70d1d4dc2c2c1b3e9cae3a23102ff296c", + "sha256:2fa65a9df683bca72073cd77709ddeb289ea2b114d3775d225fbbcc5faf808c5", + "sha256:38437a681f17c975fd22349e72c29bc643f8e7eb2d6dc5df419eac59afa4d7ce", + "sha256:3b3428fa3cf1ee178807b52c9bee8950ab94cd4eaa9bfae8c1bbae3c49501d34", + 
"sha256:3dd8c2fae7f5494978facb0e93297dd627b1a3f536f3b070cf0a7d9157a07dcb", + "sha256:4414a96c212e732723b5c3d7c04d386ebbb2ec359e1de646322cbc3f875cbd0d", + "sha256:48c627581ad4ef60adbac980981407939acf13a0e18f093502c7b542223c4f19", + "sha256:4a60e71625a2d78d8ab84dfb2fa2cfd9458c964b6e6c04fea76d9ade153fb371", + "sha256:585ace09f434e43d8a8dbeb366865b1a044d7c06319b3c7372a0a00e63b860f4", + "sha256:74b364b3f06c9cf0a53f7df611045bc9437ed972a283fa1f0b12537236d23ddc", + "sha256:75c65c3850e89e9daa68d1b9bedd5806f177d60aa5a7b0953b4829481cfc1f72", + "sha256:7f052de8bf744730a9120dbdc67bfeb7605a01f69fb8e7ba5c475af33c24e145", + "sha256:8113a7d5e87ecf57cd4ae263cc9e429adb9a3e59f5a7768da5d3312a8d0a051a", + "sha256:84857ce239eb8ed191ac78e77ff65d52902f00f30f4ee83bf80eb71da73b70e6", + "sha256:8644a48ddc4a40b3e3a6b9443f396c2ee353afb2d45656c4fc68d04a82e8e3f7", + "sha256:936aa565e673536e8a211e43ec43197406f24cd1f290138bd143765079c8ba00", + "sha256:9afeb88c67bbc663b9f27385c496da056d06ad87f55df6e393e1516cfecb0461", + "sha256:9d62cc7880110e4f83b0a51d218f465d3095e2751fbddd34e553dbd106a929ff", + "sha256:a1fadd062fc8d647ff39220c57ea2b48c99bb73f18223828ec97f88fc27e7898", + "sha256:a7754a783b1e5d6f627c19d099b178059c62f782ab62b4d8ba165b9fbc2ee34c", + "sha256:aa59dd63bb3f736de4fc2d080114429d5d369dfb3265f771778e8349d67a97a4", + "sha256:ae2ee0992f8de249715435942137843a93db204dd7db1e7cc9bdc5a8436443e8", + "sha256:b36842d7cf32929d568f37ec5b3173b72b2ec6572dec4d6be6ce774762215aee", + "sha256:bcbf9379c553b5facc6c04c1e5569b44b38ff16bcbf354676287698d61ee0c92", + "sha256:cbccbda6f1c62ab460449d9c85fdf24d0d32a6bf45176581151e53cc26a5d910", + "sha256:d0caf98dfb8af395d6732bd16561c0a2458851bea522e39f12f04802dbf6f502", + "sha256:d6456afeddba036def1a36d8a2758eca53202308d83db20ab5d0b66590919627", + "sha256:dbaef9a21a4f10bc281684ee4124f169e62bb533c2a92b55f8c06f64f9af7b8f", + "sha256:dce84916c09aaece006272b37234ae84a8ed13abb3a4d341a23933b8701abfb5", + "sha256:eb8c9c8b9869539d58d60ff4a28373a22514d40495911451343971cb4835b7a9", + "sha256:efc98b14ee3a8595e40b1425e8d42f5fd26f11a7b215a81ef9259068931754f4", + "sha256:fa2dc05b87d97acc1c6ae63f3e0f39eae5246565232484b08db6bf2dc1580678", + "sha256:fe7d6ce9f6a5fbe24f09d95ea93e9c7271abc4e1565da511e1449b107b4d7848" + ], + "version": "==1.0.1" + }, "httptools": { "hashes": [ "sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be", @@ -172,26 +243,30 @@ "version": "==4.7.5" }, "pydantic": { - "hashes": [ - "sha256:0b7aadfa1de28057656064e04d9f018d1b186fe2a8e953a2fb41545873b7cf95", - "sha256:0f61e67291b99a927816558a218a4e794db72a33621c836e63d12613a2202cd4", - "sha256:20946280c750753b3e3177c748825ef189d7ab86c514f6a0b118621110d5f0d3", - "sha256:22139ee446992c222977ac0a9269c4da2e9ecc1834f84804ebde008a4649b929", - "sha256:3c0f39e884d7a3572d5cc8322b0fe9bf66114283e22e05a5c4b8961c19588945", - "sha256:446ce773a552a2cb90065d4aa645e16fa7494369b5f0d199e4d41a992a98204d", - "sha256:475e6606873e40717cc3b0eebc7d1101cbfc774e01dadeeea24c121eb5826b86", - "sha256:66124752662de0479a9d0c17bdebdc8a889bccad8846626fb66d8669e8eafb63", - "sha256:896637b7d8e4cdc0bcee1704fcadacdd167c35ac29f02a4395fce7a033925f26", - "sha256:9af44d06db33896a2176603c9cb876df3a60297a292a24d3018956a910cc1402", - "sha256:9e46fac8a4674db0777fd0133aa56817e1481beee50971bab39dded7639f9b2b", - "sha256:ae206e103e976c40ec294cd6c8fcbfbdaced3ab9b736bc53d03fa11b5aaa1628", - "sha256:b11d0bd7ecf41098894e8777ee623c29554dbaa37e862c51bcc5a2b950d1bf77", - "sha256:d73070028f7b046a5b2e611a9799c238d7bd245f8fe30f4ad7ff29ddb63aac40", - 
"sha256:ddedcdf9d5c24939578449a8e099ceeec3b3d76243fc143aff63ebf6d5aade10", - "sha256:e08e21f4d5395ac17cde19de26be63fb16fb870f0cfde1481ddc22d5e2353548", - "sha256:e6239199b363bc53262bcb57f1441206d4b2d46b392eccba2213d8358d6e284a" - ], - "version": "==1.5" + "extras": [ + "dotenv" + ], + "hashes": [ + "sha256:0a1cdf24e567d42dc762d3fed399bd211a13db2e8462af9dfa93b34c41648efb", + "sha256:2007eb062ed0e57875ce8ead12760a6e44bf5836e6a1a7ea81d71eeecf3ede0f", + "sha256:20a15a303ce1e4d831b4e79c17a4a29cb6740b12524f5bba3ea363bff65732bc", + "sha256:2a6904e9f18dea58f76f16b95cba6a2f20b72d787abd84ecd67ebc526e61dce6", + "sha256:3714a4056f5bdbecf3a41e0706ec9b228c9513eee2ad884dc2c568c4dfa540e9", + "sha256:473101121b1bd454c8effc9fe66d54812fdc128184d9015c5aaa0d4e58a6d338", + "sha256:68dece67bff2b3a5cc188258e46b49f676a722304f1c6148ae08e9291e284d98", + "sha256:70f27d2f0268f490fe3de0a9b6fca7b7492b8fd6623f9fecd25b221ebee385e3", + "sha256:8433dbb87246c0f562af75d00fa80155b74e4f6924b0db6a2078a3cd2f11c6c4", + "sha256:8be325fc9da897029ee48d1b5e40df817d97fe969f3ac3fd2434ba7e198c55d5", + "sha256:93b9f265329d9827f39f0fca68f5d72cc8321881cdc519a1304fa73b9f8a75bd", + "sha256:9be755919258d5d168aeffbe913ed6e8bd562e018df7724b68cabdee3371e331", + "sha256:ab863853cb502480b118187d670f753be65ec144e1654924bec33d63bc8b3ce2", + "sha256:b96ce81c4b5ca62ab81181212edfd057beaa41411cd9700fbcb48a6ba6564b4e", + "sha256:da8099fca5ee339d5572cfa8af12cf0856ae993406f0b1eb9bb38c8a660e7416", + "sha256:e2c753d355126ddd1eefeb167fa61c7037ecd30b98e7ebecdc0d1da463b4ea09", + "sha256:f0018613c7a0d19df3240c2a913849786f21b6539b9f23d85ce4067489dfacfa" + ], + "index": "pypi", + "version": "==1.5.1" }, "python-dateutil": { "hashes": [ @@ -206,7 +281,6 @@ "sha256:25c0ff1a3e12f4bde8d592cc254ab075cfe734fc5dd989036716fd17ee7e5ec7", "sha256:3b9909bc96b0edc6b01586e1eed05e71174ef4e04c71da5786370cebea53ad74" ], - "index": "pypi", "version": "==0.13.0" }, "requests": { @@ -240,11 +314,11 @@ }, "uvicorn": { "hashes": [ - "sha256:0f58170165c4495f563d8224b2f415a0829af0412baa034d6f777904613087fd", - "sha256:6fdaf8e53bf1b2ddf0fe9ed06079b5348d7d1d87b3365fe2549e6de0d49e631c" + "sha256:50577d599775dac2301bac8bd5b540d19a9560144143c5bdab13cba92783b6e7", + "sha256:596eaa8645b6dbc24d6610e335f8ddf5f925b4c4b86fdc7146abb0bf0da65d17" ], "index": "pypi", - "version": "==0.11.3" + "version": "==0.11.5" }, "uvloop": { "hashes": [ @@ -321,10 +395,10 @@ }, "astroid": { "hashes": [ - "sha256:71ea07f44df9568a75d0f354c49143a4575d90645e9fead6dfb52c26a85ed13a", - "sha256:840947ebfa8b58f318d42301cf8c0a20fd794a33b61cc4638e28e9e61ba32f42" + "sha256:29fa5d46a2404d01c834fcb802a3943685f1fc538eb2a02a161349f5505ac196", + "sha256:2fecea42b20abb1922ed65c7b5be27edfba97211b04b2b6abc6a43549a024ea6" ], - "version": "==2.3.3" + "version": "==2.4.0" }, "async-asgi-testclient": { "hashes": [ @@ -388,10 +462,10 @@ }, "click": { "hashes": [ - "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc", - "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a" + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "version": "==7.1.1" + "version": "==7.1.2" }, "coverage": { "hashes": [ @@ -596,11 +670,11 @@ }, "pylint": { "hashes": [ - "sha256:3db5468ad013380e987410a8d6956226963aed94ecb5f9d3a28acca6d9ac36cd", - "sha256:886e6afc935ea2590b462664b161ca9a5e40168ea99e5300935f6591ad467df4" + "sha256:588e114e3f9a1630428c35b7dd1c82c1c93e1b0e78ee312ae4724c5e1a1e0245", + 
"sha256:bd556ba95a4cf55a1fc0004c00cf4560b1e70598a54a74c6904d933c8f3bd5a8" ], "index": "pypi", - "version": "==2.4.4" + "version": "==2.5.0" }, "pyparsing": { "hashes": [ @@ -619,11 +693,11 @@ }, "pytest-asyncio": { "hashes": [ - "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf", - "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b" + "sha256:6096d101a1ae350d971df05e25f4a8b4d3cd13ffb1b32e42d902ac49670d2bfa", + "sha256:c54866f3cf5dd2063992ba2c34784edae11d3ed19e006d220a3cf0bfc4191fcb" ], "index": "pypi", - "version": "==0.10.0" + "version": "==0.11.0" }, "pytest-cov": { "hashes": [ @@ -685,11 +759,11 @@ }, "responses": { "hashes": [ - "sha256:0474ce3c897fbcc1aef286117c93499882d5c440f06a805947e4b1cb5ab3d474", - "sha256:f83613479a021e233e82d52ffb3e2e0e2836d24b0cc88a0fa31978789f78d0e5" + "sha256:1a78bc010b20a5022a2c0cb76b8ee6dc1e34d887972615ebd725ab9a166a4960", + "sha256:3d596d0be06151330cb230a2d630717ab20f7a81f205019481e206eb5db79915" ], "index": "pypi", - "version": "==0.10.12" + "version": "==0.10.14" }, "six": { "hashes": [ @@ -761,9 +835,9 @@ }, "wrapt": { "hashes": [ - "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1" + "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7" ], - "version": "==1.11.2" + "version": "==1.12.1" }, "zipp": { "hashes": [ diff --git a/app/caches.py b/app/caches.py new file mode 100644 index 00000000..df95f508 --- /dev/null +++ b/app/caches.py @@ -0,0 +1,52 @@ +"""app.caches.py""" +import functools +import logging +from typing import Union + +import aiocache + +from .config import get_settings + +LOGGER = logging.getLogger(name="app.caches") + +SETTINGS = get_settings() + +if SETTINGS.rediscloud_url: + REDIS_URL = SETTINGS.rediscloud_url + LOGGER.info("Using Rediscloud") +else: + REDIS_URL = SETTINGS.local_redis_url + LOGGER.info("Using Local Redis") + + +@functools.lru_cache() +def get_cache(namespace) -> Union[aiocache.RedisCache, aiocache.SimpleMemoryCache]: + """Retunr """ + if REDIS_URL: + LOGGER.info("using RedisCache") + return aiocache.RedisCache( + endpoint=REDIS_URL.host, + port=REDIS_URL.port, + password=REDIS_URL.password, + namespace=namespace, + create_connection_timeout=5, + ) + LOGGER.info("using SimpleMemoryCache") + return aiocache.SimpleMemoryCache(namespace=namespace) + + +async def check_cache(data_id: str, namespace: str = None): + """Check the data of a cache given an id.""" + cache = get_cache(namespace) + result = await cache.get(data_id, None) + LOGGER.info(f"{data_id} cache pulled") + await cache.close() + return result + + +async def load_cache(data_id: str, data, namespace: str = None, cache_life: int = 3600): + """Load data into the cache.""" + cache = get_cache(namespace) + await cache.set(data_id, data, ttl=cache_life) + LOGGER.info(f"{data_id} cache loaded") + await cache.close() diff --git a/app/config.py b/app/config.py new file mode 100644 index 00000000..7d911e4d --- /dev/null +++ b/app/config.py @@ -0,0 +1,29 @@ +"""app.config.py""" +import functools +import logging + +from pydantic import AnyUrl, BaseSettings + +CFG_LOGGER = logging.getLogger("app.config") + + +class _Settings(BaseSettings): + port: int = 5000 + rediscloud_url: AnyUrl = None + local_redis_url: AnyUrl = None + + +@functools.lru_cache() +def get_settings(**kwargs) -> BaseSettings: + """ + Read settings from the environment or `.env` file. 
+    https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support
+
+    Usage:
+        import app.config
+
+        settings = app.config.get_settings(_env_file="")
+        port_number = settings.port
+    """
+    CFG_LOGGER.info("Loading Config settings from Environment ...")
+    return _Settings(**kwargs)
diff --git a/app/config/__init__.py b/app/config/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/app/config/settings.py b/app/config/settings.py
deleted file mode 100644
index 4a02a734..00000000
--- a/app/config/settings.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""app.config.settings.py"""
-import os
-
-# Load enviroment variables from .env file.
-from dotenv import load_dotenv
-
-load_dotenv()
-
-# The port to serve the app application on.
-PORT = int(os.getenv("PORT", "5000"))
diff --git a/app/io.py b/app/io.py
index 8130c146..3bd443b6 100644
--- a/app/io.py
+++ b/app/io.py
@@ -1,28 +1,56 @@
 """app.io.py"""
 import json
 import pathlib
-from typing import Dict, Union
+from typing import Dict, List, Union
+
+import aiofiles
 
 HERE = pathlib.Path(__file__)
 DATA = HERE.joinpath("..", "data").resolve()
 
 
 def save(
-    name: str, content: Union[str, Dict], write_mode: str = "w", indent: int = 2, **json_dumps_kwargs
+    name: str, content: Union[str, Dict, List], write_mode: str = "w", indent: int = 2, **json_dumps_kwargs
 ) -> pathlib.Path:
     """Save content to a file. If content is a dictionary, use json.dumps()."""
     path = DATA / name
-    if isinstance(content, dict):
+    if isinstance(content, (dict, list)):
         content = json.dumps(content, indent=indent, **json_dumps_kwargs)
     with open(DATA / name, mode=write_mode) as f_out:
         f_out.write(content)
     return path
 
 
-def load(name: str, **json_kwargs) -> Union[str, Dict]:
+def load(name: str, **json_kwargs) -> Union[str, Dict, List]:
     """Loads content from a file. If file ends with '.json', call json.load() and return a Dictionary."""
     path = DATA / name
     with open(path) as f_in:
         if path.suffix == ".json":
             return json.load(f_in, **json_kwargs)
         return f_in.read()
+
+
+class AIO:
+    """Async compatible file io operations."""
+
+    @classmethod
+    async def save(
+        cls, name: str, content: Union[str, Dict, List], write_mode: str = "w", indent: int = 2, **json_dumps_kwargs
+    ):
+        """Save content to a file. If content is a dictionary, use json.dumps()."""
+        path = DATA / name
+        if isinstance(content, (dict, list)):
+            content = json.dumps(content, indent=indent, **json_dumps_kwargs)
+        async with aiofiles.open(DATA / name, mode=write_mode) as f_out:
+            await f_out.write(content)
+        return path
+
+    @classmethod
+    async def load(cls, name: str, **json_kwargs) -> Union[str, Dict, List]:
+        """Loads content from a file.
If file ends with '.json', call json.load() and return a Dictionary.""" + path = DATA / name + async with aiofiles.open(path) as f_in: + content = await f_in.read() + if path.suffix == ".json": + content = json.loads(content, **json_kwargs) + return content diff --git a/app/main.py b/app/main.py index 0ab95fdb..3e5ee010 100644 --- a/app/main.py +++ b/app/main.py @@ -2,7 +2,6 @@ app.main.py """ import logging -import os import pydantic import uvicorn @@ -11,6 +10,7 @@ from fastapi.middleware.gzip import GZipMiddleware from fastapi.responses import JSONResponse +from .config import get_settings from .data import data_source from .routers import V1, V2 from .utils.httputils import setup_client_session, teardown_client_session @@ -20,6 +20,8 @@ # ############ LOGGER = logging.getLogger("api") +SETTINGS = get_settings() + APP = FastAPI( title="Coronavirus Tracker", description=( @@ -93,5 +95,5 @@ async def handle_validation_error( # Running of app. if __name__ == "__main__": uvicorn.run( - "app.main:APP", host="127.0.0.1", port=int(os.getenv("PORT", "5000")), log_level="info", + "app.main:APP", host="127.0.0.1", port=SETTINGS.port, log_level="info", ) diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index bd247113..11f6d120 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -8,6 +8,7 @@ from asyncache import cached from cachetools import TTLCache +from ...caches import check_cache, load_cache from ...coordinates import Coordinates from ...location import TimelinedLocation from ...timeline import Timeline @@ -19,6 +20,7 @@ LOGGER = logging.getLogger("services.location.jhu") PID = os.getpid() + class JhuLocationService(LocationService): """ Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19). @@ -44,7 +46,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ ) -@cached(cache=TTLCache(maxsize=1024, ttl=3600)) +@cached(cache=TTLCache(maxsize=1024, ttl=1800)) async def get_category(category): """ Retrieves the data for the provided category. The data is cached for 1 hour. @@ -56,68 +58,78 @@ async def get_category(category): category = category.lower() data_id = f"jhu.{category}" - # URL to request data from. - url = BASE_URL + "time_series_covid19_%s_global.csv" % category + # check shared cache + cache_results = await check_cache(data_id) + if cache_results: + LOGGER.info(f"{data_id} using shared cache results") + results = cache_results + else: + LOGGER.info(f"{data_id} shared cache empty") + # URL to request data from. + url = BASE_URL + "time_series_covid19_%s_global.csv" % category - # Request the data - LOGGER.info(f"{data_id} Requesting data...") - async with httputils.CLIENT_SESSION.get(url) as response: - text = await response.text() + # Request the data + LOGGER.info(f"{data_id} Requesting data...") + async with httputils.CLIENT_SESSION.get(url) as response: + text = await response.text() - LOGGER.debug(f"{data_id} Data received") + LOGGER.debug(f"{data_id} Data received") - # Parse the CSV. - data = list(csv.DictReader(text.splitlines())) - LOGGER.debug(f"{data_id} CSV parsed") + # Parse the CSV. + data = list(csv.DictReader(text.splitlines())) + LOGGER.debug(f"{data_id} CSV parsed") - # The normalized locations. - locations = [] + # The normalized locations. + locations = [] - for item in data: - # Filter out all the dates. - dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items())) + for item in data: + # Filter out all the dates. 
+ dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items())) - # Make location history from dates. - history = {date: int(amount or 0) for date, amount in dates.items()} + # Make location history from dates. + history = {date: int(amount or 0) for date, amount in dates.items()} - # Country for this location. - country = item["Country/Region"] + # Country for this location. + country = item["Country/Region"] - # Latest data insert value. - latest = list(history.values())[-1] + # Latest data insert value. + latest = list(history.values())[-1] + + # Normalize the item and append to locations. + locations.append( + { + # General info. + "country": country, + "country_code": countries.country_code(country), + "province": item["Province/State"], + # Coordinates. + "coordinates": {"lat": item["Lat"], "long": item["Long"],}, + # History. + "history": history, + # Latest statistic. + "latest": int(latest or 0), + } + ) + LOGGER.debug(f"{data_id} Data normalized") + + # Latest total. + latest = sum(map(lambda location: location["latest"], locations)) + + # Return the final data. + results = { + "locations": locations, + "latest": latest, + "last_updated": datetime.utcnow().isoformat() + "Z", + "source": "https://github.com/ExpDev07/coronavirus-tracker-api", + } + # save the results to distributed cache + await load_cache(data_id, results) - # Normalize the item and append to locations. - locations.append( - { - # General info. - "country": country, - "country_code": countries.country_code(country), - "province": item["Province/State"], - # Coordinates. - "coordinates": {"lat": item["Lat"], "long": item["Long"],}, - # History. - "history": history, - # Latest statistic. - "latest": int(latest or 0), - } - ) - LOGGER.debug(f"{data_id} Data normalized") - - # Latest total. - latest = sum(map(lambda location: location["latest"], locations)) - - # Return the final data. - results = { - "locations": locations, - "latest": latest, - "last_updated": datetime.utcnow().isoformat() + "Z", - "source": "https://github.com/ExpDev07/coronavirus-tracker-api", - } LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}") return results -@cached(cache=TTLCache(maxsize=1024, ttl=3600)) +@cached(cache=TTLCache(maxsize=1024, ttl=1800)) async def get_locations(): """ Retrieves the locations from the categories. The locations are cached for 1 hour. diff --git a/pylintrc b/pylintrc deleted file mode 100644 index af114a33..00000000 --- a/pylintrc +++ /dev/null @@ -1,582 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist=pydantic - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. 
-limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - bad-continuation, # conflicts with black - duplicate-code # turn back on ASAP - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. 
-enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. 
Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=120 # matches black setting - -# Maximum number of lines in a module. -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. -logging-format-style=fstr - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. 
-check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. 
-defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". 
-overgeneral-exceptions=BaseException, - Exception diff --git a/pyproject.toml b/pyproject.toml index f1226541..b6bc6af6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,3 +24,39 @@ include_trailing_comma = "True" force_grid_wrap = 0 use_parentheses = "True" line_length = 120 + +[tool.pylint.master] +extension-pkg-whitelist = "pydantic" +ignore = "CVS" +suggestion-mode = "yes" +[tool.pylint.messages_control] +disable = ''' +duplicate-code, +line-too-long, +logging-fstring-interpolation, +bad-continuation, +''' +[tool.pylint.logging] +logging-modules = "logging" +[tool.pylint.imports] +allow-wildcard-with-all = "no" +[tool.pylint.format] +indent-after-paren = "4" +max-line-length = "120" # matches black setting +max-module-lines = "800" +no-space-check = ''' +trailing-comma, +dict-separator +''' +single-line-class-stmt = "no" +single-line-if-stmt = "no" +[tool.pylint.miscellaneous] +notes= ''' +FIXME, +XXX +''' +[tool.pylint.similarities] +ignore-comments = "yes" +ignore-docstrings = "yes" +ignore-imports = "no" +min-similarity-lines = "4" diff --git a/requirements-dev.txt b/requirements-dev.txt index 374fb37c..7809162c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,6 @@ -i https://pypi.org/simple appdirs==1.4.3 -astroid==2.3.3 +astroid==2.4.0 async-asgi-testclient==1.4.4 async-generator==1.10 asyncmock==0.4.2 @@ -9,7 +9,7 @@ bandit==1.6.2 black==19.10b0 certifi==2020.4.5.1 chardet==3.0.4 -click==7.1.1 +click==7.1.2 coverage==5.1 coveralls==2.0.0 docopt==0.6.2 @@ -29,15 +29,15 @@ pathspec==0.8.0 pbr==5.4.5 pluggy==0.13.1 py==1.8.1 -pylint==2.4.4 +pylint==2.5.0 pyparsing==2.4.7 -pytest-asyncio==0.10.0 +pytest-asyncio==0.11.0 pytest-cov==2.8.1 pytest==5.4.1 pyyaml==5.3.1 regex==2020.4.4 requests==2.23.0 -responses==0.10.12 +responses==0.10.14 six==1.14.0 smmap==3.0.2 stevedore==1.32.0 @@ -45,5 +45,5 @@ toml==0.10.0 typed-ast==1.4.1 urllib3==1.25.9 wcwidth==0.1.9 -wrapt==1.11.2 +wrapt==1.12.1 zipp==3.1.0 diff --git a/requirements.txt b/requirements.txt index bb9302b1..8e0f2ff3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,28 +1,32 @@ -i https://pypi.org/simple +aiocache[redis]==0.11.1 +aiofiles==0.5.0 aiohttp==3.6.2 +aioredis==1.3.1 async-timeout==3.0.1 asyncache==0.1.1 attrs==19.3.0 cachetools==4.1.0 certifi==2020.4.5.1 chardet==3.0.4 -click==7.1.1 +click==7.1.2 dataclasses==0.6 ; python_version < '3.7' fastapi==0.54.1 gunicorn==20.0.4 h11==0.9.0 +hiredis==1.0.1 httptools==0.1.1 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy' idna-ssl==1.1.0 ; python_version < '3.7' idna==2.9 multidict==4.7.5 -pydantic==1.5 +pydantic[dotenv]==1.5.1 python-dateutil==2.8.1 python-dotenv==0.13.0 requests==2.23.0 six==1.14.0 starlette==0.13.2 urllib3==1.25.9 -uvicorn==0.11.3 +uvicorn==0.11.5 uvloop==0.14.0 ; sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy' websockets==8.1 yarl==1.4.2 diff --git a/tasks.py b/tasks.py index 06a52486..ae1f09cd 100644 --- a/tasks.py +++ b/tasks.py @@ -46,6 +46,7 @@ def check(ctx, fmt=False, sort=False, diff=False): # pylint: disable=redefined- fmt_args.append("--diff") sort_args.append("--diff") + # FIXME: run each command and check return code cmd_args = [] if fmt: cmd_args.extend(fmt_args) diff --git a/tests/test_io.py b/tests/test_io.py index 83639cc9..c5d16c3a 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -5,8 +5,7 @@ import app.io - -@pytest.mark.parametrize( +IO_PARAMS = ( "name, content, kwargs", [ ("test_file.txt", 
string.ascii_lowercase, {}), @@ -14,6 +13,9 @@ ("test_custom_json.json", {"z": -1, "b": 1, "y": -2, "a": 0}, {"indent": 4, "sort_keys": True}), ], ) + + +@pytest.mark.parametrize(*IO_PARAMS) def test_save(tmp_path, name, content, kwargs): test_path = tmp_path / name assert not test_path.exists() @@ -23,17 +25,32 @@ def test_save(tmp_path, name, content, kwargs): assert test_path.exists() -@pytest.mark.parametrize( - "name, content, kwargs", - [ - ("test_file.txt", string.ascii_lowercase, {}), - ("test_json_file.json", {"a": 0, "b": 1, "c": 2}, {}), - ("test_custom_json.json", {"z": -1, "b": 1, "y": -2, "a": 0}, {"indent": 4, "sort_keys": True}), - ], -) +@pytest.mark.parametrize(*IO_PARAMS) def test_round_trip(tmp_path, name, content, kwargs): test_path = tmp_path / name assert not test_path.exists() app.io.save(test_path, content, **kwargs) assert app.io.load(test_path) == content + + +@pytest.mark.asyncio +@pytest.mark.parametrize(*IO_PARAMS) +async def test_async_save(tmp_path, name, content, kwargs): + test_path = tmp_path / name + assert not test_path.exists() + + result = await app.io.AIO.save(test_path, content, **kwargs) + assert result == test_path + assert test_path.exists() + + +@pytest.mark.asyncio +@pytest.mark.parametrize(*IO_PARAMS) +async def test_async_round_trip(tmp_path, name, content, kwargs): + test_path = tmp_path / name + assert not test_path.exists() + + await app.io.AIO.save(test_path, content, **kwargs) + load_results = await app.io.AIO.load(test_path) + assert load_results == content
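
The shared-cache helpers added in app/caches.py are consumed by app/services/location/jhu.py through a check-then-load pattern: look the payload up by its data id, recompute only on a miss, then write the result back with load_cache(). Below is a minimal sketch of that pattern outside the JHU service; get_report and its payload are hypothetical stand-ins, while check_cache, load_cache, and get_settings are the helpers introduced in this diff.

"""Hypothetical usage sketch for app.caches and app.config as introduced above (not part of the diff)."""
import asyncio

from app.caches import check_cache, load_cache
from app.config import get_settings


async def get_report(data_id: str = "example.report") -> dict:
    """Return the cached payload when present, otherwise compute it and store it."""
    cached = await check_cache(data_id)  # backed by RedisCache or SimpleMemoryCache, depending on settings
    if cached:
        return cached
    report = {"latest": 42}  # hypothetical expensive work, standing in for the JHU CSV download
    await load_cache(data_id, report)  # kept for load_cache()'s default cache_life of 3600 seconds
    return report


if __name__ == "__main__":
    settings = get_settings()  # reads PORT / LOCAL_REDIS_URL / REDISCLOUD_URL from the environment or .env
    print(f"serving on port {settings.port}")
    print(asyncio.get_event_loop().run_until_complete(get_report()))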
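
Which backend get_cache() hands out is decided once, from the environment: app/caches.py calls get_settings() at import time and both functions are wrapped in functools.lru_cache(), so a Redis URL only takes effect if it is set before app.caches is first imported. A small sketch of that behaviour, reusing the LOCAL_REDIS_URL value from .env.example:

"""Hypothetical sketch of backend selection in app.caches (not part of the diff)."""
import os

# The URL must be in the environment before app.caches is imported: SETTINGS is built at
# module import time and get_settings()/get_cache() are memoised by functools.lru_cache().
os.environ["LOCAL_REDIS_URL"] = "redis://localhost:6379"

from app.caches import get_cache  # noqa: E402  deliberately imported after the env is prepared

CACHE = get_cache(namespace="jhu")
print(type(CACHE).__name__)  # RedisCache when a Redis URL is configured, SimpleMemoryCache otherwise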