diff --git a/.all-contributorsrc b/.all-contributorsrc
index 0f568cbd..bbf06353 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -153,6 +153,15 @@
"contributions": [
"code"
]
+ },
+ {
+ "login": "ibhuiyan17",
+ "name": "Ibtida Bhuiyan",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/33792969?v=4",
+ "profile": "http://ibtida.me",
+ "contributions": [
+ "code"
+ ]
}
],
"contributorsPerLine": 7,
diff --git a/.gitignore b/.gitignore
index efd5545c..9c41818c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -66,4 +66,7 @@ docs/_build/
target/
# OSX Stuff
-.DS_Store
\ No newline at end of file
+.DS_Store
+
+# IntelliJ/Pycharm
+.idea/
diff --git a/.travis.yml b/.travis.yml
index 39e16fce..3ea52aa9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,4 +7,8 @@ install:
- "pip install pipenv"
- "pipenv install --dev --skip-lock"
script:
- - "make test lint check-fmt"
+ - "make test"
+ - "make lint"
+ - "make check-fmt"
+after_success:
+ - coveralls
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..1fde2f33
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,35 @@
+# Contributing to Coronavirus Tracker API
+
+First off, thanks for taking the time to contribute!
+Every commit supports the open source ecosystem in the fight against [COVID-19](https://en.wikipedia.org/wiki/2019%E2%80%9320_coronavirus_pandemic).
+
+## Testing
+
+We have a handful of unit tests covering most of the functions.
+Please write new test cases for new code you create.
+
+## Submitting changes
+
+* If you're unable to find an open issue, [open a new one](https://github.com/ExpDev07/coronavirus-tracker-api/issues/new). Be sure to include a **title and clear description** and as much relevant information as possible.
+* Open a new [GitHub Pull Request to coronavirus-tracker-api](https://github.com/ExpDev07/coronavirus-tracker-api/pulls) with a clear list of what you've done (read more about [pull requests](http://help.github.com/pull-requests/)). Include the relevant issue number if applicable.
+* We will love you forever if you include unit tests. We can always use more test coverage.
+
+## Your First Code Contribution
+
+Unsure where to begin contributing to coronavirus-tracker-api? You can start by looking through these issue labels:
+
+* [Enhancement issues](https://github.com/ExpDev07/coronavirus-tracker-api/labels/enhancement) - issues requesting a new feature or improvement
+* [Help wanted issues](https://github.com/ExpDev07/coronavirus-tracker-api/labels/help%20wanted) - issues where extra attention is needed
+* [Documentation issues](https://github.com/ExpDev07/coronavirus-tracker-api/labels/documentation) - improvements or additions to documentation
+
+## Styleguide
+
+Please follow the [PEP8](https://www.python.org/dev/peps/pep-0008/) style guide.
+See the [Running Tests](./README.md#running-tests), [Linting](./README.md#linting) and [Formatting](./README.md#formatting) sections of the README for instructions on validating your changes.
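+
+As a quick reference, assuming the dev dependencies are installed (`pipenv install --dev`), the Pipfile scripts run the same checks as CI:
+
+```bash
+pipenv run test   # run the pytest suite
+pipenv run lint   # run pylint over the app package
+pipenv run fmt    # format with black and sort imports with isort
+```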
+
+
+We encourage you to pitch in and join the [Coronavirus Tracker API Team](https://github.com/ExpDev07/coronavirus-tracker-api#contributors-)!
+
+Thanks! :heart: :heart: :heart:
+
+[Coronavirus Tracker API Team](https://github.com/ExpDev07/coronavirus-tracker-api#contributors-)
diff --git a/Makefile b/Makefile
index 78f8f6b4..311b6bc4 100644
--- a/Makefile
+++ b/Makefile
@@ -15,15 +15,12 @@ APP = app
TEST = tests
test:
- $(PYTHON) `which py.test` -s -v $(TEST)
-
+ pytest -v $(TEST) --cov-report term --cov-report xml --cov=$(APP)
lint:
- pylint $(APP) || true
+ pylint $(APP)
fmt:
- isort --apply --atomic
- black . -l 120
+ invoke fmt
check-fmt:
- isort -rc --check
- black . --check --diff
+ invoke check --fmt --sort
diff --git a/Pipfile b/Pipfile
index 6b30ef90..b337c22a 100644
--- a/Pipfile
+++ b/Pipfile
@@ -4,21 +4,31 @@ url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
+async-asgi-testclient = "*"
+async_generator = "*"
+asyncmock = "*"
bandit = "*"
black = "==19.10b0"
+coveralls = "*"
+importlib-metadata = {version="*", markers="python_version<'3.8'"}
+invoke = "*"
isort = "*"
-pytest = "*"
pylint = "*"
+pytest = "*"
+pytest-asyncio = "*"
+pytest-cov = "*"
[packages]
+aiohttp = "*"
+asyncache = "*"
+cachetools = "*"
+dataclasses = {version="*", markers="python_version<'3.7'"}
fastapi = "*"
-flask = "*"
-python-dotenv = "*"
-requests = "*"
gunicorn = "*"
-flask-cors = "*"
-cachetools = "*"
+idna_ssl = {version="*", markers="python_version<'3.7'"}
python-dateutil = "*"
+python-dotenv = "*"
+requests = "*"
uvicorn = "*"
[requires]
@@ -27,5 +37,7 @@ python_version = "3.8"
[scripts]
dev = "uvicorn app.main:APP --reload"
start = "uvicorn app.main:APP"
-fmt = "black . -l 120"
-sort = "isort --apply --atomic"
+fmt = "invoke fmt"
+sort = "invoke sort"
+lint = "invoke lint"
+test = "invoke test"
diff --git a/Pipfile.lock b/Pipfile.lock
index c949f6cb..a699f880 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "ecd83aad2c3783fdaa5581f562d022a6b500b3f3b4beb7c3f63d3d5baff85813"
+ "sha256": "1911b081cecdda482b2a9c7c03ebba985c447846506b607df01563600c23126b"
},
"pipfile-spec": 6,
"requires": {
@@ -16,6 +16,45 @@
]
},
"default": {
+ "aiohttp": {
+ "hashes": [
+ "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e",
+ "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326",
+ "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a",
+ "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654",
+ "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a",
+ "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4",
+ "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17",
+ "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec",
+ "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd",
+ "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48",
+ "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59",
+ "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965"
+ ],
+ "index": "pypi",
+ "version": "==3.6.2"
+ },
+ "async-timeout": {
+ "hashes": [
+ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f",
+ "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"
+ ],
+ "version": "==3.0.1"
+ },
+ "asyncache": {
+ "hashes": [
+ "sha256:c741b3ccef2c5291b3da05d97bab3cc8d50f2ac8efd7fd79d47e3d7b6a3774de"
+ ],
+ "index": "pypi",
+ "version": "==0.1.1"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
+ "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
+ ],
+ "version": "==19.3.0"
+ },
"cachetools": {
"hashes": [
"sha256:9a52dd97a85f257f4e4127f15818e71a0c7899f121b34591fcc1173ea79a0198",
@@ -45,29 +84,22 @@
],
"version": "==7.1.1"
},
- "fastapi": {
- "hashes": [
- "sha256:532648b4e16dd33673d71dc0b35dff1b4d20c709d04078010e258b9f3a79771a",
- "sha256:721b11d8ffde52c669f52741b6d9d761fe2e98778586f4cfd6f5e47254ba5016"
- ],
- "index": "pypi",
- "version": "==0.52.0"
- },
- "flask": {
+ "dataclasses": {
"hashes": [
- "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52",
- "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6"
+ "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f",
+ "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"
],
"index": "pypi",
- "version": "==1.1.1"
+ "markers": "python_version < '3.7'",
+ "version": "==0.6"
},
- "flask-cors": {
+ "fastapi": {
"hashes": [
- "sha256:72170423eb4612f0847318afff8c247b38bd516b7737adfc10d1c2cdbb382d16",
- "sha256:f4d97201660e6bbcff2d89d082b5b6d31abee04b1b3003ee073a6fd25ad1d69a"
+ "sha256:a5cb9100d5f2b5dd82addbc2cdf8009258bce45b03ba21d3f5eecc88c7b5a716",
+ "sha256:cf26d47ede6bc6e179df951312f55fea7d4005dd53370245e216436ca4e22f22"
],
"index": "pypi",
- "version": "==3.0.8"
+ "version": "==0.53.2"
},
"gunicorn": {
"hashes": [
@@ -109,57 +141,35 @@
],
"version": "==2.9"
},
- "itsdangerous": {
+ "idna-ssl": {
"hashes": [
- "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
- "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
+ "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"
],
+ "index": "pypi",
+ "markers": "python_version < '3.7'",
"version": "==1.1.0"
},
- "jinja2": {
- "hashes": [
- "sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250",
- "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49"
- ],
- "version": "==2.11.1"
- },
- "markupsafe": {
- "hashes": [
- "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
- "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
- "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
- "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
- "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
- "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
- "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
- "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
- "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
- "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
- "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
- "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
- "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
- "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
- "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
- "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
- "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
- "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
- "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
- "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
- "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
- "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
- "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
- "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
- "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
- "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
- "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
- "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
- "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
- "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
- "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
- "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
- "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
- ],
- "version": "==1.1.1"
+ "multidict": {
+ "hashes": [
+ "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1",
+ "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35",
+ "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928",
+ "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969",
+ "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e",
+ "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78",
+ "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1",
+ "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136",
+ "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8",
+ "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2",
+ "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e",
+ "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4",
+ "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5",
+ "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd",
+ "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab",
+ "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20",
+ "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3"
+ ],
+ "version": "==4.7.5"
},
"pydantic": {
"hashes": [
@@ -275,12 +285,27 @@
],
"version": "==8.1"
},
- "werkzeug": {
- "hashes": [
- "sha256:169ba8a33788476292d04186ab33b01d6add475033dfc07215e6d219cc077096",
- "sha256:6dc65cf9091cf750012f56f2cad759fa9e879f511b5ff8685e456b4e3bf90d16"
- ],
- "version": "==1.0.0"
+ "yarl": {
+ "hashes": [
+ "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce",
+ "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6",
+ "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce",
+ "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae",
+ "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d",
+ "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f",
+ "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b",
+ "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b",
+ "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb",
+ "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462",
+ "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea",
+ "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70",
+ "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1",
+ "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a",
+ "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b",
+ "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080",
+ "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2"
+ ],
+ "version": "==1.4.2"
}
},
"develop": {
@@ -298,6 +323,29 @@
],
"version": "==2.3.3"
},
+ "async-asgi-testclient": {
+ "hashes": [
+ "sha256:e961c61123eca6dc30c4f67df7fe8a3f695ca9c8b013d97272b930d6d5af4509"
+ ],
+ "index": "pypi",
+ "version": "==1.4.4"
+ },
+ "async-generator": {
+ "hashes": [
+ "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b",
+ "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"
+ ],
+ "index": "pypi",
+ "version": "==1.10"
+ },
+ "asyncmock": {
+ "hashes": [
+ "sha256:c251889d542e98fe5f7ece2b5b8643b7d62b50a5657d34a4cbce8a1d5170d750",
+ "sha256:fd8bc4e7813251a8959d1140924ccba3adbbc7af885dba7047c67f73c0b664b1"
+ ],
+ "index": "pypi",
+ "version": "==0.4.2"
+ },
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
@@ -321,6 +369,20 @@
"index": "pypi",
"version": "==19.10b0"
},
+ "certifi": {
+ "hashes": [
+ "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
+ "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
+ ],
+ "version": "==2019.11.28"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
"click": {
"hashes": [
"sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc",
@@ -328,6 +390,56 @@
],
"version": "==7.1.1"
},
+ "coverage": {
+ "hashes": [
+ "sha256:03f630aba2b9b0d69871c2e8d23a69b7fe94a1e2f5f10df5049c0df99db639a0",
+ "sha256:046a1a742e66d065d16fb564a26c2a15867f17695e7f3d358d7b1ad8a61bca30",
+ "sha256:0a907199566269e1cfa304325cc3b45c72ae341fbb3253ddde19fa820ded7a8b",
+ "sha256:165a48268bfb5a77e2d9dbb80de7ea917332a79c7adb747bd005b3a07ff8caf0",
+ "sha256:1b60a95fc995649464e0cd48cecc8288bac5f4198f21d04b8229dc4097d76823",
+ "sha256:1f66cf263ec77af5b8fe14ef14c5e46e2eb4a795ac495ad7c03adc72ae43fafe",
+ "sha256:2e08c32cbede4a29e2a701822291ae2bc9b5220a971bba9d1e7615312efd3037",
+ "sha256:3844c3dab800ca8536f75ae89f3cf566848a3eb2af4d9f7b1103b4f4f7a5dad6",
+ "sha256:408ce64078398b2ee2ec08199ea3fcf382828d2f8a19c5a5ba2946fe5ddc6c31",
+ "sha256:443be7602c790960b9514567917af538cac7807a7c0c0727c4d2bbd4014920fd",
+ "sha256:4482f69e0701139d0f2c44f3c395d1d1d37abd81bfafbf9b6efbe2542679d892",
+ "sha256:4a8a259bf990044351baf69d3b23e575699dd60b18460c71e81dc565f5819ac1",
+ "sha256:513e6526e0082c59a984448f4104c9bf346c2da9961779ede1fc458e8e8a1f78",
+ "sha256:5f587dfd83cb669933186661a351ad6fc7166273bc3e3a1531ec5c783d997aac",
+ "sha256:62061e87071497951155cbccee487980524d7abea647a1b2a6eb6b9647df9006",
+ "sha256:641e329e7f2c01531c45c687efcec8aeca2a78a4ff26d49184dce3d53fc35014",
+ "sha256:65a7e00c00472cd0f59ae09d2fb8a8aaae7f4a0cf54b2b74f3138d9f9ceb9cb2",
+ "sha256:6ad6ca45e9e92c05295f638e78cd42bfaaf8ee07878c9ed73e93190b26c125f7",
+ "sha256:73aa6e86034dad9f00f4bbf5a666a889d17d79db73bc5af04abd6c20a014d9c8",
+ "sha256:7c9762f80a25d8d0e4ab3cb1af5d9dffbddb3ee5d21c43e3474c84bf5ff941f7",
+ "sha256:85596aa5d9aac1bf39fe39d9fa1051b0f00823982a1de5766e35d495b4a36ca9",
+ "sha256:86a0ea78fd851b313b2e712266f663e13b6bc78c2fb260b079e8b67d970474b1",
+ "sha256:8a620767b8209f3446197c0e29ba895d75a1e272a36af0786ec70fe7834e4307",
+ "sha256:922fb9ef2c67c3ab20e22948dcfd783397e4c043a5c5fa5ff5e9df5529074b0a",
+ "sha256:9fad78c13e71546a76c2f8789623eec8e499f8d2d799f4b4547162ce0a4df435",
+ "sha256:a37c6233b28e5bc340054cf6170e7090a4e85069513320275a4dc929144dccf0",
+ "sha256:c3fc325ce4cbf902d05a80daa47b645d07e796a80682c1c5800d6ac5045193e5",
+ "sha256:cda33311cb9fb9323958a69499a667bd728a39a7aa4718d7622597a44c4f1441",
+ "sha256:db1d4e38c9b15be1521722e946ee24f6db95b189d1447fa9ff18dd16ba89f732",
+ "sha256:eda55e6e9ea258f5e4add23bcf33dc53b2c319e70806e180aecbff8d90ea24de",
+ "sha256:f372cdbb240e09ee855735b9d85e7f50730dcfb6296b74b95a3e5dea0615c4c1"
+ ],
+ "version": "==5.0.4"
+ },
+ "coveralls": {
+ "hashes": [
+ "sha256:4b6bfc2a2a77b890f556bc631e35ba1ac21193c356393b66c84465c06218e135",
+ "sha256:67188c7ec630c5f708c31552f2bcdac4580e172219897c4136504f14b823132f"
+ ],
+ "index": "pypi",
+ "version": "==1.11.1"
+ },
+ "docopt": {
+ "hashes": [
+ "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
+ ],
+ "version": "==0.6.2"
+ },
"gitdb": {
"hashes": [
"sha256:284a6a4554f954d6e737cddcff946404393e030b76a282c6640df8efd6b3da5e",
@@ -342,6 +454,31 @@
],
"version": "==3.1.0"
},
+ "idna": {
+ "hashes": [
+ "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
+ "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
+ ],
+ "version": "==2.9"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f",
+ "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"
+ ],
+ "index": "pypi",
+ "markers": "python_version < '3.8'",
+ "version": "==1.6.0"
+ },
+ "invoke": {
+ "hashes": [
+ "sha256:87b3ef9d72a1667e104f89b159eaf8a514dbf2f3576885b2bbdefe74c3fb2132",
+ "sha256:93e12876d88130c8e0d7fd6618dd5387d6b36da55ad541481dfa5e001656f134",
+ "sha256:de3f23bfe669e3db1085789fd859eb8ca8e0c5d9c20811e2407fa042e8a5e15d"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
"isort": {
"hashes": [
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
@@ -383,6 +520,13 @@
],
"version": "==0.6.1"
},
+ "mock": {
+ "hashes": [
+ "sha256:3f9b2c0196c60d21838f307f5825a7b86b678cedc58ab9e50a8988187b4d81e0",
+ "sha256:dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72"
+ ],
+ "version": "==4.0.2"
+ },
"more-itertools": {
"hashes": [
"sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c",
@@ -390,6 +534,28 @@
],
"version": "==8.2.0"
},
+ "multidict": {
+ "hashes": [
+ "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1",
+ "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35",
+ "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928",
+ "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969",
+ "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e",
+ "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78",
+ "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1",
+ "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136",
+ "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8",
+ "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2",
+ "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e",
+ "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4",
+ "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5",
+ "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd",
+ "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab",
+ "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20",
+ "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3"
+ ],
+ "version": "==4.7.5"
+ },
"packaging": {
"hashes": [
"sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3",
@@ -448,6 +614,22 @@
"index": "pypi",
"version": "==5.4.1"
},
+ "pytest-asyncio": {
+ "hashes": [
+ "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
+ "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
+ ],
+ "index": "pypi",
+ "version": "==0.10.0"
+ },
+ "pytest-cov": {
+ "hashes": [
+ "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b",
+ "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"
+ ],
+ "index": "pypi",
+ "version": "==2.8.1"
+ },
"pyyaml": {
"hashes": [
"sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
@@ -490,6 +672,14 @@
],
"version": "==2020.2.20"
},
+ "requests": {
+ "hashes": [
+ "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
+ "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
+ ],
+ "index": "pypi",
+ "version": "==2.23.0"
+ },
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
@@ -544,6 +734,13 @@
],
"version": "==1.4.1"
},
+ "urllib3": {
+ "hashes": [
+ "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
+ "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
+ ],
+ "version": "==1.25.8"
+ },
"wcwidth": {
"hashes": [
"sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1",
@@ -556,6 +753,13 @@
"sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
],
"version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
+ "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
+ ],
+ "version": "==3.1.0"
}
}
}
diff --git a/README.md b/README.md
index 87be1edd..8fb2e962 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,7 @@ Provides up-to-date data about Coronavirus outbreak. Includes numbers about conf
Support multiple data-sources.

+[](https://coveralls.io/github/ExpDev07/coronavirus-tracker-api?branch=master)
[](LICENSE.md)
[](#contributors-)
[](https://github.com/ExpDev07/coronavirus-tracker-api/stargazers)
@@ -13,6 +14,7 @@ Support multiple data-sources.
[](https://github.com/ExpDev07/coronavirus-tracker-api/commits/master)
[](https://github.com/ExpDev07/coronavirus-tracker-api/pulls)
[](https://github.com/ExpDev07/coronavirus-tracker-api/issues)
+[](https://lgtm.com/projects/g/ExpDev07/coronavirus-tracker-api/alerts/)
[](https://github.com/psf/black)
[](https://twitter.com/intent/tweet?text=COVID19%20Live%20Tracking%20API:%20&url=https%3A%2F%2Fgithub.com%2FExpDev07%2Fcoronavirus-tracker-api)
@@ -391,7 +393,6 @@ These are the available API wrappers created by the community. They are not nece
You will need the following things properly installed on your computer.
* [Python 3](https://www.python.org/downloads/) (with pip)
-* [Flask](https://pypi.org/project/Flask/)
* [pipenv](https://pypi.org/project/pipenv/)
## Installation
@@ -415,32 +416,26 @@ And don't despair if don't get the python setup working on the first try. No one
* Visit your app at [http://localhost:5000](http://localhost:5000).
### Running Tests
+> [pytest](https://docs.pytest.org/en/latest/)
```bash
-pipenv sync --dev
-pipenv shell
-make test
+pipenv run test
```
+
### Linting
+> [pylint](https://www.pylint.org/)
```bash
-pipenv sync --dev
-pipenv shell
-make lint
+pipenv run lint
```
### Formatting
+> [black](https://black.readthedocs.io/en/stable/)
```bash
pipenv run fmt
```
-or
-```bash
-pipenv shell
-make fmt
-```
-
### Building
@@ -474,6 +469,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
 Turreted 💻 |
+  Ibtida Bhuiyan 💻 |
diff --git a/app/__init__.py b/app/__init__.py
index 16847abb..c43ae7ac 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -1,4 +1,7 @@
+"""
+Corona Virus Tracker API
+~~~~~~~~~~~~~~~~~~~~~~~~
+API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak.
+"""
# See PEP396.
-__version__ = "2.0"
-
-from .core import create_app
+__version__ = "2.0.1"
diff --git a/app/config/settings.py b/app/config/settings.py
index 27c907bd..4a02a734 100644
--- a/app/config/settings.py
+++ b/app/config/settings.py
@@ -1,3 +1,4 @@
+"""app.config.settings.py"""
import os
# Load enviroment variables from .env file.
@@ -5,7 +6,5 @@
load_dotenv()
-"""
-The port to serve the app application on.
-"""
-PORT = int(os.getenv("PORT", 5000))
+# The port to serve the app application on.
+PORT = int(os.getenv("PORT", "5000"))
diff --git a/app/coordinates.py b/app/coordinates.py
index cc27a8e3..be972c6e 100644
--- a/app/coordinates.py
+++ b/app/coordinates.py
@@ -1,3 +1,6 @@
+"""app.coordinates.py"""
+
+
class Coordinates:
"""
A position on earth using decimal coordinates (latitude and longitude).
diff --git a/app/core.py b/app/core.py
deleted file mode 100644
index ef22b686..00000000
--- a/app/core.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from flask import Flask
-from flask_cors import CORS
-
-
-def create_app():
- """
- Construct the core application.
- """
- # Create flask app with CORS enabled.
- app = Flask(__name__)
- CORS(app)
-
- # Set app config from settings.
- app.config.from_pyfile("config/settings.py")
-
- with app.app_context():
- # Import routes.
- from . import routes
-
- # Register api endpoints.
- app.register_blueprint(routes.api_v1)
-
- # Return created app.
- return app
diff --git a/app/data/__init__.py b/app/data/__init__.py
index 73468add..aef58e8c 100644
--- a/app/data/__init__.py
+++ b/app/data/__init__.py
@@ -1,8 +1,9 @@
+"""app.data"""
from ..services.location.csbs import CSBSLocationService
from ..services.location.jhu import JhuLocationService
# Mapping of services to data-sources.
-data_sources = {"jhu": JhuLocationService(), "csbs": CSBSLocationService()}
+DATA_SOURCES = {"jhu": JhuLocationService(), "csbs": CSBSLocationService()}
def data_source(source):
@@ -12,4 +13,4 @@ def data_source(source):
:returns: The service.
:rtype: LocationService
"""
- return data_sources.get(source.lower())
+ return DATA_SOURCES.get(source.lower())
diff --git a/app/location/__init__.py b/app/location/__init__.py
index 4782fddb..d12f28c3 100644
--- a/app/location/__init__.py
+++ b/app/location/__init__.py
@@ -1,14 +1,18 @@
+"""app.location"""
from ..coordinates import Coordinates
from ..utils import countries
from ..utils.populations import country_population
-class Location:
+# pylint: disable=redefined-builtin,invalid-name
+class Location: # pylint: disable=too-many-instance-attributes
"""
A location in the world affected by the coronavirus.
"""
- def __init__(self, id, country, province, coordinates, last_updated, confirmed, deaths, recovered):
+ def __init__(
+ self, id, country, province, coordinates, last_updated, confirmed, deaths, recovered
+ ): # pylint: disable=too-many-arguments
# General info.
self.id = id
self.country = country.strip()
@@ -31,7 +35,7 @@ def country_code(self):
:returns: The country code.
:rtype: str
"""
- return (countries.country_code(self.country) or countries.default_country_code).upper()
+ return (countries.country_code(self.country) or countries.DEFAULT_COUNTRY_CODE).upper()
@property
def country_population(self):
@@ -71,6 +75,7 @@ class TimelinedLocation(Location):
A location with timelines.
"""
+ # pylint: disable=too-many-arguments
def __init__(self, id, country, province, coordinates, last_updated, timelines):
super().__init__(
# General info.
@@ -88,6 +93,7 @@ def __init__(self, id, country, province, coordinates, last_updated, timelines):
# Set timelines.
self.timelines = timelines
+ # pylint: disable=arguments-differ
def serialize(self, timelines=False):
"""
Serializes the location into a dict.
diff --git a/app/location/csbs.py b/app/location/csbs.py
index 0b7c27f8..649e8b22 100644
--- a/app/location/csbs.py
+++ b/app/location/csbs.py
@@ -1,3 +1,4 @@
+"""app.locations.csbs.py"""
from . import Location
@@ -6,6 +7,7 @@ class CSBSLocation(Location):
A CSBS (county) location.
"""
+ # pylint: disable=too-many-arguments,redefined-builtin
def __init__(self, id, state, county, coordinates, last_updated, confirmed, deaths):
super().__init__(
# General info.
@@ -23,7 +25,7 @@ def __init__(self, id, state, county, coordinates, last_updated, confirmed, deat
self.state = state
self.county = county
- def serialize(self, timelines=False):
+ def serialize(self, timelines=False): # pylint: disable=arguments-differ,unused-argument
"""
Serializes the location into a dict.
diff --git a/app/main.py b/app/main.py
index 44876182..0018f8bf 100644
--- a/app/main.py
+++ b/app/main.py
@@ -1,24 +1,19 @@
"""
app.main.py
"""
-import datetime as dt
import logging
import os
-import reprlib
import pydantic
import uvicorn
from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
-from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.responses import JSONResponse
-from .core import create_app
from .data import data_source
-from .models.latest import LatestResponse as Latest
-from .models.location import LocationResponse as Location
-from .models.location import LocationsResponse as Locations
-from .router import router
+from .router.v1 import V1
+from .router.v2 import V2
+from .utils.httputils import setup_client_session, teardown_client_session
# ############
# FastAPI App
@@ -27,10 +22,15 @@
APP = FastAPI(
title="Coronavirus Tracker",
- description="API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak. Project page: https://github.com/ExpDev07/coronavirus-tracker-api.",
+ description=(
+ "API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak."
+ " Project page: https://github.com/ExpDev07/coronavirus-tracker-api."
+ ),
version="2.0.1",
docs_url="/",
redoc_url="/docs",
+ on_startup=[setup_client_session],
+ on_shutdown=[teardown_client_session],
)
# #####################
@@ -42,7 +42,7 @@
CORSMiddleware, allow_credentials=True, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"],
)
-# TODO this could probably just be a FastAPI dependency.
+
@APP.middleware("http")
async def add_datasource(request: Request, call_next):
"""
@@ -70,7 +70,9 @@ async def add_datasource(request: Request, call_next):
@APP.exception_handler(pydantic.error_wrappers.ValidationError)
-async def handle_validation_error(request: Request, exc: pydantic.error_wrappers.ValidationError):
+async def handle_validation_error(
+ request: Request, exc: pydantic.error_wrappers.ValidationError
+): # pylint: disable=unused-argument
"""
Handles validation errors.
"""
@@ -83,14 +85,12 @@ async def handle_validation_error(request: Request, exc: pydantic.error_wrappers
# Include routers.
-APP.include_router(router, prefix="/v2", tags=["v2"])
+APP.include_router(V1, prefix="", tags=["v1"])
+APP.include_router(V2, prefix="/v2", tags=["v2"])
-# mount the existing Flask app
-# v1 @ /
-APP.mount("/", WSGIMiddleware(create_app()))
# Running of app.
if __name__ == "__main__":
uvicorn.run(
- "app.main:APP", host="127.0.0.1", port=int(os.getenv("PORT", 5000)), log_level="info",
+ "app.main:APP", host="127.0.0.1", port=int(os.getenv("PORT", "5000")), log_level="info",
)
diff --git a/app/router/__init__.py b/app/router/__init__.py
index eefb5f0a..4eda6c21 100644
--- a/app/router/__init__.py
+++ b/app/router/__init__.py
@@ -1,7 +1,8 @@
+"""app.router"""
from fastapi import APIRouter
-# Create the router.
-router = APIRouter()
+# pylint: disable=redefined-builtin
+from .v1 import all, confirmed, deaths, recovered
# The routes.
-from . import latest, sources, locations # isort:skip
+from .v2 import latest, sources, locations # isort:skip
diff --git a/app/router/sources.py b/app/router/sources.py
deleted file mode 100644
index 538921f4..00000000
--- a/app/router/sources.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from ..data import data_sources
-from . import router
-
-
-@router.get("/sources")
-async def sources():
- """
- Retrieves a list of data-sources that are availble to use.
- """
- return {"sources": list(data_sources.keys())}
diff --git a/app/router/v1/__init__.py b/app/router/v1/__init__.py
new file mode 100644
index 00000000..839bd212
--- /dev/null
+++ b/app/router/v1/__init__.py
@@ -0,0 +1,4 @@
+"""app.router.v1"""
+from fastapi import APIRouter
+
+V1 = APIRouter()
diff --git a/app/router/v1/all.py b/app/router/v1/all.py
new file mode 100644
index 00000000..91b9e826
--- /dev/null
+++ b/app/router/v1/all.py
@@ -0,0 +1,20 @@
+"""app.router.v1.all.py"""
+from ...services.location.jhu import get_category
+from . import V1
+
+
+@V1.get("/all")
+async def all(): # pylint: disable=redefined-builtin
+ """Get all the categories."""
+ confirmed = await get_category("confirmed")
+ deaths = await get_category("deaths")
+ recovered = await get_category("recovered")
+
+ return {
+ # Data.
+ "confirmed": confirmed,
+ "deaths": deaths,
+ "recovered": recovered,
+ # Latest.
+ "latest": {"confirmed": confirmed["latest"], "deaths": deaths["latest"], "recovered": recovered["latest"],},
+ }
diff --git a/app/router/v1/confirmed.py b/app/router/v1/confirmed.py
new file mode 100644
index 00000000..13365e32
--- /dev/null
+++ b/app/router/v1/confirmed.py
@@ -0,0 +1,11 @@
+"""app.router.v1.confirmed.py"""
+from ...services.location.jhu import get_category
+from . import V1
+
+
+@V1.get("/confirmed")
+async def confirmed():
+ """Confirmed cases."""
+ confirmed_data = await get_category("confirmed")
+
+ return confirmed_data
diff --git a/app/router/v1/deaths.py b/app/router/v1/deaths.py
new file mode 100644
index 00000000..fb45498c
--- /dev/null
+++ b/app/router/v1/deaths.py
@@ -0,0 +1,11 @@
+"""app.router.v1.deaths.py"""
+from ...services.location.jhu import get_category
+from . import V1
+
+
+@V1.get("/deaths")
+async def deaths():
+ """Total deaths."""
+ deaths_data = await get_category("deaths")
+
+ return deaths_data
diff --git a/app/router/v1/recovered.py b/app/router/v1/recovered.py
new file mode 100644
index 00000000..3a3a85b7
--- /dev/null
+++ b/app/router/v1/recovered.py
@@ -0,0 +1,11 @@
+"""app.router.v1.recovered.py"""
+from ...services.location.jhu import get_category
+from . import V1
+
+
+@V1.get("/recovered")
+async def recovered():
+ """Recovered cases."""
+ recovered_data = await get_category("recovered")
+
+ return recovered_data
diff --git a/app/router/v2/__init__.py b/app/router/v2/__init__.py
new file mode 100644
index 00000000..62c31905
--- /dev/null
+++ b/app/router/v2/__init__.py
@@ -0,0 +1,4 @@
+"""app.router.v2"""
+from fastapi import APIRouter
+
+V2 = APIRouter()
diff --git a/app/router/latest.py b/app/router/v2/latest.py
similarity index 55%
rename from app/router/latest.py
rename to app/router/v2/latest.py
index 81b254cf..105b16fe 100644
--- a/app/router/latest.py
+++ b/app/router/v2/latest.py
@@ -1,16 +1,17 @@
+"""app.router.v2.latest.py"""
from fastapi import Request
-from ..enums.sources import Sources
-from ..models.latest import LatestResponse as Latest
-from . import router
+from ...enums.sources import Sources
+from ...models.latest import LatestResponse as Latest
+from . import V2
-@router.get("/latest", response_model=Latest)
-def get_latest(request: Request, source: Sources = "jhu"):
+@V2.get("/latest", response_model=Latest)
+async def get_latest(request: Request, source: Sources = "jhu"): # pylint: disable=unused-argument
"""
Getting latest amount of total confirmed cases, deaths, and recoveries.
"""
- locations = request.state.source.get_all()
+ locations = await request.state.source.get_all()
return {
"latest": {
"confirmed": sum(map(lambda location: location.confirmed, locations)),
diff --git a/app/router/locations.py b/app/router/v2/locations.py
similarity index 66%
rename from app/router/locations.py
rename to app/router/v2/locations.py
index af4b1cfd..649f9c9e 100644
--- a/app/router/locations.py
+++ b/app/router/v2/locations.py
@@ -1,13 +1,15 @@
+"""app.router.v2.locations.py"""
from fastapi import HTTPException, Request
-from ..enums.sources import Sources
-from ..models.location import LocationResponse as Location
-from ..models.location import LocationsResponse as Locations
-from . import router
+from ...enums.sources import Sources
+from ...models.location import LocationResponse as Location
+from ...models.location import LocationsResponse as Locations
+from . import V2
-@router.get("/locations", response_model=Locations, response_model_exclude_unset=True)
-def get_locations(
+# pylint: disable=unused-argument,too-many-arguments,redefined-builtin
+@V2.get("/locations", response_model=Locations, response_model_exclude_unset=True)
+async def get_locations(
request: Request,
source: Sources = "jhu",
country_code: str = None,
@@ -26,7 +28,7 @@ def get_locations(
params.pop("timelines", None)
# Retrieve all the locations.
- locations = request.state.source.get_all()
+ locations = await request.state.source.get_all()
# Attempt to filter out locations with properties matching the provided query params.
for key, value in params.items():
@@ -53,9 +55,11 @@ def get_locations(
}
-@router.get("/locations/{id}", response_model=Location)
-def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True):
+# pylint: disable=invalid-name
+@V2.get("/locations/{id}", response_model=Location)
+async def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True):
"""
Getting specific location by id.
"""
- return {"location": request.state.source.get(id).serialize(timelines)}
+ location = await request.state.source.get(id)
+ return {"location": location.serialize(timelines)}
diff --git a/app/router/v2/sources.py b/app/router/v2/sources.py
new file mode 100644
index 00000000..ad906e51
--- /dev/null
+++ b/app/router/v2/sources.py
@@ -0,0 +1,11 @@
+"""app.router.v2.sources.py"""
+from ...data import DATA_SOURCES
+from . import V2
+
+
+@V2.get("/sources")
+async def sources():
+ """
+    Retrieves a list of data-sources that are available to use.
+ """
+ return {"sources": list(DATA_SOURCES.keys())}
diff --git a/app/routes/__init__.py b/app/routes/__init__.py
deleted file mode 100644
index 2a584490..00000000
--- a/app/routes/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""
-app.routes
-
-isort:skip_file
-"""
-from flask import Blueprint, redirect, request, abort, current_app as app
-from ..data import data_source
-
-# Follow the import order to avoid circular dependency
-api_v1 = Blueprint("api_v1", __name__, url_prefix="")
-
-# API version 1.
-from .v1 import confirmed, deaths, recovered, all
diff --git a/app/routes/v1/__init__.py b/app/routes/v1/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/app/routes/v1/all.py b/app/routes/v1/all.py
deleted file mode 100644
index 9638c4bd..00000000
--- a/app/routes/v1/all.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from flask import jsonify
-
-from ...routes import api_v1 as api
-from ...services.location.jhu import get_category
-
-
-@api.route("/all")
-def all():
- # Get all the categories.
- confirmed = get_category("confirmed")
- deaths = get_category("deaths")
- recovered = get_category("recovered")
-
- return jsonify(
- {
- # Data.
- "confirmed": confirmed,
- "deaths": deaths,
- "recovered": recovered,
- # Latest.
- "latest": {"confirmed": confirmed["latest"], "deaths": deaths["latest"], "recovered": recovered["latest"],},
- }
- )
diff --git a/app/routes/v1/confirmed.py b/app/routes/v1/confirmed.py
deleted file mode 100644
index 85cfe039..00000000
--- a/app/routes/v1/confirmed.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from flask import jsonify
-
-from ...routes import api_v1 as api
-from ...services.location.jhu import get_category
-
-
-@api.route("/confirmed")
-def confirmed():
- return jsonify(get_category("confirmed"))
diff --git a/app/routes/v1/deaths.py b/app/routes/v1/deaths.py
deleted file mode 100644
index cb65874b..00000000
--- a/app/routes/v1/deaths.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from flask import jsonify
-
-from ...routes import api_v1 as api
-from ...services.location.jhu import get_category
-
-
-@api.route("/deaths")
-def deaths():
- return jsonify(get_category("deaths"))
diff --git a/app/routes/v1/recovered.py b/app/routes/v1/recovered.py
deleted file mode 100644
index be5fe646..00000000
--- a/app/routes/v1/recovered.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from flask import jsonify
-
-from ...routes import api_v1 as api
-from ...services.location.jhu import get_category
-
-
-@api.route("/recovered")
-def recovered():
- return jsonify(get_category("recovered"))
diff --git a/app/services/location/__init__.py b/app/services/location/__init__.py
index 80b5e05c..6d292b54 100644
--- a/app/services/location/__init__.py
+++ b/app/services/location/__init__.py
@@ -1,3 +1,4 @@
+"""app.services.location"""
from abc import ABC, abstractmethod
@@ -7,7 +8,7 @@ class LocationService(ABC):
"""
@abstractmethod
- def get_all(self):
+ async def get_all(self):
"""
Gets and returns all of the locations.
@@ -17,7 +18,7 @@ def get_all(self):
raise NotImplementedError
@abstractmethod
- def get(self, id):
+ async def get(self, id): # pylint: disable=redefined-builtin,invalid-name
"""
Gets and returns location with the provided id.
diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py
index 6a13f41e..dbd8d82d 100644
--- a/app/services/location/csbs.py
+++ b/app/services/location/csbs.py
@@ -1,41 +1,46 @@
+"""app.services.location.csbs.py"""
import csv
from datetime import datetime
-import requests
-from cachetools import TTLCache, cached
+from asyncache import cached
+from cachetools import TTLCache
from ...coordinates import Coordinates
from ...location.csbs import CSBSLocation
+from ...utils import httputils
from . import LocationService
class CSBSLocationService(LocationService):
"""
- Servive for retrieving locations from csbs
+ Service for retrieving locations from csbs
"""
- def get_all(self):
- # Get the locations
- return get_locations()
+ async def get_all(self):
+ # Get the locations.
+ locations = await get_locations()
+ return locations
- def get(self, id):
- return self.get_all()[id]
+ async def get(self, loc_id): # pylint: disable=arguments-differ
+ # Get location at the index equal to the provided id.
+ locations = await self.get_all()
+ return locations[loc_id]
# Base URL for fetching data
-base_url = "https://facts.csbs.org/covid-19/covid19_county.csv"
+BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
@cached(cache=TTLCache(maxsize=1, ttl=3600))
-def get_locations():
+async def get_locations():
"""
Retrieves county locations; locations are cached for 1 hour
:returns: The locations.
:rtype: dict
"""
- request = requests.get(base_url)
- text = request.text
+ async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+ text = await response.text()
data = list(csv.DictReader(text.splitlines()))
@@ -47,11 +52,11 @@ def get_locations():
county = item["County Name"]
# Ensure country is specified.
- if county == "Unassigned" or county == "Unknown":
+ if county in {"Unassigned", "Unknown"}:
continue
# Coordinates.
- coordinates = Coordinates(item["Latitude"], item["Longitude"])
+ coordinates = Coordinates(item["Latitude"], item["Longitude"]) # pylint: disable=unused-variable
# Date string without "EDT" at end.
last_update = " ".join(item["Last Update"].split(" ")[0:2])
diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py
index ef99dddc..316de367 100644
--- a/app/services/location/jhu.py
+++ b/app/services/location/jhu.py
@@ -1,14 +1,16 @@
+"""app.services.location.jhu.py"""
import csv
from datetime import datetime
-import requests
-from cachetools import TTLCache, cached
+from asyncache import cached
+from cachetools import TTLCache
from ...coordinates import Coordinates
from ...location import TimelinedLocation
from ...timeline import Timeline
from ...utils import countries
from ...utils import date as date_util
+from ...utils import httputils
from . import LocationService
@@ -17,28 +19,28 @@ class JhuLocationService(LocationService):
Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19).
"""
- def get_all(self):
+ async def get_all(self):
# Get the locations.
- return get_locations()
+ locations = await get_locations()
+ return locations
- def get(self, id):
+ async def get(self, loc_id): # pylint: disable=arguments-differ
# Get location at the index equal to provided id.
- return self.get_all()[id]
+ locations = await self.get_all()
+ return locations[loc_id]
# ---------------------------------------------------------------
-"""
-Base URL for fetching category.
-"""
-base_url = (
+# Base URL for fetching category.
+BASE_URL = (
"https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
)
@cached(cache=TTLCache(maxsize=1024, ttl=3600))
-def get_category(category):
+async def get_category(category):
"""
Retrieves the data for the provided category. The data is cached for 1 hour.
@@ -50,11 +52,11 @@ def get_category(category):
category = category.lower()
# URL to request data from.
- url = base_url + "time_series_covid19_%s_global.csv" % category
+ url = BASE_URL + "time_series_covid19_%s_global.csv" % category
# Request the data
- request = requests.get(url)
- text = request.text
+ async with httputils.CLIENT_SESSION.get(url) as response:
+ text = await response.text()
# Parse the CSV.
data = list(csv.DictReader(text.splitlines()))
@@ -104,7 +106,7 @@ def get_category(category):
@cached(cache=TTLCache(maxsize=1024, ttl=3600))
-def get_locations():
+async def get_locations():
"""
Retrieves the locations from the categories. The locations are cached for 1 hour.
@@ -112,20 +114,24 @@ def get_locations():
:rtype: List[Location]
"""
# Get all of the data categories locations.
- confirmed = get_category("confirmed")["locations"]
- deaths = get_category("deaths")["locations"]
- # recovered = get_category('recovered')['locations']
+ confirmed = await get_category("confirmed")
+ deaths = await get_category("deaths")
+ # recovered = await get_category("recovered")
+
+ locations_confirmed = confirmed["locations"]
+ locations_deaths = deaths["locations"]
+ # locations_recovered = recovered["locations"]
# Final locations to return.
locations = []
# Go through locations.
- for index, location in enumerate(confirmed):
+ for index, location in enumerate(locations_confirmed):
# Get the timelines.
timelines = {
- "confirmed": confirmed[index]["history"],
- "deaths": deaths[index]["history"],
- # 'recovered' : recovered[index]['history'],
+ "confirmed": locations_confirmed[index]["history"],
+ "deaths": locations_deaths[index]["history"],
+ # 'recovered' : locations_recovered[index]['history'],
}
# Grab coordinates.
diff --git a/app/timeline.py b/app/timeline.py
index 4916ea2b..0b40d496 100644
--- a/app/timeline.py
+++ b/app/timeline.py
@@ -1,5 +1,5 @@
+"""app.timeline.py"""
from collections import OrderedDict
-from datetime import datetime
class Timeline:
@@ -7,8 +7,8 @@ class Timeline:
Timeline with history of data.
"""
- def __init__(self, history={}):
- self.__timeline = history
+ def __init__(self, history=None):
+ self.__timeline = history if history else {}
@property
def timeline(self):
@@ -26,7 +26,7 @@ def latest(self):
values = list(self.timeline.values())
# Last item is the latest.
- if len(values):
+ if values:
return values[-1] or 0
# Fallback value of 0.
diff --git a/app/utils/countries.py b/app/utils/countries.py
index 6647e679..5f926f37 100644
--- a/app/utils/countries.py
+++ b/app/utils/countries.py
@@ -1,16 +1,16 @@
+"""app.utils.countries.py"""
import logging
-from itertools import chain
LOGGER = logging.getLogger(__name__)
# Default country code.
-default_country_code = "XX"
+DEFAULT_COUNTRY_CODE = "XX"
# Mapping of country names to alpha-2 codes according to
# https://en.wikipedia.org/wiki/ISO_3166-1.
# As a reference see also https://github.com/TakahikoKawasaki/nv-i18n (in Java)
# fmt: off
-country_name__country_code = {
+COUNTRY_NAME__COUNTRY_CODE = {
"Afghanistan" : "AF",
"Ã…land Islands" : "AX",
"Albania" : "AL",
@@ -215,6 +215,7 @@
"Morocco" : "MA",
"Mozambique" : "MZ",
"Myanmar" : "MM",
+ "Burma" : "MM",
"Namibia" : "NA",
"Nauru" : "NR",
"Nepal" : "NP",
@@ -359,17 +360,20 @@
# "Disputed Territory" : "XX",
# "Others" has no mapping, i.e. the default val is used
- # "Cruise Ship" has no mapping, i.e. the default val is used
+
+ # ships:
+ # "Cruise Ship"
+ # "MS Zaandam"
}
# fmt: on
-def country_code(s):
+def country_code(value):
"""
Return two letter country code (Alpha-2) according to https://en.wikipedia.org/wiki/ISO_3166-1
Defaults to "XX".
"""
- country_code = country_name__country_code.get(s, default_country_code)
- if country_code == default_country_code:
- LOGGER.warning(f"No country code found for '{s}'. Using '{country_code}'!")
+ code = COUNTRY_NAME__COUNTRY_CODE.get(value, DEFAULT_COUNTRY_CODE)
+ if code == DEFAULT_COUNTRY_CODE:
+ LOGGER.warning(f"No country code found for '{value}'. Using '{code}'!")
- return country_code
+ return code
diff --git a/app/utils/date.py b/app/utils/date.py
index 3a18832e..5a2cc8e5 100644
--- a/app/utils/date.py
+++ b/app/utils/date.py
@@ -1,3 +1,4 @@
+"""app.utils.date.py"""
from dateutil.parser import parse
diff --git a/app/utils/httputils.py b/app/utils/httputils.py
new file mode 100644
index 00000000..a0793170
--- /dev/null
+++ b/app/utils/httputils.py
@@ -0,0 +1,30 @@
+"""app.utils.httputils.py"""
+import logging
+
+from aiohttp import ClientSession
+
+# Singleton aiohttp.ClientSession instance.
+CLIENT_SESSION: ClientSession
+
+
+LOGGER = logging.getLogger(__name__)
+
+
+async def setup_client_session():
+ """Set up the application-global aiohttp.ClientSession instance.
+
+ aiohttp recommends that only one ClientSession exist for the lifetime of an application.
+ See: https://docs.aiohttp.org/en/stable/client_quickstart.html#make-a-request
+
+ """
+ global CLIENT_SESSION # pylint: disable=global-statement
+ LOGGER.info("Setting up global aiohttp.ClientSession.")
+ CLIENT_SESSION = ClientSession()
+
+
+async def teardown_client_session():
+ """Close the application-global aiohttp.ClientSession.
+ """
+ global CLIENT_SESSION # pylint: disable=global-statement
+ LOGGER.info("Closing global aiohttp.ClientSession.")
+ await CLIENT_SESSION.close()
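+
+
+# Illustrative usage (not part of this module): the service layer awaits requests
+# through the shared session, e.g.
+#
+#     async with httputils.CLIENT_SESSION.get(url) as response:
+#         text = await response.text()
+#
+# setup_client_session and teardown_client_session are registered as FastAPI
+# on_startup and on_shutdown handlers in app.main.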
diff --git a/app/utils/populations.py b/app/utils/populations.py
index ea72c334..1d8bd843 100644
--- a/app/utils/populations.py
+++ b/app/utils/populations.py
@@ -1,19 +1,16 @@
+"""app.utils.populations.py"""
import logging
-from io import BytesIO, StringIO
-from zipfile import ZipFile, ZipInfo
import requests
-from cachetools import TTLCache, cached
-
-from .countries import country_code
LOGGER = logging.getLogger(__name__)
# Fetching of the populations.
def fetch_populations():
"""
- Returns a dictionary containing the population of each country fetched from the GeoNames (https://www.geonames.org/).
-
+    Returns a dictionary containing the population of each country fetched from GeoNames.
+ https://www.geonames.org/
+
:returns: The mapping of populations.
:rtype: dict
"""
@@ -34,7 +31,7 @@ def fetch_populations():
# Mapping of alpha-2 codes country codes to population.
-populations = fetch_populations()
+POPULATIONS = fetch_populations()
# Retrieving.
def country_population(country_code, default=None):
@@ -44,4 +41,4 @@ def country_population(country_code, default=None):
:returns: The population.
:rtype: int
"""
- return populations.get(country_code, default)
+ return POPULATIONS.get(country_code, default)
diff --git a/pylintrc b/pylintrc
index 4db0f41f..af114a33 100644
--- a/pylintrc
+++ b/pylintrc
@@ -3,7 +3,7 @@
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
-extension-pkg-whitelist=
+extension-pkg-whitelist=pydantic
# Add files or directories to the blacklist. They should be base names, not
# paths.
@@ -139,7 +139,8 @@ disable=print-statement,
deprecated-sys-function,
exception-escape,
comprehension-escape,
- bad-continuation # conflicts with black
+ bad-continuation, # conflicts with black
+ duplicate-code # turn back on ASAP
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
@@ -335,7 +336,7 @@ single-line-if-stmt=no
# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
-logging-format-style=old
+logging-format-style=fstr
# Logging modules to check that the string format arguments are in logging
# function parameter format.
@@ -346,8 +347,7 @@ logging-modules=logging
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
- XXX,
- TODO
+ XXX
[SIMILARITIES]
diff --git a/tasks.py b/tasks.py
new file mode 100644
index 00000000..3ff5f24c
--- /dev/null
+++ b/tasks.py
@@ -0,0 +1,66 @@
+"""
+tasks.py
+--------
+Project invoke tasks
+
+Available commands
+ invoke --list
+ invoke fmt
+ invoke sort
+ invoke check
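+    invoke check --diff
+    invoke lint
+    invoke test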
+"""
+import invoke
+
+TARGETS_DESCRIPTION = "Paths/directories to format. [default: . ]"
+
+
+@invoke.task(help={"targets": TARGETS_DESCRIPTION})
+def sort(ctx, targets="."):
+ """Sort module imports."""
+ print("sorting imports ...")
+ args = ["isort", "-rc", "--atomic", targets]
+ ctx.run(" ".join(args))
+
+
+@invoke.task(pre=[sort], help={"targets": TARGETS_DESCRIPTION})
+def fmt(ctx, targets="."):
+ """Format python source code & sort imports."""
+ print("formatting ...")
+ args = ["black", targets]
+ ctx.run(" ".join(args))
+
+
+@invoke.task
+def check(ctx, fmt=False, sort=False, diff=False): # pylint: disable=redefined-outer-name
+ """Check code format and import order."""
+ if not any([fmt, sort]):
+ fmt = True
+ sort = True
+
+ fmt_args = ["black", "--check", "."]
+ sort_args = ["isort", "-rc", "--check", "."]
+
+ if diff:
+ fmt_args.append("--diff")
+ sort_args.append("--diff")
+
+ cmd_args = []
+ if fmt:
+ cmd_args.extend(fmt_args)
+ if sort:
+ if cmd_args:
+ cmd_args.append("&")
+ cmd_args.extend(sort_args)
+ ctx.run(" ".join(cmd_args))
+
+
+@invoke.task
+def lint(ctx):
+ """Run linter."""
+ ctx.run(" ".join(["pylint", "app"]))
+
+
+@invoke.task
+def test(ctx):
+ """Run pytest tests."""
+ ctx.run(" ".join(["pytest", "-v"]))
diff --git a/tests/conftest.py b/tests/conftest.py
index a9811d22..b6399fec 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,16 +3,134 @@
Global conftest file for shared pytest fixtures
"""
+import datetime
+import os
+
import pytest
+from async_asgi_testclient import TestClient as AsyncTestClient
from fastapi.testclient import TestClient
from app.main import APP
+from app.utils import httputils
+
+try:
+ from unittest.mock import AsyncMock
+except ImportError:
+ # Python 3.7 backwards compat
+ from asyncmock import AsyncMock
+
+try:
+ from contextlib import asynccontextmanager
+except ImportError:
+ # Python 3.6 backwards compat
+ from async_generator import asynccontextmanager
@pytest.fixture
def api_client():
"""
- Returns a TestClient.
+ Returns a fastapi.testclient.TestClient.
The test client uses the requests library for making http requests.
"""
return TestClient(APP)
+
+
+@pytest.fixture
+async def async_api_client():
+ """
+ Returns an async_asgi_testclient.TestClient.
+ """
+ return AsyncTestClient(APP)
+
+
+class DateTimeStrpTime:
+ """Returns instance of `DateTimeStrpTime`
+ when calling `app.services.location.jhu.datetime.trptime(date, '%m/%d/%y').isoformat()`.
+ """
+
+ def __init__(self, date, strformat):
+ self.date = date
+ self.strformat = strformat
+
+ def isoformat(self):
+ return datetime.datetime.strptime(self.date, self.strformat).isoformat()
+
+
+class FakeRequestsGetResponse:
+ """Fake instance of a response from `aiohttp.ClientSession.get`.
+ """
+
+ def __init__(self, url, filename, state):
+ self.url = url
+ self.filename = filename
+ self.state = state
+
+ async def text(self):
+ return self.read_file(self.state)
+
+ def read_file(self, state):
+ """
+        Return text from a local example file, standing in for the body of an HTTP GET response.
+ """
+ state = state.lower()
+
+ # Determine filepath.
+ filepath = os.path.join(os.path.dirname(__file__), "example_data/{}.csv".format(state))
+
+ # Return fake response.
+ print("Try to read {}".format(filepath))
+ with open(filepath, "r") as file:
+ return file.read()
+
+
+@pytest.fixture(scope="class")
+def mock_client_session_class(request):
+ """Class fixture to expose an AsyncMock to unittest.TestCase subclasses.
+
+ See: https://docs.pytest.org/en/5.4.1/unittest.html#mixing-pytest-fixtures-into-unittest-testcase-subclasses-using-marks
+ """
+
+ httputils.CLIENT_SESSION = request.cls.mock_client_session = AsyncMock()
+ httputils.CLIENT_SESSION.get = mocked_session_get
+ try:
+ yield
+ finally:
+ del httputils.CLIENT_SESSION
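+
+# A minimal usage sketch for the class fixture above (hypothetical test class; the
+# FlaskRoutesTest in tests/test_routes.py applies the same pattern):
+#
+#   @pytest.mark.usefixtures("mock_client_session_class")
+#   class ExampleRoutesTest(unittest.TestCase):
+#       def test_something(self):
+#           assert isinstance(self.mock_client_session, AsyncMock)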
+
+
+@pytest.fixture
+async def mock_client_session():
+ """Context manager fixture that replaces the global client_session with an AsyncMock
+ instance.
+ """
+
+ httputils.CLIENT_SESSION = AsyncMock()
+ httputils.CLIENT_SESSION.get = mocked_session_get
+ try:
+ yield httputils.CLIENT_SESSION
+ finally:
+ del httputils.CLIENT_SESSION
+
+
+@asynccontextmanager
+async def mocked_session_get(*args, **kwargs):
+ """Mock response from client_session.get.
+ """
+
+ url = args[0]
+ filename = url.split("/")[-1]
+
+ # clean up for id token (e.g. Deaths)
+ state = filename.split("-")[-1].replace(".csv", "").lower().capitalize()
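+    # e.g. a (hypothetical) URL ending in "...-Confirmed.csv" -> state "Confirmed",
+    # so the fake response below reads tests/example_data/confirmed.csv.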
+
+ yield FakeRequestsGetResponse(url, filename, state)
+
+
+def mocked_strptime_isoformat(*args, **kwargs):
+ """Mock return value from datetime.strptime().isoformat().
+ """
+
+ date = args[0]
+ strformat = args[1]
+
+ return DateTimeStrpTime(date, strformat)
diff --git a/tests/example_data/sample_covid19_county.csv b/tests/example_data/covid19_county.csv
similarity index 100%
rename from tests/example_data/sample_covid19_county.csv
rename to tests/example_data/covid19_county.csv
diff --git a/tests/test_countries.py b/tests/test_countries.py
index 2c9ba65e..e28fb469 100644
--- a/tests/test_countries.py
+++ b/tests/test_countries.py
@@ -16,8 +16,8 @@
("Bolivia, Plurinational State of", "BO"),
("Korea, Democratic People's Republic of", "KP"),
("US", "US"),
- ("BlaBla", countries.default_country_code),
- ("Others", countries.default_country_code),
+ ("BlaBla", countries.DEFAULT_COUNTRY_CODE),
+ ("Others", countries.DEFAULT_COUNTRY_CODE),
],
)
def test_countries_country_name__country_code(country_name, expected_country_code):
diff --git a/tests/test_csbs.py b/tests/test_csbs.py
index 64852102..828a5b65 100644
--- a/tests/test_csbs.py
+++ b/tests/test_csbs.py
@@ -1,9 +1,5 @@
-import datetime
-from unittest import mock
-
import pytest
-import app
from app.services.location import csbs
@@ -21,7 +17,7 @@ def read_file(self):
"""
Mock HTTP GET-method and return text from file
"""
- filepath = "tests/example_data/sample_covid19_county.csv"
+ filepath = "tests/example_data/covid19_county.csv"
print("Try to read {}".format(filepath))
with open(filepath, "r") as file:
return file.read()
@@ -29,9 +25,10 @@ def read_file(self):
return FakeRequestsGetResponse()
-@mock.patch("app.services.location.csbs.requests.get", side_effect=mocked_csbs_requests_get)
-def test_get_locations(mock_request_get):
- data = csbs.get_locations()
+@pytest.mark.asyncio
+async def test_get_locations(mock_client_session):
+ data = await csbs.get_locations()
+
assert isinstance(data, list)
# check to see that Unknown/Unassigned has been filtered
diff --git a/tests/test_httputils.py b/tests/test_httputils.py
new file mode 100644
index 00000000..547f3725
--- /dev/null
+++ b/tests/test_httputils.py
@@ -0,0 +1,19 @@
+import pytest
+
+from app.utils import httputils
+
+
+@pytest.mark.asyncio
+async def test_setup_teardown_client_session():
+ with pytest.raises(AttributeError):
+        # Ensure CLIENT_SESSION is undefined prior to setup
+ httputils.CLIENT_SESSION
+
+ await httputils.setup_client_session()
+
+ assert httputils.CLIENT_SESSION
+
+ await httputils.teardown_client_session()
+ assert httputils.CLIENT_SESSION.closed
+
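+    # Remove the module-level session so later tests start from the same "not set up" state.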
+ del httputils.CLIENT_SESSION
diff --git a/tests/test_jhu.py b/tests/test_jhu.py
index f9c214a6..3790218d 100644
--- a/tests/test_jhu.py
+++ b/tests/test_jhu.py
@@ -1,86 +1,24 @@
-import datetime
from unittest import mock
import pytest
-import app
from app import location
from app.services.location import jhu
-from app.utils import date
+from tests.conftest import mocked_strptime_isoformat
DATETIME_STRING = "2020-03-17T10:23:22.505550"
-def mocked_requests_get(*args, **kwargs):
- class FakeRequestsGetResponse:
- """
- Returns instance of `FakeRequestsGetResponse`
- when calling `app.services.location.jhu.requests.get()`
- """
+@pytest.mark.asyncio
+async def test_get_locations(mock_client_session):
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ output = await jhu.get_locations()
- def __init__(self, url, filename, state):
- self.url = url
- self.filename = filename
- self.state = state
- self.text = self.read_file(self.state)
-
- def read_file(self, state):
- """
- Mock HTTP GET-method and return text from file
- """
- state = state.lower()
-
- # Determine filepath.
- filepath = "tests/example_data/{}.csv".format(state)
-
- # Return fake response.
- print("Try to read {}".format(filepath))
- with open(filepath, "r") as file:
- return file.read()
-
- # get url from `request.get`
- url = args[0]
-
- # get filename from url
- filename = url.split("/")[-1]
-
- # clean up for id token (e.g. Deaths)
- state = filename.split("-")[-1].replace(".csv", "").lower().capitalize()
-
- return FakeRequestsGetResponse(url, filename, state)
-
-
-def mocked_strptime_isoformat(*args, **kwargs):
- class DateTimeStrpTime:
- """
- Returns instance of `DateTimeStrpTime`
- when calling `app.services.location.jhu.datetime.trptime(date, '%m/%d/%y').isoformat()`
- """
-
- def __init__(self, date, strformat):
- self.date = date
- self.strformat = strformat
-
- def isoformat(self):
- return datetime.datetime.strptime(self.date, self.strformat).isoformat()
-
- date = args[0]
- strformat = args[1]
-
- return DateTimeStrpTime(date, strformat)
-
-
-@mock.patch("app.services.location.jhu.datetime")
-@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get)
-def test_get_locations(mock_request_get, mock_datetime):
- # mock app.services.location.jhu.datetime.utcnow().isoformat()
- mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
-
- output = jhu.get_locations()
assert isinstance(output, list)
assert isinstance(output[0], location.Location)
# `jhu.get_locations()` creates id based on confirmed list
- location_confirmed = jhu.get_category("confirmed")
+ location_confirmed = await jhu.get_category("confirmed")
assert len(output) == len(location_confirmed["locations"])
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 9e1c03ef..605ce2c0 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -4,17 +4,16 @@
from unittest import mock
import pytest
-from fastapi.testclient import TestClient
+from async_asgi_testclient import TestClient
-import app
-from app import services
from app.main import APP
-from .test_jhu import DATETIME_STRING, mocked_requests_get, mocked_strptime_isoformat
+from .conftest import mocked_strptime_isoformat
+from .test_jhu import DATETIME_STRING
-@mock.patch("app.services.location.jhu.datetime")
-@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get)
+@pytest.mark.usefixtures("mock_client_session_class")
+@pytest.mark.asyncio
class FlaskRoutesTest(unittest.TestCase):
"""
Need to mock some objects to control testing data locally
@@ -22,11 +21,7 @@ class FlaskRoutesTest(unittest.TestCase):
Store all integration testcases in one class to ensure app context
"""
- # load app context only once.
- app = app.create_app()
-
def setUp(self):
- self.client = FlaskRoutesTest.app.test_client()
self.asgi_client = TestClient(APP)
self.date = DATETIME_STRING
@@ -36,89 +31,110 @@ def read_file_v1(self, state):
expected_json_output = file.read()
return expected_json_output
- def test_root_api(self, mock_request_get, mock_datetime):
+ async def test_root_api(self):
"""Validate that / returns a 200 and is not a redirect."""
- response = self.asgi_client.get("/")
+ response = await self.asgi_client.get("/")
assert response.status_code == 200
assert not response.is_redirect
- def test_v1_confirmed(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = self.date
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ async def test_v1_confirmed(self):
state = "confirmed"
expected_json_output = self.read_file_v1(state=state)
- return_data = self.client.get("/{}".format(state)).data.decode()
- assert return_data == expected_json_output
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = self.date
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/{}".format(state))
+
+ return_data = response.json()
+ assert return_data == json.loads(expected_json_output)
- def test_v1_deaths(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = self.date
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ async def test_v1_deaths(self):
state = "deaths"
expected_json_output = self.read_file_v1(state=state)
- return_data = self.client.get("/{}".format(state)).data.decode()
- assert return_data == expected_json_output
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = self.date
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/{}".format(state))
- def test_v1_recovered(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = self.date
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ return_data = response.json()
+ assert return_data == json.loads(expected_json_output)
+
+ async def test_v1_recovered(self):
state = "recovered"
expected_json_output = self.read_file_v1(state=state)
- return_data = self.client.get("/{}".format(state)).data.decode()
- assert return_data == expected_json_output
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = self.date
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/{}".format(state))
+
+ return_data = response.json()
+ assert return_data == json.loads(expected_json_output)
- def test_v1_all(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = self.date
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ async def test_v1_all(self):
state = "all"
expected_json_output = self.read_file_v1(state=state)
- return_data = self.client.get("/{}".format(state)).data.decode()
- # print(return_data)
- assert return_data == expected_json_output
- def test_v2_latest(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = self.date
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/{}".format(state))
+
+ return_data = response.json()
+ assert return_data == json.loads(expected_json_output)
+
+ async def test_v2_latest(self):
state = "latest"
- return_data = self.asgi_client.get(f"/v2/{state}").json()
- check_dict = {"latest": {"confirmed": 1940, "deaths": 1940, "recovered": 0}}
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get(f"/v2/{state}")
+ return_data = response.json()
+ check_dict = {"latest": {"confirmed": 1940, "deaths": 1940, "recovered": 0}}
assert return_data == check_dict
- def test_v2_locations(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ async def test_v2_locations(self):
state = "locations"
- return_data = self.asgi_client.get("/v2/{}".format(state)).json()
+
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/v2/{}".format(state))
+
+ return_data = response.json()
filepath = "tests/expected_output/v2_{state}.json".format(state=state)
with open(filepath, "r") as file:
expected_json_output = file.read()
+ # TODO: Why is this failing?
# assert return_data == json.loads(expected_json_output)
- def test_v2_locations_id(self, mock_request_get, mock_datetime):
- mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
- mock_datetime.strptime.side_effect = mocked_strptime_isoformat
-
+ async def test_v2_locations_id(self):
state = "locations"
test_id = 1
- return_data = self.asgi_client.get("/v2/{}/{}".format(state, test_id)).json()
+
+ with mock.patch("app.services.location.jhu.datetime") as mock_datetime:
+ mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING
+ mock_datetime.strptime.side_effect = mocked_strptime_isoformat
+ response = await self.asgi_client.get("/v2/{}/{}".format(state, test_id))
+
+ return_data = response.json()
filepath = "tests/expected_output/v2_{state}_id_{test_id}.json".format(state=state, test_id=test_id)
with open(filepath, "r") as file:
expected_json_output = file.read()
+ # TODO: Why is this failing?
# assert return_data == expected_json_output
- def tearDown(self):
- pass
-
+@pytest.mark.asyncio
@pytest.mark.parametrize(
"query_params,expected_status",
[
@@ -132,13 +148,15 @@ def tearDown(self):
({"source": "jhu", "country_code": "US"}, 404),
],
)
-def test_locations_status_code(api_client, query_params, expected_status):
- response = api_client.get("/v2/locations", params=query_params)
+async def test_locations_status_code(async_api_client, query_params, expected_status, mock_client_session):
+ response = await async_api_client.get("/v2/locations", query_string=query_params)
+
print(f"GET {response.url}\n{response}")
print(f"\tjson:\n{pf(response.json())[:1000]}\n\t...")
assert response.status_code == expected_status
+@pytest.mark.asyncio
@pytest.mark.parametrize(
"query_params",
[
@@ -150,8 +168,9 @@ def test_locations_status_code(api_client, query_params, expected_status):
{"source": "jhu", "timelines": True},
],
)
-def test_latest(api_client, query_params):
- response = api_client.get("/v2/latest", params=query_params)
+async def test_latest(async_api_client, query_params, mock_client_session):
+ response = await async_api_client.get("/v2/latest", query_string=query_params)
+
print(f"GET {response.url}\n{response}")
response_json = response.json()