
Commit 0f3009e

Merge branch 'master' into master
2 parents 761f62d + 464453d

18 files changed: +215 -119 lines changed

.deepsource.toml
Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
+version = 1
+
+test_patterns = ["tests/**"]
+
+[[analyzers]]
+name = "python"
+enabled = true
+
+[analyzers.meta]
+runtime_version = "3.x.x"
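Note: this config enables a single DeepSource Python analyzer on a 3.x runtime and marks files under tests/ as test code, so test-specific checks apply there.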

.gitignore
Lines changed: 1 addition & 0 deletions

@@ -51,6 +51,7 @@ htmlcov/
 nosetests.xml
 coverage.xml
 *,cover
+locustfile.py
 
 # Translations
 *.mo

app/data/__init__.py
Lines changed: 5 additions & 1 deletion

@@ -4,7 +4,11 @@
 from ..services.location.nyt import NYTLocationService
 
 # Mapping of services to data-sources.
-DATA_SOURCES = {"jhu": JhuLocationService(), "csbs": CSBSLocationService(), "nyt": NYTLocationService()}
+DATA_SOURCES = {
+    "jhu": JhuLocationService(),
+    "csbs": CSBSLocationService(),
+    "nyt": NYTLocationService(),
+}
 
 
 def data_source(source):
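The trailing context line hints at how the mapping is consumed: a lookup of a service by source name. A minimal sketch of such a helper follows; only the signature appears in this diff, so the body (including the .lower() normalization) is an assumption:

def data_source(source):
    """Hypothetical body: return the service registered for a source name."""
    return DATA_SOURCES.get(source.lower())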

app/io.py
Lines changed: 11 additions & 2 deletions

@@ -10,7 +10,11 @@
 
 
 def save(
-    name: str, content: Union[str, Dict, List], write_mode: str = "w", indent: int = 2, **json_dumps_kwargs
+    name: str,
+    content: Union[str, Dict, List],
+    write_mode: str = "w",
+    indent: int = 2,
+    **json_dumps_kwargs,
 ) -> pathlib.Path:
     """Save content to a file. If content is a dictionary, use json.dumps()."""
     path = DATA / name
@@ -35,7 +39,12 @@ class AIO:
 
     @classmethod
     async def save(
-        cls, name: str, content: Union[str, Dict, List], write_mode: str = "w", indent: int = 2, **json_dumps_kwargs
+        cls,
+        name: str,
+        content: Union[str, Dict, List],
+        write_mode: str = "w",
+        indent: int = 2,
+        **json_dumps_kwargs,
     ):
         """Save content to a file. If content is a dictionary, use json.dumps()."""
        path = DATA / name
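A hypothetical caller of the reformatted synchronous save(); the module path app/io.py comes from this diff, while the file name and payload below are invented:

from app.io import save  # module path per this diff

# Dict content goes through json.dumps() per the docstring; a str is written as-is.
path = save("readings.json", {"status": "ok"}, write_mode="w", indent=2)
print(path)  # a pathlib.Path pointing under the module's DATA directory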

app/location/__init__.py
Lines changed: 6 additions & 2 deletions

@@ -11,7 +11,7 @@ class Location: # pylint: disable=too-many-instance-attributes
     """
 
     def __init__(
-        self, id, country, province, coordinates, last_updated, confirmed, deaths, recovered
+        self, id, country, province, coordinates, last_updated, confirmed, deaths, recovered,
     ):  # pylint: disable=too-many-arguments
         # General info.
         self.id = id
@@ -66,7 +66,11 @@ def serialize(self):
             # Last updated.
             "last_updated": self.last_updated,
             # Latest data (statistics).
-            "latest": {"confirmed": self.confirmed, "deaths": self.deaths, "recovered": self.recovered},
+            "latest": {
+                "confirmed": self.confirmed,
+                "deaths": self.deaths,
+                "recovered": self.recovered,
+            },
         }

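For one hypothetical location, the reworked serialize() fragment renders as follows (values invented; keys and nesting as in the diff, other keys of the full method omitted):

{
    "last_updated": "2020-03-21T06:59:11Z",
    "latest": {
        "confirmed": 152,
        "deaths": 3,
        "recovered": 17,
    },
}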
app/main.py
Lines changed: 5 additions & 1 deletion

@@ -59,7 +59,11 @@
 
 # Enable CORS.
 APP.add_middleware(
-    CORSMiddleware, allow_credentials=True, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"],
+    CORSMiddleware,
+    allow_credentials=True,
+    allow_origins=["*"],
+    allow_methods=["*"],
+    allow_headers=["*"],
 )
 APP.add_middleware(GZipMiddleware, minimum_size=1000)
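A self-contained sketch of the same middleware stack, assuming the FastAPI/Starlette imports that app/main.py presumably uses:

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware

APP = FastAPI()

# Allow any origin, method, and header, with credentials.
APP.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Compress responses larger than 1000 bytes.
APP.add_middleware(GZipMiddleware, minimum_size=1000)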

app/routers/v1.py
Lines changed: 5 additions & 1 deletion

@@ -19,7 +19,11 @@ async def all_categories():
         "deaths": deaths,
         "recovered": recovered,
         # Latest.
-        "latest": {"confirmed": confirmed["latest"], "deaths": deaths["latest"], "recovered": recovered["latest"],},
+        "latest": {
+            "confirmed": confirmed["latest"],
+            "deaths": deaths["latest"],
+            "recovered": recovered["latest"],
+        },
     }

app/routers/v2.py
Lines changed: 11 additions & 3 deletions

@@ -65,11 +65,17 @@ async def get_locations(
 
     # Do filtering.
     try:
-        locations = [location for location in locations if str(getattr(location, key)).lower() == str(value)]
+        locations = [
+            location
+            for location in locations
+            if str(getattr(location, key)).lower() == str(value)
+        ]
     except AttributeError:
         pass
     if not locations:
-        raise HTTPException(404, detail=f"Source `{source}` does not have the desired location data.")
+        raise HTTPException(
+            404, detail=f"Source `{source}` does not have the desired location data.",
+        )
 
     # Return final serialized data.
     return {
@@ -84,7 +90,9 @@ async def get_locations(
 
 # pylint: disable=invalid-name
 @V2.get("/locations/{id}", response_model=LocationResponse)
-async def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True):
+async def get_location_by_id(
+    request: Request, id: int, source: Sources = "jhu", timelines: bool = True
+):
     """
     Getting specific location by id.
     """

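The expanded comprehension filters locations on an attribute named by a query parameter; a toy illustration with a stand-in class (the real Location class and request parsing are not shown in this hunk):

class Loc:  # stand-in for the app's Location class
    def __init__(self, country):
        self.country = country

locations = [Loc("US"), Loc("CA"), Loc("US")]
key, value = "country", "us"  # e.g. parsed from a ?country=us query string

locations = [
    location
    for location in locations
    if str(getattr(location, key)).lower() == str(value)
]
print(len(locations))  # 2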
app/services/location/csbs.py
Lines changed: 54 additions & 43 deletions

@@ -6,6 +6,7 @@
 from asyncache import cached
 from cachetools import TTLCache
 
+from ...caches import check_cache, load_cache
 from ...coordinates import Coordinates
 from ...location.csbs import CSBSLocation
 from ...utils import httputils
@@ -34,7 +35,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
 BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
 
 
-@cached(cache=TTLCache(maxsize=1, ttl=3600))
+@cached(cache=TTLCache(maxsize=1, ttl=1800))
 async def get_locations():
     """
     Retrieves county locations; locations are cached for 1 hour
@@ -44,48 +45,58 @@ async def get_locations():
     """
     data_id = "csbs.locations"
     LOGGER.info(f"{data_id} Requesting data...")
-    async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-        text = await response.text()
-
-    LOGGER.debug(f"{data_id} Data received")
-
-    data = list(csv.DictReader(text.splitlines()))
-    LOGGER.debug(f"{data_id} CSV parsed")
-
-    locations = []
-
-    for i, item in enumerate(data):
-        # General info.
-        state = item["State Name"]
-        county = item["County Name"]
-
-        # Ensure country is specified.
-        if county in {"Unassigned", "Unknown"}:
-            continue
-
-        # Coordinates.
-        coordinates = Coordinates(item["Latitude"], item["Longitude"])  # pylint: disable=unused-variable
-
-        # Date string without "EDT" at end.
-        last_update = " ".join(item["Last Update"].split(" ")[0:2])
-
-        # Append to locations.
-        locations.append(
-            CSBSLocation(
-                # General info.
-                i,
-                state,
-                county,
-                # Coordinates.
-                Coordinates(item["Latitude"], item["Longitude"]),
-                # Last update (parse as ISO).
-                datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                # Statistics.
-                int(item["Confirmed"] or 0),
-                int(item["Death"] or 0),
+    # check shared cache
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        locations = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+            text = await response.text()
+
+        LOGGER.debug(f"{data_id} Data received")
+
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")
+
+        locations = []
+
+        for i, item in enumerate(data):
+            # General info.
+            state = item["State Name"]
+            county = item["County Name"]
+
+            # Ensure country is specified.
+            if county in {"Unassigned", "Unknown"}:
+                continue
+
+            # Date string without "EDT" at end.
+            last_update = " ".join(item["Last Update"].split(" ")[0:2])
+
+            # Append to locations.
+            locations.append(
+                CSBSLocation(
+                    # General info.
+                    i,
+                    state,
+                    county,
+                    # Coordinates.
+                    Coordinates(item["Latitude"], item["Longitude"]),
+                    # Last update (parse as ISO).
+                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+                    # Statistics.
+                    int(item["Confirmed"] or 0),
+                    int(item["Death"] or 0),
+                )
             )
-        )
-    LOGGER.info(f"{data_id} Data normalized")
-
+        LOGGER.info(f"{data_id} Data normalized")
+        # save the results to distributed cache
+        # TODO: fix json serialization
+        try:
+            await load_cache(data_id, locations)
+        except TypeError as type_err:
+            LOGGER.error(type_err)
+
     # Return the locations.
     return locations
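The restructured get_locations() is a cache-aside pattern over the shared cache: read, fetch from the origin on a miss, then write back, tolerating the serialization failure flagged by the TODO. Distilled into a sketch; the check_cache/load_cache signatures are inferred from their call sites above, and this helper itself is not part of the commit:

from app.caches import check_cache, load_cache  # module per the new relative import

async def get_cached(data_id, fetch, logger):
    """Cache-aside sketch: shared-cache read, origin fetch on miss, write-back."""
    results = await check_cache(data_id)
    if results:
        logger.info(f"{data_id} using shared cache results")
        return results
    results = await fetch()  # e.g. download and normalize the CSV
    try:
        await load_cache(data_id, results)
    except TypeError as type_err:  # per the TODO: json serialization still fails
        logger.error(type_err)
    return results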

app/services/location/jhu.py
Lines changed: 3 additions & 5 deletions

@@ -41,12 +41,10 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
 
 
 # Base URL for fetching category.
-BASE_URL = (
-    "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
-)
+BASE_URL = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
 
 
-@cached(cache=TTLCache(maxsize=128, ttl=1800))
+@cached(cache=TTLCache(maxsize=4, ttl=1800))
 async def get_category(category):
     """
     Retrieves the data for the provided category. The data is cached for 30 minutes locally, 1 hour via shared Redis.
@@ -129,7 +127,7 @@ async def get_category(category):
     return results
 
 
-@cached(cache=TTLCache(maxsize=1024, ttl=1800))
+@cached(cache=TTLCache(maxsize=1, ttl=1800))
 async def get_locations():
     """
     Retrieves the locations from the categories. The locations are cached for 1 hour.
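The tightened maxsize values track real key cardinality: get_category() is keyed by the three category names ("confirmed", "deaths", "recovered"), so four slots suffice, and the zero-argument get_locations() needs only one. Both now expire after ttl=1800 seconds, i.e. 30 minutes, matching the "cached for 30 minutes locally" wording in get_category's docstring. A quick cachetools illustration:

from cachetools import TTLCache

# One slot per distinct key; each entry expires 1800 s after insertion.
cache = TTLCache(maxsize=4, ttl=1800)
cache["confirmed"] = {"latest": 0}  # hypothetical cached payload
assert "confirmed" in cache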
