Skip to content

Commit 1e8c717

Browse files
committed
apply bridge pattern
1 parent 1c7e4ae commit 1e8c717

File tree

8 files changed

+325
-344
lines changed

8 files changed

+325
-344
lines changed

app/data/__init__.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,21 @@
11
"""app.data"""
2-
from ..services.location.csbs import CSBSLocationService
3-
from ..services.location.jhu import JhuLocationService
4-
from ..services.location.nyt import NYTLocationService
2+
from ..services.location.csbs import BasicLocationService
3+
from ..services.location.csbslocations import CSBSLocations
4+
from ..services.location.jhulocations import JHULocations
5+
from ..services.location.nytlocations import NYTLocations
56

67
# Mapping of services to data-sources.
78
DATA_SOURCES = {
8-
"jhu": JhuLocationService(),
9-
"csbs": CSBSLocationService(),
10-
"nyt": NYTLocationService(),
9+
"jhu": BasicLocationService(JHULocations()),
10+
"csbs": BasicLocationService(CSBSLocations()),
11+
"nyt": BasicLocationService(NYTLocations()),
1112
}
1213

1314

1415
def data_source(source):
1516
"""
1617
Retrieves the provided data-source service.
17-
1818
:returns: The service.
1919
:rtype: LocationService
2020
"""
21-
return DATA_SOURCES.get(source.lower())
21+
return DATA_SOURCES.get(source.lower())

app/services/location/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
"""app.services.location"""
22
from abc import ABC, abstractmethod
3-
3+
from . import DataSourcesInterface
44

55
class LocationService(ABC):
66
"""
77
Service for retrieving locations.
88
"""
9+
def __init__(self, dataSource: DataSourcesInterface):
10+
self.dataSource = dataSource
911

1012
@abstractmethod
1113
async def get_all(self):

app/services/location/csbs.py

Lines changed: 2 additions & 71 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,14 @@
1515
LOGGER = logging.getLogger("services.location.csbs")
1616

1717

18-
class CSBSLocationService(LocationService):
18+
class BasicLocationService(LocationService):
1919
"""
2020
Service for retrieving locations from the injected data source
2121
"""
2222

2323
async def get_all(self):
2424
# Get the locations.
25-
locations = await get_locations()
25+
locations = await self.dataSource.get_locations()
2626
return locations
2727

2828
async def get(self, loc_id): # pylint: disable=arguments-differ
@@ -31,72 +31,3 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
3131
return locations[loc_id]
3232

3333

34-
# Base URL for fetching data
35-
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
36-
37-
38-
@cached(cache=TTLCache(maxsize=1, ttl=1800))
39-
async def get_locations():
40-
"""
41-
Retrieves county locations; locations are cached for 30 minutes
42-
43-
:returns: The locations.
44-
:rtype: dict
45-
"""
46-
data_id = "csbs.locations"
47-
LOGGER.info(f"{data_id} Requesting data...")
48-
# check shared cache
49-
cache_results = await check_cache(data_id)
50-
if cache_results:
51-
LOGGER.info(f"{data_id} using shared cache results")
52-
locations = cache_results
53-
else:
54-
LOGGER.info(f"{data_id} shared cache empty")
55-
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
56-
text = await response.text()
57-
58-
LOGGER.debug(f"{data_id} Data received")
59-
60-
data = list(csv.DictReader(text.splitlines()))
61-
LOGGER.debug(f"{data_id} CSV parsed")
62-
63-
locations = []
64-
65-
for i, item in enumerate(data):
66-
# General info.
67-
state = item["State Name"]
68-
county = item["County Name"]
69-
70-
# Ensure county is specified.
71-
if county in {"Unassigned", "Unknown"}:
72-
continue
73-
74-
# Date string without "EDT" at end.
75-
last_update = " ".join(item["Last Update"].split(" ")[0:2])
76-
77-
# Append to locations.
78-
locations.append(
79-
CSBSLocation(
80-
# General info.
81-
i,
82-
state,
83-
county,
84-
# Coordinates.
85-
Coordinates(item["Latitude"], item["Longitude"]),
86-
# Last update (parse as ISO).
87-
datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
88-
# Statistics.
89-
int(item["Confirmed"] or 0),
90-
int(item["Death"] or 0),
91-
)
92-
)
93-
LOGGER.info(f"{data_id} Data normalized")
94-
# save the results to distributed cache
95-
# TODO: fix json serialization
96-
try:
97-
await load_cache(data_id, locations)
98-
except TypeError as type_err:
99-
LOGGER.error(type_err)
100-
101-
# Return the locations.
102-
return locations
Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
import csv
2+
import logging
3+
from datetime import datetime
4+
5+
from asyncache import cached
6+
from cachetools import TTLCache
7+
8+
from ...caches import check_cache, load_cache
9+
from ...coordinates import Coordinates
10+
from ...location.csbs import CSBSLocation
11+
from ...utils import httputils
12+
from . import DataSourcesInterface
13+
14+
LOGGER = logging.getLogger("services.location.csbs")
15+
# Base URL for fetching data
16+
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
17+
18+
class CSBSLocations(DataSourcesInterface):
19+
20+
@cached(cache=TTLCache(maxsize=1, ttl=1800))
21+
async def get_locations():
22+
"""
23+
Retrieves county locations; locations are cached for 30 minutes
24+
:returns: The locations.
25+
:rtype: dict
26+
"""
27+
data_id = "csbs.locations"
28+
LOGGER.info(f"{data_id} Requesting data...")
29+
# check shared cache
30+
cache_results = await check_cache(data_id)
31+
if cache_results:
32+
LOGGER.info(f"{data_id} using shared cache results")
33+
locations = cache_results
34+
else:
35+
LOGGER.info(f"{data_id} shared cache empty")
36+
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
37+
text = await response.text()
38+
39+
LOGGER.debug(f"{data_id} Data received")
40+
41+
data = list(csv.DictReader(text.splitlines()))
42+
LOGGER.debug(f"{data_id} CSV parsed")
43+
44+
locations = []
45+
46+
for i, item in enumerate(data):
47+
# General info.
48+
state = item["State Name"]
49+
county = item["County Name"]
50+
51+
# Ensure county is specified.
52+
if county in {"Unassigned", "Unknown"}:
53+
continue
54+
55+
# Date string without "EDT" at end.
56+
last_update = " ".join(item["Last Update"].split(" ")[0:2])
57+
58+
# Append to locations.
59+
locations.append(
60+
CSBSLocation(
61+
# General info.
62+
i,
63+
state,
64+
county,
65+
# Coordinates.
66+
Coordinates(item["Latitude"], item["Longitude"]),
67+
# Last update (parse as ISO).
68+
datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
69+
# Statistics.
70+
int(item["Confirmed"] or 0),
71+
int(item["Death"] or 0),
72+
)
73+
)
74+
LOGGER.info(f"{data_id} Data normalized")
75+
# save the results to distributed cache
76+
# TODO: fix json serialization
77+
try:
78+
await load_cache(data_id, locations)
79+
except TypeError as type_err:
80+
LOGGER.error(type_err)
81+
82+
# Return the locations.
83+
return locations

0 commit comments

Comments
 (0)