|
 from ...coordinates import Coordinates
 from ...location.csbs import CSBSLocation
 from ...utils import httputils
-from . import LocationService
+from . import LocationService, LocationGateway

 LOGGER = logging.getLogger("services.location.csbs")


-class CSBSLocationService(LocationService):
-    """
-    Service for retrieving locations from csbs
-    """

-    async def get_all(self):
-        # Get the locations.
-        locations = await get_locations()
-        return locations

-    async def get(self, loc_id):  # pylint: disable=arguments-differ
-        # Get location at the index equal to the provided id.
-        locations = await self.get_all()
-        return locations[loc_id]
-
-
-# Base URL for fetching data
-BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
-
-
-@cached(cache=TTLCache(maxsize=1, ttl=1800))
-async def get_locations():
-    """
-    Retrieves county locations; locations are cached for 1 hour
-
-    :returns: The locations.
-    :rtype: dict
-    """
-    data_id = "csbs.locations"
-    LOGGER.info(f"{data_id} Requesting data...")
-    # check shared cache
-    cache_results = await check_cache(data_id)
-    if cache_results:
-        LOGGER.info(f"{data_id} using shared cache results")
-        locations = cache_results
-    else:
-        LOGGER.info(f"{data_id} shared cache empty")
-        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-            text = await response.text()
-
-        LOGGER.debug(f"{data_id} Data received")
-
-        data = list(csv.DictReader(text.splitlines()))
-        LOGGER.debug(f"{data_id} CSV parsed")
-
-        locations = []
-
-        for i, item in enumerate(data):
-            # General info.
-            state = item["State Name"]
-            county = item["County Name"]
-
-            # Ensure country is specified.
-            if county in {"Unassigned", "Unknown"}:
-                continue
-
-            # Date string without "EDT" at end.
-            last_update = " ".join(item["Last Update"].split(" ")[0:2])
-
-            # Append to locations.
-            locations.append(
-                CSBSLocation(
-                    # General info.
-                    i,
-                    state,
-                    county,
-                    # Coordinates.
-                    Coordinates(item["Latitude"], item["Longitude"]),
-                    # Last update (parse as ISO).
-                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                    # Statistics.
-                    int(item["Confirmed"] or 0),
-                    int(item["Death"] or 0),
+
+class CSBSGateway(LocationGateway):
+    """
+    Gateway for retrieving locations from CSBS.
+    """
+
+    def __init__(self, base_url):
+        self.base_url = base_url
+
+    @cached(cache=TTLCache(maxsize=1, ttl=1800))
+    async def get_locations(self):
+        """
+        Retrieves county locations; locations are cached for 30 minutes.
+
+        :returns: The locations.
+        :rtype: List[CSBSLocation]
+        """
+        data_id = "csbs.locations"
+        LOGGER.info(f"{data_id} Requesting data...")
+        # check shared cache
+        cache_results = await check_cache(data_id)
+        if cache_results:
+            LOGGER.info(f"{data_id} using shared cache results")
+            locations = cache_results
+        else:
+            LOGGER.info(f"{data_id} shared cache empty")
+            async with httputils.CLIENT_SESSION.get(self.base_url) as response:
+                text = await response.text()
+
+            LOGGER.debug(f"{data_id} Data received")
+
+            data = list(csv.DictReader(text.splitlines()))
+            LOGGER.debug(f"{data_id} CSV parsed")
+
+            locations = []
+
+            for i, item in enumerate(data):
+                # General info.
+                state = item["State Name"]
+                county = item["County Name"]
+
+                # Skip rows where the county is not specified.
+                if county in {"Unassigned", "Unknown"}:
+                    continue
+
+                # Date string without "EDT" at end.
+                last_update = " ".join(item["Last Update"].split(" ")[0:2])
+
+                # Append to locations.
+                locations.append(
+                    CSBSLocation(
+                        # General info.
+                        i,
+                        state,
+                        county,
+                        # Coordinates.
+                        Coordinates(item["Latitude"], item["Longitude"]),
+                        # Last update (parse as ISO).
+                        datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+                        # Statistics.
+                        int(item["Confirmed"] or 0),
+                        int(item["Death"] or 0),
+                    )
                 )
-            )
-        LOGGER.info(f"{data_id} Data normalized")
-        # save the results to distributed cache
-        # TODO: fix json serialization
-        try:
-            await load_cache(data_id, locations)
-        except TypeError as type_err:
-            LOGGER.error(type_err)
-
-    # Return the locations.
-    return locations
+            LOGGER.info(f"{data_id} Data normalized")
+            # save the results to distributed cache
+            # TODO: fix json serialization
+            try:
+                await load_cache(data_id, locations)
+            except TypeError as type_err:
+                LOGGER.error(type_err)
+
+        # Return the locations.
+        return locations
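
For context, two short sketches follow. First, the assumed shape of the LocationGateway base class imported above; its real definition lives elsewhere in the package and is not shown in this diff, so treat this as an illustration only:

    # Assumed interface, not part of this commit: an abstract base class
    # with a single async hook that concrete gateways implement.
    from abc import ABC, abstractmethod

    class LocationGateway(ABC):
        @abstractmethod
        async def get_locations(self):
            """Fetch and normalize locations from an external data source."""

Second, a minimal usage sketch. The module path and main() wrapper are hypothetical; the CSV URL is the one this commit moves out of the module-level BASE_URL constant and into the constructor:

    import asyncio

    from app.services.location.csbs import CSBSGateway  # assumed module path

    # Instantiate once at module level so the TTL cache is actually reused.
    # Assumes httputils.CLIENT_SESSION is initialized by the app's startup hook.
    CSBS_GATEWAY = CSBSGateway("https://facts.csbs.org/covid-19/covid19_county.csv")

    async def main():
        locations = await CSBS_GATEWAY.get_locations()
        print(f"fetched {len(locations)} county locations")

    asyncio.run(main())

One caveat: assuming cached here is asyncache.cached over a cachetools.TTLCache, the cache key includes self, so with maxsize=1 a result is only reused while the same gateway instance is queried; constructing a fresh CSBSGateway per request would defeat the 30-minute cache.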