class CSBSLocationService(LocationService):
    """
    Service for retrieving locations from CSBS (Conference of State Bank
    Supervisors) county-level data.

    Retrieval of individual locations is inherited from the
    ``LocationService`` base class; this subclass only wires itself up.
    """

    def __init__(self):
        # Defer all shared setup to the base service.
        super().__init__()
# Base URL of the CSBS county-level COVID-19 CSV feed that
# get_locations() downloads and parses.
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
    """
    Retrieves CSBS county locations.

    Results are memoized in-process for 30 minutes (TTLCache ttl=1800
    seconds) and also stored in the shared/distributed cache when
    serialization permits.

    :returns: The locations (list of CSBSLocation, or whatever the shared
        cache returned for this data id).
    :rtype: list
    """
    data_id = "csbs.locations"
    LOGGER.info(f"{data_id} Requesting data...")
    # Check the shared (distributed) cache before hitting the network.
    cache_results = await check_cache(data_id)
    if cache_results:
        LOGGER.info(f"{data_id} using shared cache results")
        locations = cache_results
    else:
        LOGGER.info(f"{data_id} shared cache empty")
        # Download the raw CSV feed.
        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
            text = await response.text()

        LOGGER.debug(f"{data_id} Data received")

        data = list(csv.DictReader(text.splitlines()))
        LOGGER.debug(f"{data_id} CSV parsed")

        locations = []

        for i, item in enumerate(data):
            # General info.
            state = item["State Name"]
            county = item["County Name"]

            # Skip rows whose county is not actually specified.
            if county in {"Unassigned", "Unknown"}:
                continue

            # Date string without the trailing timezone token (e.g. "EDT").
            last_update = " ".join(item["Last Update"].split(" ")[0:2])

            # Append to locations.
            locations.append(
                CSBSLocation(
                    # General info.
                    i,
                    state,
                    county,
                    # Coordinates.
                    Coordinates(item["Latitude"], item["Longitude"]),
                    # Last update (parse as ISO, marked as UTC with "Z").
                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
                    # Statistics (empty cells count as zero).
                    int(item["Confirmed"] or 0),
                    int(item["Death"] or 0),
                )
            )
        LOGGER.info(f"{data_id} Data normalized")
        # Save the results to the distributed cache; best-effort only,
        # since the location objects may not be JSON-serializable yet.
        # TODO: fix json serialization
        try:
            await load_cache(data_id, locations)
        except TypeError as type_err:
            LOGGER.error(type_err)

    # Return the locations.
    return locations
0 commit comments