@@ -6,6 +6,7 @@
 from asyncache import cached
 from cachetools import TTLCache
 
+from ...caches import check_cache, load_cache
 from ...coordinates import Coordinates
 from ...location.nyt import NYTLocation
 from ...timeline import Timeline
@@ -66,7 +67,7 @@ def get_grouped_locations_dict(data):
     return grouped_locations
 
 
-@cached(cache=TTLCache(maxsize=1, ttl=3600))
+@cached(cache=TTLCache(maxsize=1, ttl=1800))
 async def get_locations():
7172 """
7273 Returns a list containing parsed NYT data by US county. The data is cached for 1 hour.
@@ -77,55 +78,64 @@ async def get_locations():
     data_id = "nyt.locations"
     # Request the data.
     LOGGER.info(f"{data_id} Requesting data...")
-    async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-        text = await response.text()
-
-    LOGGER.debug(f"{data_id} Data received")
-
-    # Parse the CSV.
-    data = list(csv.DictReader(text.splitlines()))
-    LOGGER.debug(f"{data_id} CSV parsed")
-
-    # Group together locations (NYT data ordered by dates not location).
-    grouped_locations = get_grouped_locations_dict(data)
-
-    # The normalized locations.
-    locations = []
-
-    for idx, (county_state, histories) in enumerate(grouped_locations.items()):
-        # Make location history for confirmed and deaths from dates.
-        # List is tuples of (date, amount) in order of increasing dates.
-        confirmed_list = histories["confirmed"]
-        confirmed_history = {date: int(amount or 0) for date, amount in confirmed_list}
-
-        deaths_list = histories["deaths"]
-        deaths_history = {date: int(amount or 0) for date, amount in deaths_list}
-
-        # Normalize the item and append to locations.
-        locations.append(
-            NYTLocation(
-                id=idx,
-                state=county_state[1],
-                county=county_state[0],
-                coordinates=Coordinates(None, None),  # NYT does not provide coordinates
-                last_updated=datetime.utcnow().isoformat() + "Z",  # since last request
-                timelines={
-                    "confirmed": Timeline(
-                        {
-                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
-                            for date, amount in confirmed_history.items()
-                        }
-                    ),
-                    "deaths": Timeline(
-                        {
-                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
-                            for date, amount in deaths_history.items()
-                        }
-                    ),
-                    "recovered": Timeline({}),
-                },
+    # Check the shared cache first.
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        locations = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+            text = await response.text()
+
+        LOGGER.debug(f"{data_id} Data received")
+
+        # Parse the CSV.
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")
+
+        # Group together locations (NYT data ordered by dates not location).
+        grouped_locations = get_grouped_locations_dict(data)
+
+        # The normalized locations.
+        locations = []
+
+        for idx, (county_state, histories) in enumerate(grouped_locations.items()):
+            # Make location history for confirmed and deaths from dates.
+            # List is tuples of (date, amount) in order of increasing dates.
+            confirmed_list = histories["confirmed"]
+            confirmed_history = {date: int(amount or 0) for date, amount in confirmed_list}
+
+            deaths_list = histories["deaths"]
+            deaths_history = {date: int(amount or 0) for date, amount in deaths_list}
+
+            # Normalize the item and append to locations.
+            locations.append(
+                NYTLocation(
+                    id=idx,
+                    state=county_state[1],
+                    county=county_state[0],
+                    coordinates=Coordinates(None, None),  # NYT does not provide coordinates
+                    last_updated=datetime.utcnow().isoformat() + "Z",  # since last request
+                    timelines={
+                        "confirmed": Timeline(
+                            {
+                                datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
+                                for date, amount in confirmed_history.items()
+                            }
+                        ),
+                        "deaths": Timeline(
+                            {
+                                datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
+                                for date, amount in deaths_history.items()
+                            }
+                        ),
+                        "recovered": Timeline({}),
+                    },
+                )
             )
-        )
-    LOGGER.info(f"{data_id} Data normalized")
+        LOGGER.info(f"{data_id} Data normalized")
+        # Save the results to the distributed cache.
+        await load_cache(data_id, locations)
 
     return locations
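
The diff imports `check_cache` and `load_cache` from `...caches`, but that module is outside the hunks shown. Below is a minimal sketch of the contract the two helpers would need to satisfy: an async lookup and an async store keyed by `data_id`. The in-memory dict, the `CACHE_TTL` value, and the expiry logic are assumptions for illustration only; a real shared cache would more plausibly sit on a store visible to all worker processes, such as Redis.

```python
import time
from typing import Any, Dict, Optional, Tuple

# Hypothetical in-memory stand-in for the shared cache; the actual
# backend of the project's `caches` module is not shown in this diff.
_STORE: Dict[str, Tuple[float, Any]] = {}  # data_id -> (expiry, value)
CACHE_TTL = 1800  # seconds; assumed to mirror the decorator's TTL


async def check_cache(data_id: str) -> Optional[Any]:
    """Return the cached value for data_id, or None if missing or expired."""
    entry = _STORE.get(data_id)
    if entry is None:
        return None
    expiry, value = entry
    if time.monotonic() > expiry:
        _STORE.pop(data_id, None)  # drop the stale entry
        return None
    return value


async def load_cache(data_id: str, value: Any) -> None:
    """Store value under data_id with a fresh TTL."""
    _STORE[data_id] = (time.monotonic() + CACHE_TTL, value)
```

Usage mirrors the diff: `await check_cache(data_id)` before fetching, and `await load_cache(data_id, locations)` after the data has been normalized.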
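
With this change, `get_locations()` sits behind two cache layers: asyncache's `@cached` memoizes the coroutine's result in a per-process `TTLCache`, and the shared cache above is consulted only on a decorator miss. A small self-contained demo of the decorator layer alone, with a hypothetical `fetch` coroutine standing in for `get_locations()`:

```python
import asyncio

from asyncache import cached
from cachetools import TTLCache


@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def fetch():
    # The body runs only on a cache miss; for the next 1800 seconds the
    # decorator returns the memoized result without re-entering it.
    print("cache miss: doing the expensive work")
    return ["result"]


async def main():
    await fetch()  # miss: the body executes
    await fetch()  # hit: served from the in-process TTLCache


asyncio.run(main())
```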