 from cachetools import TTLCache
 
 from ...caches import check_cache, load_cache
-from ...coordinates import Coordinates
 from ...location import TimelinedLocation
-from ...models import Timeline
+from ...models import Coordinates, Timeline
 from ...utils import countries
 from ...utils import date as date_util
 from ...utils import httputils
@@ -140,22 +139,31 @@ async def get_locations():
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
-    # recovered = await get_category("recovered")
+    recovered = await get_category("recovered")
 
     locations_confirmed = confirmed["locations"]
     locations_deaths = deaths["locations"]
-    # locations_recovered = recovered["locations"]
+    locations_recovered = recovered["locations"]
 
     # Final locations to return.
     locations = []
-
+    # ***************************************************************************
+    # TODO: This iteration approach assumes the indexes remain the same,
+    # which opens us to a CRITICAL ERROR. The removal of a column in the data
+    # source would break the API or SHIFT the confirmed, deaths, and recovered
+    # data, producing incorrect data for consumers.
+    # ***************************************************************************
     # Go through locations.
     for index, location in enumerate(locations_confirmed):
         # Get the timelines.
+
+        # TEMP: Fix for merging recovery data. See the TODO above for more details.
+        key = (location["country"], location["province"])
+
         timelines = {
-            "confirmed": locations_confirmed[index]["history"],
-            "deaths": locations_deaths[index]["history"],
-            # 'recovered': locations_recovered[index]['history'],
+            "confirmed": location["history"],
+            "deaths": parse_history(key, locations_deaths, index),
+            "recovered": parse_history(key, locations_recovered, index),
         }
 
         # Grab coordinates.
@@ -169,7 +177,7 @@ async def get_locations():
                 location["country"],
                 location["province"],
                 # Coordinates.
-                Coordinates(coordinates["lat"], coordinates["long"]),
+                Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
                 # Last update.
                 datetime.utcnow().isoformat() + "Z",
                 # Timelines (parse dates as ISO).
@@ -186,11 +194,34 @@ async def get_locations():
                             for date, amount in timelines["deaths"].items()
                         }
                     ),
-                    "recovered": Timeline(),
+                    "recovered": Timeline(
+                        {
+                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
+                            for date, amount in timelines["recovered"].items()
+                        }
+                    ),
                 },
             )
         )
     LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
+
+
+def parse_history(key: tuple, locations: list, index: int):
+    """
+    Helper for validating and extracting history content from
+    locations data based on index. Validates against the current
+    country/province key to guard against index/column mismatches.
+
+    TEMP: solution until a more efficient and robust approach lands in the refactor.
+    """
+    location_history = {}
+    try:
+        if key == (locations[index]["country"], locations[index]["province"]):
+            location_history = locations[index]["history"]
+    except (IndexError, KeyError):
+        LOGGER.debug(f"iteration data merge error: {index} {key}")
+
+    return location_history
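
The TODO banner and the parse_history docstring both defer to a later refactor that drops positional indexing entirely. One way to do that is to merge every category into a single mapping keyed on (country, province) before building timelines, so a dropped or reordered row in one category cannot shift another category's data. A minimal sketch of that idea, assuming each category's locations carry "country", "province", and "history" fields as in this diff; merge_by_key and the sample data below are hypothetical, not part of this commit:

    from collections import defaultdict

    def merge_by_key(categories: dict) -> dict:
        """Merge per-category location lists into one dict keyed on
        (country, province), removing any dependence on row order."""
        merged = defaultdict(dict)
        for category, locations in categories.items():
            for location in locations:
                key = (location["country"], location["province"])
                merged[key][category] = location["history"]
        return merged

    # Hypothetical sample data: "recovered" is missing its row, which the
    # index-based loop would have silently misaligned.
    categories = {
        "confirmed": [{"country": "US", "province": "", "history": {"3/20/20": 5}}],
        "deaths": [{"country": "US", "province": "", "history": {"3/20/20": 1}}],
        "recovered": [],
    }
    for (country, province), timelines in merge_by_key(categories).items():
        print(country, province, timelines)  # "recovered" is simply absent, not shifted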