@@ -140,22 +140,31 @@ async def get_locations():
140140 # Get all of the data categories locations.
141141 confirmed = await get_category ("confirmed" )
142142 deaths = await get_category ("deaths" )
143- # recovered = await get_category("recovered")
143+ recovered = await get_category ("recovered" )
144144
145145 locations_confirmed = confirmed ["locations" ]
146146 locations_deaths = deaths ["locations" ]
147- # locations_recovered = recovered["locations"]
147+ locations_recovered = recovered ["locations" ]
148148
149149 # Final locations to return.
150150 locations = []
151-
151+ # ***************************************************************************
152+ # TODO: This iteration approach assumes the indexes remain the same
153+ # and opens us to a CRITICAL ERROR. The removal of a column in the data source
154+ # would break the API or SHIFT all the data (confirmed, deaths, recovered),
155+ # producing incorrect data to consumers.
156+ # ***************************************************************************
152157 # Go through locations.
153158 for index , location in enumerate (locations_confirmed ):
154159 # Get the timelines.
160+
161+ # TEMP: Fix for merging recovery data. See TODO above for more details.
162+ key = (location ["country" ], location ["province" ])
163+
155164 timelines = {
156- "confirmed" : locations_confirmed [ index ] ["history" ],
157- "deaths" : locations_deaths [ index ][ "history" ] ,
158- # ' recovered' : locations_recovered[ index]['history'] ,
165+ "confirmed" : location ["history" ],
166+ "deaths" : parse_history ( key , locations_deaths , index ) ,
167+ "recovered" : parse_history ( key , locations_recovered , index ) ,
159168 }
160169
161170 # Grab coordinates.
@@ -186,11 +195,34 @@ async def get_locations():
186195 for date , amount in timelines ["deaths" ].items ()
187196 }
188197 ),
189- "recovered" : Timeline ({}),
198+ "recovered" : Timeline (
199+ {
200+ datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
201+ for date , amount in timelines ["recovered" ].items ()
202+ }
203+ ),
190204 },
191205 )
192206 )
193207 LOGGER .info (f"{ data_id } Data normalized" )
194208
195209 # Finally, return the locations.
196210 return locations
211+
212+
def parse_history(key: tuple, locations: list, index: int) -> dict:
    """
    Return the ``history`` dict for the location identified by ``key``.

    Validates the entry at ``index`` against the current
    ``(country, province)`` key so that a removed/added column in the
    upstream data source cannot silently shift deaths/recovered data onto
    the wrong location. If the index does not line up (or is out of
    range), fall back to a linear scan for the matching key instead of
    dropping the data.

    :param key: ``(country, province)`` tuple identifying the location.
    :param locations: list of location dicts, each expected to carry
        ``country``, ``province`` and ``history`` keys.
    :param index: expected position of the location within ``locations``.
    :returns: the matching location's history dict, or ``{}`` when no
        entry matches.

    TEMP: to be replaced with a more efficient keyed merge in the refactor.
    """
    # Fast path: the common case where indexes are aligned across categories.
    try:
        candidate = locations[index]
        if key == (candidate["country"], candidate["province"]):
            return candidate["history"]
    except (IndexError, KeyError):
        pass

    # Slow path: indexes shifted (e.g. a column was added or removed
    # upstream) — locate the location by its (country, province) key.
    for location in locations:
        try:
            if key == (location["country"], location["province"]):
                return location["history"]
        except KeyError:
            # Malformed entry; keep scanning the rest of the list.
            continue

    # No entry matches this key at all; surface it at debug level and
    # return an empty history so callers still get a well-formed timeline.
    LOGGER.debug(f"iteration data merge error: {index} {key}")
    return {}
0 commit comments