@@ -142,22 +142,31 @@ async def get_locations():
142142 # Get all of the data categories locations.
143143 confirmed = await get_category ("confirmed" )
144144 deaths = await get_category ("deaths" )
145- # recovered = await get_category("recovered")
145+ recovered = await get_category ("recovered" )
146146
147147 locations_confirmed = confirmed ["locations" ]
148148 locations_deaths = deaths ["locations" ]
149- # locations_recovered = recovered["locations"]
149+ locations_recovered = recovered ["locations" ]
150150
151151 # Final locations to return.
152152 locations = []
153-
153+ # ***************************************************************************
154+ # TODO: This iteration approach assumes the indexes remain the same
155+ # and opens us to a CRITICAL ERROR. The removal of a column in the data source
156+ # would break the API or SHIFT all of the confirmed, deaths, and recovered data,
157+ # producing incorrect data for consumers.
158+ # ***************************************************************************
154159 # Go through locations.
155160 for index , location in enumerate (locations_confirmed ):
156161 # Get the timelines.
162+
163+ # TEMP: Fix for merging recovery data. See TODO above for more details.
164+ key = (location ['country' ], location ['province' ])
165+
157166 timelines = {
158- "confirmed" : locations_confirmed [ index ] ["history" ],
159- "deaths" : locations_deaths [ index ][ "history" ] ,
160- # ' recovered' : locations_recovered[ index]['history'] ,
167+ "confirmed" : location ["history" ],
168+ "deaths" : parse_history ( key , locations_deaths , index ) ,
169+ " recovered" : parse_history ( key , locations_recovered , index ) ,
161170 }
162171
163172 # Grab coordinates.
@@ -188,11 +197,34 @@ async def get_locations():
188197 for date , amount in timelines ["deaths" ].items ()
189198 }
190199 ),
191- "recovered" : Timeline ({}),
200+ "recovered" : Timeline (
201+ {
202+ datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
203+ for date , amount in timelines ["recovered" ].items ()
204+ }
205+ ),
192206 },
193207 )
194208 )
195209 LOGGER .info (f"{ data_id } Data normalized" )
196210
197211 # Finally, return the locations.
198212 return locations
213+
214+
def parse_history(key: tuple, locations: list, index: int) -> dict:
    """
    Helper for validating and extracting history content from
    locations data based on index.

    Validates the row at ``index`` against the current
    (country, province) ``key`` to guard against index/column
    drift in the upstream data source; returns ``{}`` when the
    row is missing, malformed, or does not match the key.

    TEMP: interim safeguard — a more efficient merge approach
    belongs in the planned refactor.
    """
    location_history = {}
    try:
        # Only trust the indexed row if its identity matches the key.
        if key == (locations[index]["country"], locations[index]["province"]):
            location_history = locations[index]["history"]
    except (IndexError, KeyError):
        # NOTE: the original `except IndexError or KeyError` only caught
        # IndexError (`or` picks the first truthy class); a tuple catches both.
        LOGGER.warning(f"iteration data merge error: {index} {key}")

    return location_history
0 commit comments