@@ -36,6 +36,89 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
3636 locations = await self .get_all ()
3737 return locations [loc_id ]
3838
@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
    """
    Retrieves the locations from the categories.

    The result is cached for 30 minutes (``ttl=1800`` seconds).

    :returns: The locations.
    :rtype: List[Location]
    """
    data_id = "jhu.locations"
    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
    # Get all of the data categories locations.
    confirmed = await get_category("confirmed")
    deaths = await get_category("deaths")
    recovered = await get_category("recovered")

    locations_confirmed = confirmed["locations"]
    locations_deaths = deaths["locations"]
    locations_recovered = recovered["locations"]

    # Final locations to return.
    locations = []
    # ***************************************************************************
    # TODO: This iteration approach assumes the indexes remain the same
    # and opens us to a CRITICAL ERROR. The removal of a column in the data
    # source would break the API or SHIFT all the data confirmed, deaths,
    # recovery producing incorrect data to consumers.
    # ***************************************************************************
    # Go through locations.
    for index, location in enumerate(locations_confirmed):
        # TEMP: Fix for merging recovery data. See TODO above for more details.
        key = (location["country"], location["province"])

        # Raw {date-string: amount} histories, one per category. The deaths
        # and recovered rows are matched to this confirmed row by index,
        # validated against `key` inside parse_history.
        timelines = {
            "confirmed": location["history"],
            "deaths": parse_history(key, locations_deaths, index),
            "recovered": parse_history(key, locations_recovered, index),
        }

        # Grab coordinates.
        coordinates = location["coordinates"]

        # Create location (supporting timelines) and append.
        locations.append(
            TimelinedLocation(
                # General info.
                index,
                location["country"],
                location["province"],
                # Coordinates.
                Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
                # Last update.
                datetime.utcnow().isoformat() + "Z",
                # Timelines: re-key every category's history with ISO-8601
                # dates ("%m/%d/%y" -> isoformat + "Z"). One comprehension
                # replaces three identical copy-pasted Timeline builds;
                # insertion order (confirmed, deaths, recovered) is preserved.
                {
                    category: Timeline(
                        timeline={
                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
                            for date, amount in history.items()
                        }
                    )
                    for category, history in timelines.items()
                },
            )
        )
    LOGGER.info(f"{data_id} Data normalized")

    # Finally, return the locations.
    return locations
121+
39122
40123# ---------------------------------------------------------------
41124
@@ -82,10 +165,12 @@ async def get_category(category):
82165
83166 for item in data :
84167 # Filter out all the dates.
85- dates = dict (filter (lambda element : date_util .is_date (element [0 ]), item .items ()))
168+ dates = dict (
169+ filter (lambda element : date_util .is_date (element [0 ]), item .items ()))
86170
87171 # Make location history from dates.
88- history = {date : int (float (amount or 0 )) for date , amount in dates .items ()}
172+ history = {date : int (float (amount or 0 ))
173+ for date , amount in dates .items ()}
89174
90175 # Country for this location.
91176 country = item ["Country/Region" ]
@@ -101,7 +186,7 @@ async def get_category(category):
101186 "country_code" : countries .country_code (country ),
102187 "province" : item ["Province/State" ],
103188 # Coordinates.
104- "coordinates" : {"lat" : item ["Lat" ], "long" : item ["Long" ],},
189+ "coordinates" : {"lat" : item ["Lat" ], "long" : item ["Long" ], },
105190 # History.
106191 "history" : history ,
107192 # Latest statistic.
@@ -127,89 +212,6 @@ async def get_category(category):
127212 return results
128213
129214
@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
    """
    Build the timelined locations by merging the confirmed, deaths, and
    recovered categories. The merged result is cached (TTL 1800 seconds).

    :returns: The locations.
    :rtype: List[Location]
    """
    data_id = "jhu.locations"
    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")

    # Fetch each category and keep only its location rows.
    confirmed = (await get_category("confirmed"))["locations"]
    deaths = (await get_category("deaths"))["locations"]
    recovered = (await get_category("recovered"))["locations"]

    def as_iso_timeline(history):
        # Re-key a {"m/d/yy": amount} history with ISO-8601 "Z" dates.
        return Timeline(
            timeline={
                datetime.strptime(day, "%m/%d/%y").isoformat() + "Z": count
                for day, count in history.items()
            }
        )

    # ***************************************************************************
    # TODO: Rows are matched across categories purely by index, which
    # assumes every category keeps an identical row order. An upstream
    # removal/shift would break the API or silently misalign confirmed,
    # deaths, and recovered data for consumers.
    # ***************************************************************************
    locations = []
    for idx, entry in enumerate(confirmed):
        # TEMP: (country, province) key guards the index-based merge of the
        # other categories. See TODO above for more details.
        merge_key = (entry["country"], entry["province"])

        coords = entry["coordinates"]

        locations.append(
            TimelinedLocation(
                # General info.
                idx,
                entry["country"],
                entry["province"],
                # Coordinates.
                Coordinates(latitude=coords["lat"], longitude=coords["long"]),
                # Last update.
                datetime.utcnow().isoformat() + "Z",
                # Timelines (dates re-parsed as ISO).
                {
                    "confirmed": as_iso_timeline(entry["history"]),
                    "deaths": as_iso_timeline(parse_history(merge_key, deaths, idx)),
                    "recovered": as_iso_timeline(parse_history(merge_key, recovered, idx)),
                },
            )
        )
    LOGGER.info(f"{data_id} Data normalized")

    return locations
212-
213215def parse_history (key : tuple , locations : list , index : int ):
214216 """
215217 Helper for validating and extracting history content from
0 commit comments