@@ -26,15 +26,88 @@ class JhuLocationService(LocationService):
2626 Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19).
2727 """
2828
29- async def get_all (self ):
30- # Get the locations.
31- locations = await get_locations ()
29+ @cached (cache = TTLCache (maxsize = 1 , ttl = 1800 ))
30+ async def get_locations (self ):
31+ """
32+ Retrieves the locations from the categories. The locations are cached for 30 minutes (TTL 1800s).
33+
34+ :returns: The locations.
35+ :rtype: List[Location]
36+ """
37+ data_id = "jhu.locations"
38+ LOGGER .info (f"pid:{ PID } : { data_id } Requesting data..." )
39+ # Get all of the data categories locations.
40+ confirmed = await get_category ("confirmed" )
41+ deaths = await get_category ("deaths" )
42+ recovered = await get_category ("recovered" )
43+
44+ locations_confirmed = confirmed ["locations" ]
45+ locations_deaths = deaths ["locations" ]
46+ locations_recovered = recovered ["locations" ]
47+
48+ # Final locations to return.
49+ locations = []
50+ # ***************************************************************************
51+ # TODO: This iteration approach assumes the indexes remain the same
52+ # and opens us to a CRITICAL ERROR. The removal of a column in the data source
53+ # would break the API or SHIFT all the data confirmed, deaths, recovery producing
54+ # incorrect data to consumers.
55+ # ***************************************************************************
56+ # Go through locations.
57+ for index , location in enumerate (locations_confirmed ):
58+ # Get the timelines.
59+
60+ # TEMP: Fix for merging recovery data. See TODO above for more details.
61+ key = (location ["country" ], location ["province" ])
62+
63+ timelines = {
64+ "confirmed" : location ["history" ],
65+ "deaths" : parse_history (key , locations_deaths , index ),
66+ "recovered" : parse_history (key , locations_recovered , index ),
67+ }
68+
69+ # Grab coordinates.
70+ coordinates = location ["coordinates" ]
71+
72+ # Create location (supporting timelines) and append.
73+ locations .append (
74+ TimelinedLocation (
75+ # General info.
76+ index ,
77+ location ["country" ],
78+ location ["province" ],
79+ # Coordinates.
80+ Coordinates (latitude = coordinates ["lat" ], longitude = coordinates ["long" ]),
81+ # Last update.
82+ datetime .utcnow ().isoformat () + "Z" ,
83+ # Timelines (parse dates as ISO).
84+ {
85+ "confirmed" : Timeline (
86+ timeline = {
87+ datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
88+ for date , amount in timelines ["confirmed" ].items ()
89+ }
90+ ),
91+ "deaths" : Timeline (
92+ timeline = {
93+ datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
94+ for date , amount in timelines ["deaths" ].items ()
95+ }
96+ ),
97+ "recovered" : Timeline (
98+ timeline = {
99+ datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
100+ for date , amount in timelines ["recovered" ].items ()
101+ }
102+ ),
103+ },
104+ )
105+ )
106+ LOGGER .info (f"{ data_id } Data normalized" )
107+
108+ # Finally, return the locations.
32109 return locations
33110
34- async def get (self , loc_id ): # pylint: disable=arguments-differ
35- # Get location at the index equal to provided id.
36- locations = await self .get_all ()
37- return locations [loc_id ]
38111
39112
40113# ---------------------------------------------------------------
@@ -127,87 +200,6 @@ async def get_category(category):
127200 return results
128201
129202
130- @cached (cache = TTLCache (maxsize = 1 , ttl = 1800 ))
131- async def get_locations ():
132- """
133- Retrieves the locations from the categories. The locations are cached for 1 hour.
134-
135- :returns: The locations.
136- :rtype: List[Location]
137- """
138- data_id = "jhu.locations"
139- LOGGER .info (f"pid:{ PID } : { data_id } Requesting data..." )
140- # Get all of the data categories locations.
141- confirmed = await get_category ("confirmed" )
142- deaths = await get_category ("deaths" )
143- recovered = await get_category ("recovered" )
144-
145- locations_confirmed = confirmed ["locations" ]
146- locations_deaths = deaths ["locations" ]
147- locations_recovered = recovered ["locations" ]
148-
149- # Final locations to return.
150- locations = []
151- # ***************************************************************************
152- # TODO: This iteration approach assumes the indexes remain the same
153- # and opens us to a CRITICAL ERROR. The removal of a column in the data source
154- # would break the API or SHIFT all the data confirmed, deaths, recovery producting
155- # incorrect data to consumers.
156- # ***************************************************************************
157- # Go through locations.
158- for index , location in enumerate (locations_confirmed ):
159- # Get the timelines.
160-
161- # TEMP: Fix for merging recovery data. See TODO above for more details.
162- key = (location ["country" ], location ["province" ])
163-
164- timelines = {
165- "confirmed" : location ["history" ],
166- "deaths" : parse_history (key , locations_deaths , index ),
167- "recovered" : parse_history (key , locations_recovered , index ),
168- }
169-
170- # Grab coordinates.
171- coordinates = location ["coordinates" ]
172-
173- # Create location (supporting timelines) and append.
174- locations .append (
175- TimelinedLocation (
176- # General info.
177- index ,
178- location ["country" ],
179- location ["province" ],
180- # Coordinates.
181- Coordinates (latitude = coordinates ["lat" ], longitude = coordinates ["long" ]),
182- # Last update.
183- datetime .utcnow ().isoformat () + "Z" ,
184- # Timelines (parse dates as ISO).
185- {
186- "confirmed" : Timeline (
187- timeline = {
188- datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
189- for date , amount in timelines ["confirmed" ].items ()
190- }
191- ),
192- "deaths" : Timeline (
193- timeline = {
194- datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
195- for date , amount in timelines ["deaths" ].items ()
196- }
197- ),
198- "recovered" : Timeline (
199- timeline = {
200- datetime .strptime (date , "%m/%d/%y" ).isoformat () + "Z" : amount
201- for date , amount in timelines ["recovered" ].items ()
202- }
203- ),
204- },
205- )
206- )
207- LOGGER .info (f"{ data_id } Data normalized" )
208-
209- # Finally, return the locations.
210- return locations
211203
212204
213205def parse_history (key : tuple , locations : list , index : int ):