 from asyncache import cached
 from cachetools import TTLCache
 
+from ...caches import check_cache, load_cache
 from ...coordinates import Coordinates
 from ...location.csbs import CSBSLocation
 from ...utils import httputils
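The new `check_cache`/`load_cache` helpers are only exercised in this diff as `await check_cache(data_id)` and `await load_cache(data_id, locations)`, with a falsy result signalling a miss. A minimal in-memory sketch of that contract for readers without the `...caches` module at hand; the real implementation presumably targets a shared store such as Redis, so treat this as an assumption, not the project's API:

```python
# In-memory stand-in for the shared-cache contract used in this diff.
# Assumption: check_cache returns the cached payload or None on a miss,
# and load_cache stores a payload under a string key.
_STORE: dict = {}

async def check_cache(data_id):
    # Return the previously stored payload for this key, if any.
    return _STORE.get(data_id)

async def load_cache(data_id, data):
    # Store the payload so later lookups (or other workers) can reuse it.
    _STORE[data_id] = data
```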
@@ -34,7 +35,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ
 BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
 
 
-@cached(cache=TTLCache(maxsize=1, ttl=3600))
+@cached(cache=TTLCache(maxsize=1, ttl=1800))
 async def get_locations():
     """
-    Retrieves county locations; locations are cached for 1 hour
+    Retrieves county locations; locations are cached for 30 minutes
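The decorator change above halves the in-process cache window from 3600 s to 1800 s, so the docstring is updated to match. `asyncache.cached` memoizes the awaited result of the coroutine in the given `cachetools` cache; within the TTL, repeat calls skip the function body (and hence the HTTP fetch) entirely. A small self-contained demo of that behavior, using a hypothetical `fetch` function rather than anything from this module:

```python
import asyncio

from asyncache import cached
from cachetools import TTLCache

@cached(TTLCache(maxsize=1, ttl=1800))
async def fetch():
    # Runs only on a cache miss; the result is reused for 30 minutes.
    print("cache miss: fetching")
    return [1, 2, 3]

async def main():
    await fetch()  # executes the body
    await fetch()  # served from the TTL cache, no second print

asyncio.run(main())
```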
@@ -44,48 +45,58 @@ async def get_locations():
     """
     data_id = "csbs.locations"
     LOGGER.info(f"{data_id} Requesting data...")
-    async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
-        text = await response.text()
-
-    LOGGER.debug(f"{data_id} Data received")
-
-    data = list(csv.DictReader(text.splitlines()))
-    LOGGER.debug(f"{data_id} CSV parsed")
-
-    locations = []
-
-    for i, item in enumerate(data):
-        # General info.
-        state = item["State Name"]
-        county = item["County Name"]
-
-        # Ensure country is specified.
-        if county in {"Unassigned", "Unknown"}:
-            continue
-
-        # Coordinates.
-        coordinates = Coordinates(item["Latitude"], item["Longitude"])  # pylint: disable=unused-variable
-
-        # Date string without "EDT" at end.
-        last_update = " ".join(item["Last Update"].split(" ")[0:2])
-
-        # Append to locations.
-        locations.append(
-            CSBSLocation(
-                # General info.
-                i,
-                state,
-                county,
-                # Coordinates.
-                Coordinates(item["Latitude"], item["Longitude"]),
-                # Last update (parse as ISO).
-                datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-                # Statistics.
-                int(item["Confirmed"] or 0),
-                int(item["Death"] or 0),
+    # Check the shared cache first.
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        locations = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
+            text = await response.text()
+
+        LOGGER.debug(f"{data_id} Data received")
+
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")
+
+        locations = []
+
+        for i, item in enumerate(data):
+            # General info.
+            state = item["State Name"]
+            county = item["County Name"]
+
+            # Ensure county is specified.
+            if county in {"Unassigned", "Unknown"}:
+                continue
+
+            # Date string without "EDT" at end.
+            last_update = " ".join(item["Last Update"].split(" ")[0:2])
+
+            # Append to locations.
+            locations.append(
+                CSBSLocation(
+                    # General info.
+                    i,
+                    state,
+                    county,
+                    # Coordinates.
+                    Coordinates(item["Latitude"], item["Longitude"]),
+                    # Last update (parse as ISO).
+                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+                    # Statistics.
+                    int(item["Confirmed"] or 0),
+                    int(item["Death"] or 0),
+                )
             )
-        )
-    LOGGER.info(f"{data_id} Data normalized")
-
+        LOGGER.info(f"{data_id} Data normalized")
+        # Save the results to the distributed cache.
+        # TODO: fix json serialization
+        try:
+            await load_cache(data_id, locations)
+        except TypeError as type_err:
+            LOGGER.error(type_err)
+
     # Return the locations.
     return locations
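On the `TODO: fix json serialization` above: `load_cache` evidently raises a `TypeError` because `CSBSLocation` instances are not JSON-serializable as-is. One possible direction, sketched under the assumption that the cache stores JSON text and that the location objects are plain attribute holders; if the model already exposes its own `serialize()` helper, that would be the better choice:

```python
import json

class Loc:
    # Hypothetical stand-in for a CSBSLocation-like data holder.
    def __init__(self, county, confirmed):
        self.county = county
        self.confirmed = confirmed

locations = [Loc("Example County", 3)]

# json.dumps rejects arbitrary objects; `default` supplies a fallback
# that converts each object to its attribute dict before encoding.
payload = json.dumps(locations, default=vars)
print(payload)  # [{"county": "Example County", "confirmed": 3}]
```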