"""app.services.location.csbs.py"""
import csv
import logging
from datetime import datetime
from asyncache import cached
from cachetools import TTLCache
from ...caches import check_cache, load_cache
from ...coordinates import Coordinates
from ...location.csbs import CSBSLocation
from ...utils import httputils
from . import LocationService
LOGGER = logging.getLogger("services.location.csbs")


class CSBSLocationService(LocationService):
    """
    Service for retrieving locations from CSBS (Conference of State Bank Supervisors).
    """

    async def get_all(self):
        # Get the locations.
        locations = await get_locations()
        return locations

    async def get(self, loc_id):  # pylint: disable=arguments-differ
        # Get the location at the index equal to the provided id.
        locations = await self.get_all()
        return locations[loc_id]

# Base URL for fetching data
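# Expected CSV columns, as consumed by the parser below: "State Name",
# "County Name", "Last Update", "Latitude", "Longitude", "Confirmed", "Death".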
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"


@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
    """
    Retrieves county locations; locations are cached for 30 minutes (TTL of 1800 seconds).

    :returns: The locations.
    :rtype: List[CSBSLocation]
    """
    data_id = "csbs.locations"
    LOGGER.info(f"{data_id} Requesting data...")

    # Check the shared cache first.
    cache_results = await check_cache(data_id)
    if cache_results:
        LOGGER.info(f"{data_id} using shared cache results")
        locations = cache_results
    else:
        LOGGER.info(f"{data_id} shared cache empty")
        async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
            text = await response.text()
        LOGGER.debug(f"{data_id} Data received")

        data = list(csv.DictReader(text.splitlines()))
        LOGGER.debug(f"{data_id} CSV parsed")

        locations = []
        for i, item in enumerate(data):
            # General info.
            state = item["State Name"]
            county = item["County Name"]

            # Ensure the county is specified.
            if county in {"Unassigned", "Unknown"}:
                continue

            # Date string without "EDT" at the end.
            last_update = " ".join(item["Last Update"].split(" ")[0:2])

            # Append to locations.
            locations.append(
                CSBSLocation(
                    # General info.
                    i,
                    state,
                    county,
                    # Coordinates.
                    Coordinates(item["Latitude"], item["Longitude"]),
                    # Last update (parse as ISO).
                    datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
                    # Statistics.
                    int(item["Confirmed"] or 0),
                    int(item["Death"] or 0),
                )
            )
        LOGGER.info(f"{data_id} Data normalized")

        # Save the results to the distributed cache.
        # TODO: fix json serialization
        try:
            await load_cache(data_id, locations)
        except TypeError as type_err:
            LOGGER.error(type_err)

    # Return the locations.
    return locations
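

# A minimal usage sketch (illustrative only, not part of the service module):
# the shared aiohttp session must be opened before calling the service and
# closed afterwards. The setup/teardown helper names below are assumptions
# about app.utils.httputils, not confirmed APIs.
#
#     import asyncio
#
#     async def main():
#         await httputils.setup_client_session()  # assumed helper
#         try:
#             service = CSBSLocationService()
#             locations = await service.get_all()
#             print(f"Fetched {len(locations)} county locations")
#             first = await service.get(0)  # id is the list index
#         finally:
#             await httputils.teardown_client_session()  # assumed helper
#
#     asyncio.run(main())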