forked from ExpDev07/coronavirus-tracker-api
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathjhu.py
More file actions
171 lines (137 loc) · 5.22 KB
/
jhu.py
File metadata and controls
171 lines (137 loc) · 5.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
"""app.services.location.jhu.py"""
import csv
from datetime import datetime
from asyncache import cached
from cachetools import TTLCache
from ...coordinates import Coordinates
from ...location import TimelinedLocation
from ...timeline import Timeline
from ...utils import countries
from ...utils import date as date_util
from ...utils import httputils
from . import LocationService
class JhuLocationService(LocationService):
    """
    Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19).
    """

    async def get_all(self):
        """Return every location known to the JHU dataset."""
        return await get_locations()

    async def get(self, loc_id):  # pylint: disable=arguments-differ
        """Return the single location whose id (its list index) is ``loc_id``."""
        all_locations = await self.get_all()
        return all_locations[loc_id]
# ---------------------------------------------------------------
# Base URL for fetching category.
# NOTE(review): this still points at the legacy "2019-nCoV" repository name;
# the CSSE repo was renamed to "COVID-19" — confirm the raw URL still resolves
# (GitHub redirects renamed repos, but raw.githubusercontent behavior should be verified).
BASE_URL = (
    "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
)
@cached(cache=TTLCache(maxsize=1024, ttl=3600))
async def get_category(category):
    """
    Retrieves the data for the provided category. The data is cached for 1 hour.

    :param category: One of the upstream time-series categories
        (e.g. "confirmed", "deaths"); matched case-insensitively against the
        CSV filename.
    :returns: The data for category: a dict with "locations", the summed
        "latest" total, "last_updated" (ISO-8601 UTC) and "source".
    :rtype: dict
    """
    # Adhere to category naming standard.
    category = category.lower()

    # URL to request data from.
    url = BASE_URL + "time_series_covid19_%s_global.csv" % category

    # Request the data.
    async with httputils.CLIENT_SESSION.get(url) as response:
        text = await response.text()

    # Parse the CSV.
    data = list(csv.DictReader(text.splitlines()))

    # The normalized locations.
    locations = []

    for item in data:
        # Filter out all the date columns (every other column is metadata
        # such as country, province and coordinates).
        dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))

        # Make location history from dates; blank cells count as 0.
        history = {date: int(amount or 0) for date, amount in dates.items()}

        # Country for this location.
        country = item["Country/Region"]

        # Latest value is the right-most (most recent) date column. Guard
        # against a malformed row with no date columns at all, which
        # previously raised IndexError here.
        latest = list(history.values())[-1] if history else 0

        # Normalize the item and append to locations.
        locations.append(
            {
                # General info.
                "country": country,
                "country_code": countries.country_code(country),
                "province": item["Province/State"],
                # Coordinates.
                "coordinates": {"lat": item["Lat"], "long": item["Long"]},
                # History.
                "history": history,
                # Latest statistic (already an int via the comprehension above).
                "latest": latest,
            }
        )

    # Latest total across all locations.
    latest = sum(map(lambda location: location["latest"], locations))

    # Return the final data.
    return {
        "locations": locations,
        "latest": latest,
        "last_updated": datetime.utcnow().isoformat() + "Z",
        "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
    }
def _parse_history(history):
    """Re-key a ``{"m/d/yy": amount}`` history dict to ISO-8601 "Z" timestamps."""
    return {
        datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
        for date, amount in history.items()
    }


@cached(cache=TTLCache(maxsize=1024, ttl=3600))
async def get_locations():
    """
    Retrieves the locations from the categories. The locations are cached for 1 hour.

    :returns: The locations.
    :rtype: List[Location]
    """
    # Get all of the data categories locations.
    confirmed = await get_category("confirmed")
    deaths = await get_category("deaths")
    # recovered = await get_category("recovered")

    locations_confirmed = confirmed["locations"]
    locations_deaths = deaths["locations"]
    # locations_recovered = recovered["locations"]

    # Final locations to return.
    locations = []

    # Go through locations. NOTE(review): this assumes the confirmed and
    # deaths CSVs list rows in the same order so that the same index refers
    # to the same place — verify against the upstream data layout.
    for index, location in enumerate(locations_confirmed):
        # Grab coordinates.
        coordinates = location["coordinates"]

        # Create location (supporting timelines) and append.
        locations.append(
            TimelinedLocation(
                # General info.
                index,
                location["country"],
                location["province"],
                # Coordinates.
                Coordinates(coordinates["lat"], coordinates["long"]),
                # Last update.
                datetime.utcnow().isoformat() + "Z",
                # Timelines (parse dates as ISO).
                {
                    "confirmed": Timeline(_parse_history(location["history"])),
                    "deaths": Timeline(_parse_history(locations_deaths[index]["history"])),
                    # No per-location recovered data is fetched (see the
                    # commented-out category above), so this stays empty.
                    "recovered": Timeline({}),
                },
            )
        )

    # Finally, return the locations.
    return locations