@@ -8,6 +8,7 @@
 from asyncache import cached
 from cachetools import TTLCache

+from ...caches import check_cache, get_cache, load_cache
 from ...coordinates import Coordinates
 from ...location import TimelinedLocation
 from ...timeline import Timeline
@@ -57,68 +58,74 @@ async def get_category(category):
     category = category.lower()
     data_id = f"jhu.{category}"

-    # TODO: check cache
+    # check shared cache
+    cache_results = await check_cache(data_id)
+    if cache_results:
+        LOGGER.info(f"{data_id} using shared cache results")
+        results = cache_results
+    else:
+        LOGGER.info(f"{data_id} shared cache empty")
+        # URL to request data from.
+        url = BASE_URL + "time_series_covid19_%s_global.csv" % category

-    # URL to request data from.
-    url = BASE_URL + "time_series_covid19_%s_global.csv" % category
+        # Request the data
+        LOGGER.info(f"{data_id} Requesting data...")
+        async with httputils.CLIENT_SESSION.get(url) as response:
+            text = await response.text()

-    # Request the data
-    LOGGER.info(f"{data_id} Requesting data...")
-    async with httputils.CLIENT_SESSION.get(url) as response:
-        text = await response.text()
+        LOGGER.debug(f"{data_id} Data received")

-    LOGGER.debug(f"{data_id} Data received")
+        # Parse the CSV.
+        data = list(csv.DictReader(text.splitlines()))
+        LOGGER.debug(f"{data_id} CSV parsed")

-    # Parse the CSV.
-    data = list(csv.DictReader(text.splitlines()))
-    LOGGER.debug(f"{data_id} CSV parsed")
+        # The normalized locations.
+        locations = []

-    # The normalized locations.
-    locations = []
+        for item in data:
+            # Filter out all the dates.
+            dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))

-    for item in data:
-        # Filter out all the dates.
-        dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))
+            # Make location history from dates.
+            history = {date: int(amount or 0) for date, amount in dates.items()}

-        # Make location history from dates.
-        history = {date: int(amount or 0) for date, amount in dates.items()}
+            # Country for this location.
+            country = item["Country/Region"]

-        # Country for this location.
-        country = item["Country/Region"]
+            # Latest data insert value.
+            latest = list(history.values())[-1]

-        # Latest data insert value.
-        latest = list(history.values())[-1]
+            # Normalize the item and append to locations.
+            locations.append(
+                {
+                    # General info.
+                    "country": country,
+                    "country_code": countries.country_code(country),
+                    "province": item["Province/State"],
+                    # Coordinates.
+                    "coordinates": {"lat": item["Lat"], "long": item["Long"],},
+                    # History.
+                    "history": history,
+                    # Latest statistic.
+                    "latest": int(latest or 0),
+                }
+            )
+        LOGGER.debug(f"{data_id} Data normalized")
+
+        # Latest total.
+        latest = sum(map(lambda location: location["latest"], locations))
+
+        # Return the final data.
+        results = {
+            "locations": locations,
+            "latest": latest,
+            "last_updated": datetime.utcnow().isoformat() + "Z",
+            "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
+        }
+        # save the results to distributed cache
+        await load_cache(data_id, results)

-        # Normalize the item and append to locations.
-        locations.append(
-            {
-                # General info.
-                "country": country,
-                "country_code": countries.country_code(country),
-                "province": item["Province/State"],
-                # Coordinates.
-                "coordinates": {"lat": item["Lat"], "long": item["Long"],},
-                # History.
-                "history": history,
-                # Latest statistic.
-                "latest": int(latest or 0),
-            }
-        )
-    LOGGER.debug(f"{data_id} Data normalized")
-
-    # Latest total.
-    latest = sum(map(lambda location: location["latest"], locations))
-
-    # Return the final data.
-    results = {
-        "locations": locations,
-        "latest": latest,
-        "last_updated": datetime.utcnow().isoformat() + "Z",
-        "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
-    }
     LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
-    # save the results to distributed cache
-    # TODO: async
     return results


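Note: the change assumes a `caches` module (imported above as `from ...caches import check_cache, get_cache, load_cache`) exposing awaitable helpers for a shared cache. That module is not part of this diff; the sketch below is a minimal in-memory stand-in that matches the call sites (`await check_cache(data_id)` returning cached results or a falsy value, and `await load_cache(data_id, results)` storing them). The TTL and JSON serialization are assumptions, and the real backend may be a shared store such as Redis.

import json
import time
from typing import Any, Optional

# Hypothetical in-memory stand-in for the shared cache backend.
_STORE: dict = {}
_TTL_SECONDS = 3600  # assumed expiry; not specified in the diff


async def check_cache(data_id: str) -> Optional[Any]:
    """Return the cached results for data_id, or None on a miss or expiry."""
    entry = _STORE.get(data_id)
    if entry is None:
        return None
    payload, stored_at = entry
    if time.monotonic() - stored_at > _TTL_SECONDS:
        # Entry is stale; drop it and report a miss.
        _STORE.pop(data_id, None)
        return None
    return json.loads(payload)


async def load_cache(data_id: str, results: Any) -> None:
    """Serialize results and store them under data_id with a timestamp."""
    _STORE[data_id] = (json.dumps(results), time.monotonic())

With helpers along these lines, the first request for a given `data_id` (e.g. `jhu.confirmed`) pays the download and parse cost and then populates the cache via `load_cache`; subsequent requests from any worker sharing the backend short-circuit in the `if cache_results:` branch.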