149 changes: 76 additions & 73 deletions app/services/location/jhu.py
@@ -45,86 +45,89 @@ async def get(self, loc_id): # pylint: disable=arguments-differ


@cached(cache=TTLCache(maxsize=4, ttl=1800))
async def get_category(category):
async def get_category(category_list):
"""
Retrieves the data for the provided category. The data is cached for 30 minutes locally, 1 hour via shared Redis.

:returns: The data for category.
:rtype: dict
"""
    # Adhere to category naming standard.
    category = category.lower()
    data_id = f"jhu.{category}"

    # check shared cache
    cache_results = await check_cache(data_id)
    if cache_results:
        LOGGER.info(f"{data_id} using shared cache results")
        results = cache_results
    else:
        LOGGER.info(f"{data_id} shared cache empty")
        # URL to request data from.
        url = BASE_URL + "time_series_covid19_%s_global.csv" % category

        # Request the data
        LOGGER.info(f"{data_id} Requesting data...")
        async with httputils.CLIENT_SESSION.get(url) as response:
            text = await response.text()

        LOGGER.debug(f"{data_id} Data received")

        # Parse the CSV.
        data = list(csv.DictReader(text.splitlines()))
        LOGGER.debug(f"{data_id} CSV parsed")

        # The normalized locations.
        locations = []

        for item in data:
            # Filter out all the dates.
            dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))

            # Make location history from dates.
            history = {date: int(float(amount or 0)) for date, amount in dates.items()}

            # Country for this location.
            country = item["Country/Region"]

            # Latest data insert value.
            latest = list(history.values())[-1]

            # Normalize the item and append to locations.
            locations.append(
                {
                    # General info.
                    "country": country,
                    "country_code": countries.country_code(country),
                    "province": item["Province/State"],
                    # Coordinates.
                    "coordinates": {"lat": item["Lat"], "long": item["Long"],},
                    # History.
                    "history": history,
                    # Latest statistic.
                    "latest": int(latest or 0),
                }
            )
        LOGGER.debug(f"{data_id} Data normalized")

        # Latest total.
        latest = sum(map(lambda location: location["latest"], locations))

        # Return the final data.
        results = {
            "locations": locations,
            "latest": latest,
            "last_updated": datetime.utcnow().isoformat() + "Z",
            "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
        }
        # save the results to distributed cache
        await load_cache(data_id, results)

    LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
    return results
    results_list = []
    for each_category in category_list:
        category = each_category.lower()
        data_id = f"jhu.{category}"

        # check shared cache
        cache_results = await check_cache(data_id)
        if cache_results:
            LOGGER.info(f"{data_id} using shared cache results")
            results = cache_results
        else:
            LOGGER.info(f"{data_id} shared cache empty")
            # URL to request data from.
            url = BASE_URL + "time_series_covid19_%s_global.csv" % category

            # Request the data
            LOGGER.info(f"{data_id} Requesting data...")
            async with httputils.CLIENT_SESSION.get(url) as response:
                text = await response.text()

            LOGGER.debug(f"{data_id} Data received")

            # Parse the CSV.
            data = list(csv.DictReader(text.splitlines()))
            LOGGER.debug(f"{data_id} CSV parsed")

            # The normalized locations.
            locations = []

            for item in data:
                # Filter out all the dates.
                dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))

                # Make location history from dates.
                history = {date: int(float(amount or 0)) for date, amount in dates.items()}

                # Country for this location.
                country = item["Country/Region"]

                # Latest data insert value.
                latest = list(history.values())[-1]

                # Normalize the item and append to locations.
                locations.append(
                    {
                        # General info.
                        "country": country,
                        "country_code": countries.country_code(country),
                        "province": item["Province/State"],
                        # Coordinates.
                        "coordinates": {"lat": item["Lat"], "long": item["Long"],},
                        # History.
                        "history": history,
                        # Latest statistic.
                        "latest": int(latest or 0),
                    }
                )
            LOGGER.debug(f"{data_id} Data normalized")

            # Latest total.
            latest = sum(map(lambda location: location["latest"], locations))

            # Return the final data.
            results = {
                "locations": locations,
                "latest": latest,
                "last_updated": datetime.utcnow().isoformat() + "Z",
                "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
            }
            # save the results to distributed cache
            await load_cache(data_id, results)

        LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
        results_list.append(results)
    return results_list
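
A brief, illustrative sketch (not part of the diff) of how a caller might consume the refactored coroutine. Note that get_category is wrapped in @cached with a TTLCache, so the argument has to stay hashable for the default cache key; the sketch therefore passes a tuple of category names rather than a list, which the loop above iterates over just the same.

# Illustrative only: fetch several JHU categories in one call.
async def fetch_all_categories():
    # A tuple keeps the @cached TTLCache key hashable.
    confirmed, deaths, recovered = await get_category(("confirmed", "deaths", "recovered"))
    # Each element mirrors the old single-category payload.
    return {
        "confirmed": confirmed["latest"],
        "deaths": deaths["latest"],
        "recovered": recovered["latest"],
    }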


@cached(cache=TTLCache(maxsize=1, ttl=1800))
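
For context, the check_cache and load_cache helpers called above are defined elsewhere in the repository and are not shown in this diff. The sketch below is only a plausible shape for them, assuming redis-py's asyncio client and JSON-serialized values with the one-hour expiry mentioned in the docstring; the project's real helpers may differ.

# Sketch only — assumed Redis URL and key layout, not the project's actual helpers.
import json

import redis.asyncio as redis_async

REDIS = redis_async.from_url("redis://localhost:6379/0")  # assumed connection URL


async def check_cache(data_id):
    """Return the cached payload for data_id, or None on a cache miss."""
    raw = await REDIS.get(data_id)
    return json.loads(raw) if raw is not None else None


async def load_cache(data_id, results):
    """Store results under data_id, expiring after one hour."""
    await REDIS.set(data_id, json.dumps(results), ex=3600)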