add & log a data_id string for each cache
move some messages to debug level
log process id for location get not category get
Kilo59 committed Apr 26, 2020
commit f2b54cc407f042824d676227c74c88b145b8cd62
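In short, each cached data fetch now tags its log lines with a data_id string, demotes the chatty intermediate messages to debug level, and logs the process id only from the top-level locations fetch. A minimal standalone sketch of that pattern (the module name and the "example.locations" identifier are illustrative, not part of this commit):

import logging
import os

LOGGER = logging.getLogger("services.location.example")  # illustrative logger name
PID = os.getpid()  # process id, logged only by the top-level locations fetch


async def get_locations():
    """Sketch of the logging pattern this commit applies to each cached data source."""
    data_id = "example.locations"  # identifies which cached dataset the log lines belong to
    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
    # ... fetch and parse the upstream CSV here ...
    LOGGER.debug(f"{data_id} Data received")  # intermediate steps demoted to debug level
    LOGGER.debug(f"{data_id} CSV parsed")
    LOGGER.info(f"{data_id} Data normalized")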
9 changes: 5 additions & 4 deletions app/services/location/csbs.py
@@ -42,14 +42,15 @@ async def get_locations():
     :returns: The locations.
     :rtype: dict
     """
-    LOGGER.info("csbs Requesting data...")
+    data_id = "csbs.locations"
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("csbs Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("csbs CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     locations = []
 
@@ -84,7 +85,7 @@ async def get_locations():
                     int(item["Death"] or 0),
                 )
             )
-    LOGGER.info("csbs Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Return the locations.
     return locations
19 changes: 13 additions & 6 deletions app/services/location/jhu.py
@@ -3,6 +3,7 @@
 import datetime as dt
 import logging
 import os
+from pprint import pformat as pf
 
 from asyncache import cached
 from cachetools import TTLCache
@@ -16,7 +17,7 @@
 from . import LocationService
 
 LOGGER = logging.getLogger("services.location.jhu")
-
+PID = os.getpid()
 
 class JhuLocationService(LocationService):
     """
@@ -53,20 +54,21 @@ async def get_category(category):
     """
     # Adhere to category naming standard.
     category = category.lower()
+    data_id = f"jhu.{category}"
 
     # URL to request data from.
     url = BASE_URL + "time_series_covid19_%s_global.csv" % category
 
     # Request the data
-    LOGGER.info(f"pid:{os.getpid()}: jhu Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(url) as response:
         text = await response.text()
 
-    LOGGER.info("jhu Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("jhu CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # The normalized locations.
     locations = []
@@ -99,18 +101,20 @@ async def get_category(category):
                 "latest": int(latest or 0),
             }
         )
-    LOGGER.info("jhu Data normalized")
+    LOGGER.debug(f"{data_id} Data normalized")
 
     # Latest total.
     latest = sum(map(lambda location: location["latest"], locations))
 
     # Return the final data.
-    return {
+    results = {
         "locations": locations,
         "latest": latest,
         "last_updated": dt.datetime.utcnow().isoformat() + "Z",
         "source": "https://github.com/ExpDev07/coronavirus-tracker-api",
     }
+    LOGGER.info(f"{data_id} results:\n{pf(results, depth=1)}")
+    return results
 
 
 @cached(cache=TTLCache(maxsize=1024, ttl=3600))
@@ -121,6 +125,8 @@ async def get_locations():
     :returns: The locations.
     :rtype: List[Location]
     """
+    data_id = "jhu.locations"
+    LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")
     # Get all of the data categories locations.
     confirmed = await get_category("confirmed")
     deaths = await get_category("deaths")
@@ -174,6 +180,7 @@ async def get_locations():
                 },
             )
         )
+    LOGGER.info(f"{data_id} Data normalized")
 
     # Finally, return the locations.
     return locations
9 changes: 5 additions & 4 deletions app/services/location/nyt.py
@@ -74,16 +74,17 @@ async def get_locations():
     :returns: The complete data for US Counties.
     :rtype: dict
     """
+    data_id = "nyt.locations"
     # Request the data.
-    LOGGER.info("nyt Requesting data...")
+    LOGGER.info(f"{data_id} Requesting data...")
     async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
         text = await response.text()
 
-    LOGGER.info("Data received")
+    LOGGER.debug(f"{data_id} Data received")
 
     # Parse the CSV.
     data = list(csv.DictReader(text.splitlines()))
-    LOGGER.info("nyt CSV parsed")
+    LOGGER.debug(f"{data_id} CSV parsed")
 
     # Group together locations (NYT data ordered by dates not location).
     grouped_locations = get_grouped_locations_dict(data)
@@ -125,6 +126,6 @@ async def get_locations():
                 },
             )
         )
-    LOGGER.info("nyt Data normalized")
+    LOGGER.info(f"{data_id} Data normalized")
 
     return locations