app/routers/v2.py (1 change: 1 addition & 0 deletions)
@@ -99,6 +99,7 @@ async def get_location_by_id(
Getting specific location by id.
"""
location = await request.state.source.get(id)

return {"location": location.serialize(timelines)}


app/services/location/csbs.py (27 changes: 12 additions & 15 deletions)
@@ -14,7 +14,6 @@

LOGGER = logging.getLogger("services.location.csbs")


class CSBSLocationService(LocationService):
"""
Service for retrieving locations from CSBS.
@@ -23,18 +22,18 @@ class CSBSLocationService(LocationService):
async def get_all(self):
# Get the locations.
locations = await get_locations()

return locations

async def get(self, loc_id): # pylint: disable=arguments-differ
# Get location at the index equal to the provided id.
locations = await self.get_all()
return locations[loc_id]

return locations[loc_id]

# Base URL for fetching data
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"


@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations():
"""
@@ -43,8 +42,10 @@ async def get_locations():
:returns: The locations.
:rtype: dict
"""

data_id = "csbs.locations"
LOGGER.info(f"{data_id} Requesting data...")

# check shared cache
cache_results = await check_cache(data_id)
if cache_results:
@@ -77,19 +78,16 @@
# Append to locations.
locations.append(
CSBSLocation(
-# General info.
-i,
-state,
-county,
-# Coordinates.
-Coordinates(item["Latitude"], item["Longitude"]),
-# Last update (parse as ISO).
-datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
-# Statistics.
-int(item["Confirmed"] or 0),
-int(item["Death"] or 0),
+id=i,
+state=state,
+county=county,
+coordinates=Coordinates(item["Latitude"], item["Longitude"]),
+last_updated=datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
+confirmed=int(item["Confirmed"] or 0),
+deaths=int(item["Death"] or 0),
)
)

LOGGER.info(f"{data_id} Data normalized")
# save the results to distributed cache
# TODO: fix json serialization
@@ -98,5 +96,4 @@ async def get_locations():
except TypeError as type_err:
LOGGER.error(type_err)

-# Return the locations.
return locations
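Two changes stand out in this file: the `CSBSLocation` constructor call switches from positional to keyword arguments (making the inline `# General info.`-style comments redundant), and `get_locations()` stays behind a 30-minute TTL cache. A minimal sketch of that caching pattern, assuming the `cached` decorator is asyncache's coroutine-aware wrapper around a cachetools cache (the import lines are collapsed out of this diff):

```python
import asyncio

from asyncache import cached
from cachetools import TTLCache


# maxsize=1 suffices because the function takes no arguments,
# so there is only ever one cache key.
@cached(cache=TTLCache(maxsize=1, ttl=1800))
async def get_locations_stub():
    print("fetching remote CSV...")  # an expensive network call in the real service
    return ["normalized locations"]


async def main():
    await get_locations_stub()  # performs the fetch
    await get_locations_stub()  # served from cache for the next 30 minutes


asyncio.run(main())
```

The `check_cache` call and the "save the results to distributed cache" step in the diff add a second, shared layer on top of this in-process one.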
app/services/location/jhu.py (80 changes: 37 additions & 43 deletions)
@@ -20,7 +20,6 @@
LOGGER = logging.getLogger("services.location.jhu")
PID = os.getpid()


class JhuLocationService(LocationService):
"""
Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19).
@@ -34,6 +33,7 @@ async def get_all(self):
async def get(self, loc_id): # pylint: disable=arguments-differ
# Get location at the index equal to provided id.
locations = await self.get_all()

return locations[loc_id]


@@ -52,6 +52,7 @@ async def get_category(category):
:returns: The data for category.
:rtype: dict
"""

# Adhere to category naming standard.
category = category.lower()
data_id = f"jhu.{category}"
@@ -87,27 +88,24 @@
# Make location history from dates.
history = {date: int(float(amount or 0)) for date, amount in dates.items()}

-# Country for this location.
-country = item["Country/Region"]
-
# Latest data insert value.
latest = list(history.values())[-1]
+
+# Country for this location.
+country = item["Country/Region"]

# Normalize the item and append to locations.
-locations.append(
-{
-# General info.
-"country": country,
-"country_code": countries.country_code(country),
-"province": item["Province/State"],
-# Coordinates.
-"coordinates": {"lat": item["Lat"], "long": item["Long"],},
-# History.
-"history": history,
-# Latest statistic.
-"latest": int(latest or 0),
-}
-)
+locations.append({
+"country": country,
+"country_code": countries.country_code(country),
+"province": item["Province/State"],
+"coordinates": {
+"lat": item["Lat"],
+"long": item["Long"],
+},
+"history": history,
+"latest": int(latest or 0),
+})
LOGGER.debug(f"{data_id} Data normalized")

# Latest total.
@@ -135,8 +133,10 @@ async def get_locations():
:returns: The locations.
:rtype: List[Location]
"""

data_id = "jhu.locations"
LOGGER.info(f"pid:{PID}: {data_id} Requesting data...")

# Get all of the data categories locations.
confirmed = await get_category("confirmed")
deaths = await get_category("deaths")
@@ -163,8 +163,8 @@

timelines = {
"confirmed": location["history"],
"deaths": parse_history(key, locations_deaths, index),
"recovered": parse_history(key, locations_recovered, index),
"deaths": parse_history(key, locations_deaths),
"recovered": parse_history(key, locations_recovered),
}

# Grab coordinates.
@@ -173,16 +173,12 @@
# Create location (supporting timelines) and append.
locations.append(
TimelinedLocation(
-# General info.
-index,
-location["country"],
-location["province"],
-# Coordinates.
-Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
-# Last update.
-datetime.utcnow().isoformat() + "Z",
-# Timelines (parse dates as ISO).
-{
+id=index,
+country=location["country"],
+province=location["province"],
+coordinates=Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
+last_updated=datetime.utcnow().isoformat() + "Z",
+timelines={
"confirmed": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
@@ -204,25 +200,23 @@ async def get_locations():
},
)
)

LOGGER.info(f"{data_id} Data normalized")

-# Finally, return the locations.
return locations


-def parse_history(key: tuple, locations: list, index: int):
+def parse_history(key: tuple, locations: list):
"""
Helper for validating and extracting history content from
-locations data based on index. Validates with the current country/province
+locations data based on key. Validates with the current country/province
key to make sure there is no index/column mismatch.
TEMP: workaround until a more efficient approach is implemented in the refactor.
"""
-location_history = {}
-try:
-if key == (locations[index]["country"], locations[index]["province"]):
-location_history = locations[index]["history"]
-except (IndexError, KeyError):
-LOGGER.debug(f"iteration data merge error: {index} {key}")
-
-return location_history
+for i, location in enumerate(locations):
+if (location["country"], location["province"]) == key:
+return location["history"]
+
+LOGGER.debug(f"iteration data merge error: {key}")
+
+return {}
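The rewritten `parse_history` drops the positional `index` parameter: rather than trusting that row order lines up across the confirmed, deaths, and recovered tables and validating after the fact, it scans for the matching `(country, province)` key and falls back to an empty history. A self-contained sketch of that behavior, with data shapes assumed from the surrounding diff (the unused `enumerate` counter is omitted here):

```python
import logging

LOGGER = logging.getLogger("services.location.jhu")


def parse_history(key: tuple, locations: list) -> dict:
    # Linear scan keyed on (country, province); row order no longer matters.
    for location in locations:
        if (location["country"], location["province"]) == key:
            return location["history"]

    LOGGER.debug(f"iteration data merge error: {key}")
    return {}


deaths_rows = [
    {"country": "US", "province": "Washington", "history": {"3/21/20": 94}},
]
assert parse_history(("US", "Washington"), deaths_rows) == {"3/21/20": 94}
assert parse_history(("US", "Oregon"), deaths_rows) == {}
```

The trade-off is cost: one O(n) scan per location makes the overall merge quadratic, which is presumably why the docstring keeps its TEMP caveat about a more efficient refactor.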
app/services/location/nyt.py (5 changes: 2 additions & 3 deletions)
@@ -15,7 +15,6 @@

LOGGER = logging.getLogger("services.location.nyt")


class NYTLocationService(LocationService):
"""
Service for retrieving locations from New York Times (https://github.com/nytimes/covid-19-data).
@@ -24,21 +23,21 @@ class NYTLocationService(LocationService):
async def get_all(self):
# Get the locations.
locations = await get_locations()

return locations

async def get(self, loc_id): # pylint: disable=arguments-differ
# Get location at the index equal to provided id.
locations = await self.get_all()

return locations[loc_id]


# ---------------------------------------------------------------


# Base URL for fetching data.
BASE_URL = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"


def get_grouped_locations_dict(data):
"""
Helper function to group history for locations into one dict.
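The diff is cut off inside `get_grouped_locations_dict`, so its body is not shown. Purely as an illustration of what grouping per-day rows into one history dict per location might look like for the NYT us-counties.csv (columns `date`, `county`, `state`, `fips`, `cases`, `deaths`), here is a hypothetical version; none of it is the PR's actual code:

```python
from collections import defaultdict


def group_locations(data: list) -> dict:
    # Hypothetical helper: fold per-day CSV rows into one history dict
    # per (county, state) location. Field names assume the NYT schema.
    grouped = defaultdict(lambda: {"cases": {}, "deaths": {}})
    for row in data:
        location = grouped[(row["county"], row["state"])]
        location["cases"][row["date"]] = int(row["cases"] or 0)
        location["deaths"][row["date"]] = int(row["deaths"] or 0)
    return dict(grouped)


rows = [
    {"date": "2020-03-21", "county": "King", "state": "Washington", "cases": "10", "deaths": "1"},
    {"date": "2020-03-22", "county": "King", "state": "Washington", "cases": "12", "deaths": "1"},
]
print(group_locations(rows))
```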