Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions app/services/location/csbs.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""app.services.location.csbs.py"""
import csv
import logging
from datetime import datetime

from asyncache import cached
from cachetools import TTLCache
Expand All @@ -10,6 +9,7 @@
from ...coordinates import Coordinates
from ...location.csbs import CSBSLocation
from ...utils import httputils
from ...utils import Date
from . import LocationService

LOGGER = logging.getLogger("services.location.csbs")
Expand Down Expand Up @@ -84,7 +84,7 @@ async def get_locations():
# Coordinates.
Coordinates(item["Latitude"], item["Longitude"]),
# Last update (parse as ISO).
datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
Date().format_date(last_update, "%Y-%m-%d %H:%M") + "Z",
# Statistics.
int(item["Confirmed"] or 0),
int(item["Death"] or 0),
Expand Down
47 changes: 16 additions & 31 deletions app/services/location/jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import csv
import logging
import os
from datetime import datetime
from ...utils import Date
from pprint import pformat as pf

from asyncache import cached
Expand Down Expand Up @@ -79,14 +79,14 @@ async def get_category(category):

# The normalized locations.
locations = []

date_util = Date()
for item in data:
# Filter out all the dates.
dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))

dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items()))
# Make location history from dates.
history = {date: int(float(amount or 0)) for date, amount in dates.items()}

history = date_util.get_history(dates.items)
# Country for this location.
country = item["Country/Region"]

Expand Down Expand Up @@ -117,7 +117,7 @@ async def get_category(category):
results = {
"locations": locations,
"latest": latest,
"last_updated": datetime.utcnow().isoformat() + "Z",
"last_updated": Date().format_now(),
"source": "https://github.com/ExpDev07/coronavirus-tracker-api",
}
# save the results to distributed cache
Expand Down Expand Up @@ -156,20 +156,22 @@ async def get_locations():
# ***************************************************************************
# Go through locations.
for index, location in enumerate(locations_confirmed):

date_util = Date()
# Get the timelines.

# TEMP: Fix for merging recovery data. See TODO above for more details.
key = (location["country"], location["province"])

timelines = {
"confirmed": location["history"],
"deaths": parse_history(key, locations_deaths, index),
"recovered": parse_history(key, locations_recovered, index),
"deaths": date_util.parse_history(key, locations_deaths, index),
"recovered": date_util.parse_history(key, locations_recovered, index),
}

# Grab coordinates.
coordinates = location["coordinates"]

# Create location (supporting timelines) and append.
locations.append(
TimelinedLocation(
Expand All @@ -180,24 +182,24 @@ async def get_locations():
# Coordinates.
Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
# Last update.
datetime.utcnow().isoformat() + "Z",
date_util.format_now(),
# Timelines (parse dates as ISO).
{
"confirmed": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
date_util.format_date(date, amount, "%m/%d/%y") + "Z": amount
for date, amount in timelines["confirmed"].items()
}
),
"deaths": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
date_util.format_date(date, amount, "%m/%d/%y") + "Z": amount
for date, amount in timelines["deaths"].items()
}
),
"recovered": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
date_util.format_date(date, amount, "%m/%d/%y") + "Z": amount
for date, amount in timelines["recovered"].items()
}
),
Expand All @@ -209,20 +211,3 @@ async def get_locations():
# Finally, return the locations.
return locations


def parse_history(key: tuple, locations: list, index: int):
"""
Helper for validating and extracting history content from
locations data based on index. Validates with the current country/province
key to make sure no index/column issue.

TEMP: solution because implement a more efficient and better approach in the refactor.
"""
location_history = {}
try:
if key == (locations[index]["country"], locations[index]["province"]):
location_history = locations[index]["history"]
except (IndexError, KeyError):
LOGGER.debug(f"iteration data merge error: {index} {key}")

return location_history
18 changes: 9 additions & 9 deletions app/services/location/nyt.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""app.services.location.nyt.py"""
import csv
import logging
from datetime import datetime
from ...utils import Date

from asyncache import cached
from cachetools import TTLCache
Expand Down Expand Up @@ -99,34 +99,34 @@ async def get_locations():

# The normalized locations.
locations = []

date_util = Date()
for idx, (county_state, histories) in enumerate(grouped_locations.items()):
# Make location history for confirmed and deaths from dates.
# List is tuples of (date, amount) in order of increasing dates.
confirmed_list = histories["confirmed"]
confirmed_history = {date: int(amount or 0) for date, amount in confirmed_list}

confirmed_history = date_util.get_history(confirmed_list)
deaths_list = histories["deaths"]
deaths_history = {date: int(amount or 0) for date, amount in deaths_list}

deaths_history = date_util.get_history(deaths_list)
# Normalize the item and append to locations.
locations.append(
NYTLocation(
id=idx,
state=county_state[1],
county=county_state[0],
coordinates=Coordinates(None, None), # NYT does not provide coordinates
last_updated=datetime.utcnow().isoformat() + "Z", # since last request
last_updated= date_util.format_now(), # since last request
timelines={
"confirmed": Timeline(
timeline={
datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
date_util.format_date(date, amount, "%Y-%m-%d") + "Z": amount
for date, amount in confirmed_history.items()
}
),
"deaths": Timeline(
timeline={
datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
date_util.format_date(date, amount, "%Y-%m-%d") + "Z": amount
for date, amount in deaths_history.items()
}
),
Expand Down
70 changes: 58 additions & 12 deletions app/utils/date.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,64 @@
"""app.utils.date.py"""
from dateutil.parser import parse
from datetime import datetime
import logging

LOGGER = logging.getLogger("services.location.jhu")

def is_date(string, fuzzy=False):
"""
Return whether the string can be interpreted as a date.
- https://stackoverflow.com/a/25341965/7120095
class Date:
    """Date/time helpers shared by the location services.

    Wraps ``datetime`` parsing/formatting and delegates history
    extraction to the module-level ``History`` helpers.
    """

    def get_history(self, history_list):
        """Return ``{date: int(amount)}`` built from (date, amount) pairs.

        :param history_list: iterable of (date, amount) pairs, or an
            uncalled bound method such as ``dict.items`` (jhu passes the
            method itself without calling it).
        """
        if callable(history_list):
            history_list = history_list()
        # Bug fix: the method must be *called* — the original returned the
        # bound method object ``History(...).get_history`` instead of the dict.
        return History(history_list).get_history()

    def parse_history(self, key: tuple, locations: list, index: int):
        """Validate and extract the history for ``locations[index]``.

        Delegates to the module-level ``parse_history`` helper.  (The
        original delegated to ``History()``, which requires a constructor
        argument and has no ``parse_history`` method, so it raised at
        call time; it was also missing ``self``.)
        """
        return parse_history(key, locations, index)

    def format_date(self, date, amount=None, format=None):
        """Parse ``date`` with a strptime format and return it as ISO-8601.

        Callers use two shapes: ``format_date(date, fmt)`` (csbs) and the
        legacy ``format_date(date, amount, fmt)`` (jhu/nyt) where
        ``amount`` is ignored — both are supported.  The original lacked
        ``self`` and only accepted two arguments, so the three-argument
        call sites broke.
        """
        fmt = format if format is not None else amount
        return datetime.strptime(date, fmt).isoformat()

    def format_now(self):
        """Current UTC time as an ISO-8601 string with a ``Z`` suffix."""
        return datetime.utcnow().isoformat() + "Z"


def is_date(string, fuzzy=False):
    """
    Return whether the string can be interpreted as a date.
    - https://stackoverflow.com/a/25341965/7120095

    :param string: str, string to check for date
    :param fuzzy: bool, ignore unknown tokens in string if True
    """
    # dateutil raises ValueError for anything it cannot parse; treat
    # that as "not a date" and everything else as a successful parse.
    try:
        parse(string, fuzzy=fuzzy)
    except ValueError:
        return False
    return True


class History:
    """Normalizes a sequence of (date, amount) history pairs."""

    def __init__(self, history_list):
        # Iterable of (date, raw_amount) pairs; raw amounts may be
        # numeric strings, None, or "" (treated as zero).
        self.history_list = history_list

    def get_history(self):
        """Return ``{date: int(amount)}`` with falsy amounts coerced to 0."""
        normalized = {}
        for date, raw_amount in self.history_list:
            # float() first so values such as "3.0" survive the int cast.
            normalized[date] = int(float(raw_amount or 0))
        return normalized


def parse_history(key: tuple, locations: list, index: int):
    """
    Validate and extract the history dict for ``locations[index]``.

    The (country, province) ``key`` is checked against the entry at
    ``index`` to guard against index/column misalignment between the
    merged datasets; on mismatch or lookup failure an empty history is
    returned.

    TEMP: interim solution — a more efficient approach belongs in the
    planned refactor.
    """
    try:
        candidate = locations[index]
        if (candidate["country"], candidate["province"]) == key:
            return candidate["history"]
    except (IndexError, KeyError):
        LOGGER.debug(f"iteration data merge error: {index} {key}")
    return {}

:param string: str, string to check for date
:param fuzzy: bool, ignore unknown tokens in string if True
"""

try:
parse(string, fuzzy=fuzzy)
return True
except ValueError:
return False