Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
The second aggregate pattern
  • Loading branch information
ZhiXuZhao committed Jul 25, 2021
commit 09b0a135f2ba60b75ba15815282a350995f543ce
25 changes: 13 additions & 12 deletions app/location/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,15 @@
# Put confirmed cases, death cases, and recovered cases into one class.
# Instead of using confirmed, death, and recovered case counts as separate attributes, we can use a CaseNumbers instance as a single attribute.
class CaseNumbers:
    """Value object bundling the case statistics for one location.

    Groups the three raw counts (plus the owning location's id) so that
    ``Location`` can hold a single ``casenumbers`` attribute instead of
    three separate ones.

    :param id: identifier of the location these counts belong to
        (name kept for interface compatibility even though it shadows
        the ``id`` builtin).
    :param confirmed: number of confirmed cases; defaults to 0.
    :param deaths: number of deaths; defaults to 0.
    :param recovered: number of recovered cases; defaults to 0.
    """

    def __init__(self, id, confirmed=0, deaths=0, recovered=0):
        self.id = id
        self.confirmed = confirmed
        self.deaths = deaths
        self.recovered = recovered


# Put all location information into one class.
# CaseNumbers, Locationinfo, Coordinates, and Location form one aggregate.
class Locationinfo:
def __init__(self, id, country, province, coordinates):
self.id = id
Expand All @@ -29,7 +33,7 @@ class Location: # pylint: disable=too-many-instance-attributes

# Use instance of class CaseNumbers as attribute
def __init__(
self, locationinfo, last_updated, casenumbers
self, id, locationinfo, last_updated, casenumbers
): # pylint: disable=too-many-arguments
# General info.
self.locationinfo = locationinfo
Expand All @@ -48,7 +52,7 @@ def country_code(self):
:returns: The country code.
:rtype: str
"""
return (countries.country_code(self.country) or countries.DEFAULT_COUNTRY_CODE).upper()
return (countries.country_code(self.locationinfo.country) or countries.DEFAULT_COUNTRY_CODE).upper()

@property
def country_population(self):
Expand All @@ -69,13 +73,13 @@ def serialize(self):
"""
return {
# General info.
"id": self.id,
"country": self.country,
"id": self.locationinfo.id,
"country": self.locationinfo.country,
"country_code": self.country_code,
"country_population": self.country_population,
"province": self.province,
"province": self.locationinfo.province,
# Coordinates.
"coordinates": self.coordinates.serialize(),
"coordinates": self.locationinfo.coordinates.serialize(),
# Last updated.
"last_updated": self.last_updated,
# Latest data (statistics).
Expand All @@ -93,13 +97,10 @@ class TimelinedLocation(Location):
"""

# pylint: disable=too-many-arguments
def __init__(self, id, country, province, coordinates, last_updated, timelines, casenumbers):
def __init__(self, locationinfo, last_updated, timelines, casenumbers):
super().__init__(
# General info.
id,
country,
province,
coordinates,
locationinfo,
last_updated,
# Statistics (retrieve latest from timelines).
casenumbers
Expand Down
6 changes: 4 additions & 2 deletions app/services/location/csbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
from ...utils import httputils
from . import LocationService

from urls import URLs

LOGGER = logging.getLogger("services.location.csbs")


Expand All @@ -32,7 +34,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ


# Base URL for fetching data
BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"
#BASE_URL = "https://facts.csbs.org/covid-19/covid19_county.csv"


@cached(cache=TTLCache(maxsize=1, ttl=1800))
Expand All @@ -52,7 +54,7 @@ async def get_locations():
locations = cache_results
else:
LOGGER.info(f"{data_id} shared cache empty")
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
async with httputils.CLIENT_SESSION.get(URLs.CSBS) as response:
text = await response.text()

LOGGER.debug(f"{data_id} Data received")
Expand Down
6 changes: 4 additions & 2 deletions app/services/location/jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
from ...utils import httputils
from . import LocationService

from urls import URLs

LOGGER = logging.getLogger("services.location.jhu")
PID = os.getpid()

Expand All @@ -41,7 +43,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ


# Base URL for fetching category.
BASE_URL = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
# BASE_URL = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"


@cached(cache=TTLCache(maxsize=4, ttl=1800))
Expand All @@ -64,7 +66,7 @@ async def get_category(category):
else:
LOGGER.info(f"{data_id} shared cache empty")
# URL to request data from.
url = BASE_URL + "time_series_covid19_%s_global.csv" % category
url = URLs.JHU + "time_series_covid19_%s_global.csv" % category

# Request the data
LOGGER.info(f"{data_id} Requesting data...")
Expand Down
6 changes: 4 additions & 2 deletions app/services/location/nyt.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
from ...utils import httputils
from . import LocationService

from urls import URLs

LOGGER = logging.getLogger("services.location.nyt")


Expand All @@ -36,7 +38,7 @@ async def get(self, loc_id): # pylint: disable=arguments-differ


# Base URL for fetching category.
BASE_URL = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"
# BASE_URL = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"


def get_grouped_locations_dict(data):
Expand Down Expand Up @@ -85,7 +87,7 @@ async def get_locations():
locations = cache_results
else:
LOGGER.info(f"{data_id} shared cache empty")
async with httputils.CLIENT_SESSION.get(BASE_URL) as response:
async with httputils.CLIENT_SESSION.get(URLs.NYT) as response:
text = await response.text()

LOGGER.debug(f"{data_id} Data received")
Expand Down
8 changes: 8 additions & 0 deletions app/services/location/urls.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import enum

# Encapsulate the upstream data-source URLs into one class; URLs serves as the aggregate root.
class URLs(str, enum.Enum):
    """Upstream data-source base URLs for the location services.

    Mixing in ``str`` lets each member be used directly wherever a URL
    string is expected (e.g. string concatenation, HTTP client calls)
    without needing ``.value``.
    """

    JHU = "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
    CSBS = "https://facts.csbs.org/covid-19/covid19_county.csv"
    NYT = "https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv"