Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions app/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@
def data_source(source):
    """
    Look up the data-source service registered under *source*.

    The lookup is case-insensitive.

    :returns: The service, or ``None`` if no such source is registered.
    :rtype: LocationService
    """
    key = source.lower()
    return DATA_SOURCES.get(key)
122 changes: 121 additions & 1 deletion app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,4 +118,124 @@ async def handle_validation_error(
if __name__ == "__main__":
uvicorn.run(
"app.main:APP", host="127.0.0.1", port=SETTINGS.port, log_level="info",
)
)"""
app.main.py
"""
import logging

import pydantic
import sentry_sdk
import uvicorn
from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.responses import JSONResponse
from scout_apm.async_.starlette import ScoutMiddleware
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

from .config import get_settings
from .data import data_source
from .routers import V1, V2
from .utils.httputils import setup_client_session, teardown_client_session

# ############
# FastAPI App
# ############

# Module-level logger for the API process.
LOGGER = logging.getLogger("api")

# Application settings (port, DSNs, etc.) loaded once at import time.
SETTINGS = get_settings()

# Initialize Sentry error reporting only when a DSN is configured.
if SETTINGS.sentry_dsn: # pragma: no cover
    sentry_sdk.init(dsn=SETTINGS.sentry_dsn)

# The ASGI application instance; the interactive docs are served at "/"
# and the ReDoc variant at "/docs".
APP = FastAPI(
    title="Coronavirus Tracker",
    description=(
        "API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak."
        " Project page: https://github.com/ExpDev07/coronavirus-tracker-api."
    ),
    version="2.0.4",
    docs_url="/",
    redoc_url="/docs",
    # Shared HTTP client session is created/destroyed with the app lifecycle.
    on_startup=[setup_client_session],
    on_shutdown=[teardown_client_session],
)

# ############
# Middleware
# ############

# Scout APM — only attached when a SCOUT_NAME is configured.
if SETTINGS.scout_name: # pragma: no cover
    LOGGER.info(f"Adding Scout APM middleware for `{SETTINGS.scout_name}`")
    APP.add_middleware(ScoutMiddleware)
else:
    LOGGER.debug("No SCOUT_NAME config")

# Sentry error tracking — only attached when a DSN is configured.
if SETTINGS.sentry_dsn: # pragma: no cover
    LOGGER.info("Adding Sentry middleware")
    APP.add_middleware(SentryAsgiMiddleware)

# Enable CORS for all origins/methods/headers (public read-only API).
APP.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
# Gzip-compress responses larger than 1000 bytes.
APP.add_middleware(GZipMiddleware, minimum_size=1000)


@APP.middleware("http")
async def add_datasource(request: Request, call_next):
    """
    HTTP middleware that resolves the ``source`` query parameter (default
    ``"jhu"``) into a data-source service and attaches it to
    ``request.state.source`` for downstream handlers.

    Short-circuits with a plain-text 404 response when the requested
    data-source is not registered.
    """
    # Resolve the data source named in the query string.
    requested = request.query_params.get("source", default="jhu")
    resolved = data_source(requested)

    # Unknown source: abort with 404 before reaching any route handler.
    if not resolved:
        return Response("The provided data-source was not found.", status_code=404)

    # Make the resolved service available to route handlers.
    request.state.source = resolved

    LOGGER.debug(f"source provided: {resolved.__class__.__name__}")
    return await call_next(request)


# ################
# Exception Handler
# ################


@APP.exception_handler(pydantic.error_wrappers.ValidationError)
async def handle_validation_error(
    request: Request, exc: pydantic.error_wrappers.ValidationError
):  # pylint: disable=unused-argument
    """
    Translate a pydantic ``ValidationError`` into an HTTP 422 response.

    :param request: The incoming request (unused; required by FastAPI).
    :param exc: The validation error raised while handling the request.
    :returns: JSON body ``{"message": <error list>}`` with status 422.
    """
    errors = exc.errors()
    return JSONResponse({"message": errors}, status_code=422)


# ################
# Routing
# ################


# Include routers: v1 endpoints at the root, v2 endpoints under /v2.
APP.include_router(V1, prefix="", tags=["v1"])
APP.include_router(V2, prefix="/v2", tags=["v2"])


# Local development entry point; production deployments should run the
# ASGI app ("app.main:APP") via an external server instead.
if __name__ == "__main__":
    uvicorn.run(
        "app.main:APP", host="127.0.0.1", port=SETTINGS.port, log_level="info",
    )
2 changes: 1 addition & 1 deletion app/routers/v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,4 +107,4 @@ async def sources():
"""
    Retrieves a list of data-sources that are available to use.
"""
return {"sources": list(DATA_SOURCES.keys())}
return {"sources": list(DATA_SOURCES.keys())}
28 changes: 0 additions & 28 deletions app/services/location/__init__.py

This file was deleted.

20 changes: 4 additions & 16 deletions app/services/location/csbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
from ...location.csbs import CSBSLocation
from ...utils import httputils
from . import LocationService
from ...location import factorylocation


LOGGER = logging.getLogger("services.location.csbs")

Expand Down Expand Up @@ -73,23 +75,9 @@ async def get_locations():

# Date string without "EDT" at end.
last_update = " ".join(item["Last Update"].split(" ")[0:2])
params = {"index": i, "state": state, "county": county, "item": item, "last_update": last_update}

# Append to locations.
locations.append(
CSBSLocation(
# General info.
i,
state,
county,
# Coordinates.
Coordinates(item["Latitude"], item["Longitude"]),
# Last update (parse as ISO).
datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z",
# Statistics.
int(item["Confirmed"] or 0),
int(item["Death"] or 0),
)
)
locations.append(locationfactory.create_location('CSBS'))
LOGGER.info(f"{data_id} Data normalized")
# save the results to distributed cache
# TODO: fix json serialization
Expand Down
97 changes: 97 additions & 0 deletions app/services/location/factorylocation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
from abc import ABC, abstractmethod
import csv
import logging
import os
from datetime import datetime
from pprint import pformat as pf

from asyncache import cached
from cachetools import TTLCache

from ...caches import check_cache, load_cache
from ...coordinates import Coordinates
from ...location.csbs import CSBSLocation
from ...location.nyt import NYTLocation
from ...utils import httputils
from . import LocationService

class factorylocation:
    """
    Factory that builds a location object for a named data source.

    Supported source names are ``"NYT"``, ``"JHU"`` and ``"CSBS"``; any
    other name yields ``None``.

    NOTE(review): ``Timeline`` and ``TimelinedLocation`` are referenced by
    the "NYT" and "JHU" branches but are never imported in this module, so
    those branches raise ``NameError`` as written — the missing imports
    must be added (TODO: confirm the correct module path).
    """

    @staticmethod
    def location_create(org_name, params):
        """
        Build and return a location instance for *org_name*.

        :param org_name: Data-source identifier ("NYT", "JHU" or "CSBS").
        :param params: Dict of source-specific fields: ``index`` plus
            ``county_state``/``confirmed_history``/``deaths_history`` (NYT),
            ``country``/``province``/``coordinates``/``timelines`` (JHU),
            or ``state``/``county``/``item``/``last_update`` (CSBS).
        :returns: The constructed location object, or ``None`` when
            *org_name* is not a recognized data source.
        """
        if org_name == "NYT":
            confirmed_history = params["confirmed_history"]
            deaths_history = params["deaths_history"]
            return NYTLocation(
                id=params["index"],
                state=params["county_state"][1],
                county=params["county_state"][0],
                coordinates=Coordinates(None, None),  # NYT does not provide coordinates
                last_updated=datetime.utcnow().isoformat() + "Z",  # since last request
                timelines={
                    "confirmed": Timeline(
                        timeline={
                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
                            for date, amount in confirmed_history.items()
                        }
                    ),
                    "deaths": Timeline(
                        timeline={
                            datetime.strptime(date, "%Y-%m-%d").isoformat() + "Z": amount
                            for date, amount in deaths_history.items()
                        }
                    ),
                    # NYT publishes no recovery data.
                    "recovered": Timeline(),
                },
            )
        if org_name == "JHU":
            timelines = params["timelines"]
            return TimelinedLocation(
                # General info.
                params["index"],
                params["country"],
                params["province"],
                # Coordinates.
                Coordinates(
                    latitude=params["coordinates"]["lat"],
                    longitude=params["coordinates"]["long"],
                ),
                # Last update.
                datetime.utcnow().isoformat() + "Z",
                # Timelines (parse dates as ISO).
                {
                    "confirmed": Timeline(
                        timeline={
                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
                            for date, amount in timelines["confirmed"].items()
                        }
                    ),
                    "deaths": Timeline(
                        timeline={
                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
                            for date, amount in timelines["deaths"].items()
                        }
                    ),
                    "recovered": Timeline(
                        timeline={
                            datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
                            for date, amount in timelines["recovered"].items()
                        }
                    ),
                },
            )
        if org_name == "CSBS":
            item = params["item"]
            return CSBSLocation(
                # General info.
                params["index"],
                params["state"],
                params["county"],
                # Coordinates.
                Coordinates(item["Latitude"], item["Longitude"]),
                # Last update (parse as ISO).
                datetime.strptime(params["last_update"], "%Y-%m-%d %H:%M").isoformat() + "Z",
                # Statistics.
                int(item["Confirmed"] or 0),
                int(item["Death"] or 0),
            )
        # Unknown data source: nothing to build.
        return None

    # Backward-compatible alias: the csbs/jhu call sites invoke
    # ``create_location``; both names resolve to the same factory.
    create_location = location_create

37 changes: 4 additions & 33 deletions app/services/location/jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from ...utils import date as date_util
from ...utils import httputils
from . import LocationService
from ...location import factorylocation

LOGGER = logging.getLogger("services.location.jhu")
PID = os.getpid()
Expand Down Expand Up @@ -171,39 +172,9 @@ async def get_locations():
coordinates = location["coordinates"]

# Create location (supporting timelines) and append.
locations.append(
TimelinedLocation(
# General info.
index,
location["country"],
location["province"],
# Coordinates.
Coordinates(latitude=coordinates["lat"], longitude=coordinates["long"]),
# Last update.
datetime.utcnow().isoformat() + "Z",
# Timelines (parse dates as ISO).
{
"confirmed": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
for date, amount in timelines["confirmed"].items()
}
),
"deaths": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
for date, amount in timelines["deaths"].items()
}
),
"recovered": Timeline(
timeline={
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
for date, amount in timelines["recovered"].items()
}
),
},
)
)
params = {"index": index, "country": location["country"], "province": location["province"],
"coordinates": coordinates, "timelines": timelines}
locations.append(locationfactory.create_location("JHU"), params)
LOGGER.info(f"{data_id} Data normalized")

# Finally, return the locations.
Expand Down
Loading