From ac6aa5f53401f27a933eb33cc7a118998b369920 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 17:32:09 -0400 Subject: [PATCH 01/12] apply black formatting (120 line-length) --- app/__init__.py | 2 +- app/config/settings.py | 3 +- app/coordinates.py | 7 +- app/core.py | 3 +- app/data/__init__.py | 8 +- app/enums/sources.py | 6 +- app/location/__init__.py | 55 +++++++------ app/location/csbs.py | 24 +++--- app/main.py | 49 +++++------- app/models/latest.py | 6 +- app/models/location.py | 14 +++- app/models/timeline.py | 5 +- app/router/__init__.py | 2 +- app/router/latest.py | 15 ++-- app/router/locations.py | 33 ++++---- app/router/sources.py | 7 +- app/routes/__init__.py | 2 +- app/routes/v1/all.py | 30 ++++--- app/routes/v1/confirmed.py | 5 +- app/routes/v1/deaths.py | 5 +- app/routes/v1/recovered.py | 5 +- app/services/location/__init__.py | 3 +- app/services/location/csbs.py | 56 +++++++------ app/services/location/jhu.py | 129 ++++++++++++++++-------------- app/timeline.py | 10 +-- app/utils/countrycodes.py | 4 +- app/utils/date.py | 5 +- pyproject.toml | 19 +++++ tests/conftest.py | 1 + tests/test_coordinates.py | 14 +--- tests/test_countrycodes.py | 24 +++--- tests/test_csbs.py | 9 ++- tests/test_date.py | 23 +++--- tests/test_jhu.py | 23 +++--- tests/test_location.py | 65 ++++++++------- tests/test_routes.py | 21 ++--- tests/test_swagger.py | 4 +- tests/test_timeline.py | 20 ++--- 38 files changed, 382 insertions(+), 334 deletions(-) create mode 100644 pyproject.toml diff --git a/app/__init__.py b/app/__init__.py index b4b15379..16847abb 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,4 +1,4 @@ # See PEP396. -__version__ = '2.0' +__version__ = "2.0" from .core import create_app diff --git a/app/config/settings.py b/app/config/settings.py index ce6617a8..27c907bd 100644 --- a/app/config/settings.py +++ b/app/config/settings.py @@ -2,9 +2,10 @@ # Load enviroment variables from .env file. from dotenv import load_dotenv + load_dotenv() """ The port to serve the app application on. """ -PORT = int(os.getenv('PORT', 5000)) \ No newline at end of file +PORT = int(os.getenv("PORT", 5000)) diff --git a/app/coordinates.py b/app/coordinates.py index 93b5bd9e..cc27a8e3 100644 --- a/app/coordinates.py +++ b/app/coordinates.py @@ -14,10 +14,7 @@ def serialize(self): :returns: The serialized coordinates. :rtype: dict """ - return { - 'latitude' : self.latitude, - 'longitude': self.longitude - } + return {"latitude": self.latitude, "longitude": self.longitude} def __str__(self): - return 'lat: %s, long: %s' % (self.latitude, self.longitude) \ No newline at end of file + return "lat: %s, long: %s" % (self.latitude, self.longitude) diff --git a/app/core.py b/app/core.py index d631deb4..ef22b686 100644 --- a/app/core.py +++ b/app/core.py @@ -1,6 +1,7 @@ from flask import Flask from flask_cors import CORS + def create_app(): """ Construct the core application. @@ -10,7 +11,7 @@ def create_app(): CORS(app) # Set app config from settings. - app.config.from_pyfile('config/settings.py'); + app.config.from_pyfile("config/settings.py") with app.app_context(): # Import routes. diff --git a/app/data/__init__.py b/app/data/__init__.py index 0d11f7b1..fe8e25c7 100644 --- a/app/data/__init__.py +++ b/app/data/__init__.py @@ -2,10 +2,8 @@ from ..services.location.csbs import CSBSLocationService # Mapping of services to data-sources. 
-data_sources = { - 'jhu': JhuLocationService(), - 'csbs': CSBSLocationService() -} +data_sources = {"jhu": JhuLocationService(), "csbs": CSBSLocationService()} + def data_source(source): """ @@ -14,4 +12,4 @@ def data_source(source): :returns: The service. :rtype: LocationService """ - return data_sources.get(source.lower()) \ No newline at end of file + return data_sources.get(source.lower()) diff --git a/app/enums/sources.py b/app/enums/sources.py index 2dd7e13b..b4538c45 100644 --- a/app/enums/sources.py +++ b/app/enums/sources.py @@ -1,8 +1,10 @@ from enum import Enum + class Sources(str, Enum): """ A source available for retrieving data. """ - jhu = 'jhu' - csbs = 'csbs' \ No newline at end of file + + jhu = "jhu" + csbs = "csbs" diff --git a/app/location/__init__.py b/app/location/__init__.py index 70f9464b..718f3102 100644 --- a/app/location/__init__.py +++ b/app/location/__init__.py @@ -1,6 +1,7 @@ from ..coordinates import Coordinates from ..utils import countrycodes + class Location: """ A location in the world affected by the coronavirus. @@ -20,7 +21,7 @@ def __init__(self, id, country, province, coordinates, last_updated, confirmed, self.confirmed = confirmed self.deaths = deaths self.recovered = recovered - + @property def country_code(self): """ @@ -37,25 +38,19 @@ def serialize(self): """ return { # General info. - 'id' : self.id, - 'country' : self.country, - 'country_code': self.country_code, - 'province' : self.province, - + "id": self.id, + "country": self.country, + "country_code": self.country_code, + "province": self.province, # Coordinates. - 'coordinates': self.coordinates.serialize(), - + "coordinates": self.coordinates.serialize(), # Last updated. - 'last_updated': self.last_updated, - + "last_updated": self.last_updated, # Latest data (statistics). - 'latest': { - 'confirmed': self.confirmed, - 'deaths' : self.deaths, - 'recovered': self.recovered - }, + "latest": {"confirmed": self.confirmed, "deaths": self.deaths, "recovered": self.recovered}, } + class TimelinedLocation(Location): """ A location with timelines. @@ -64,18 +59,21 @@ class TimelinedLocation(Location): def __init__(self, id, country, province, coordinates, last_updated, timelines): super().__init__( # General info. - id, country, province, coordinates, last_updated, - + id, + country, + province, + coordinates, + last_updated, # Statistics (retrieve latest from timelines). - confirmed=timelines.get('confirmed').latest or 0, - deaths=timelines.get('deaths').latest or 0, - recovered=timelines.get('recovered').latest or 0, + confirmed=timelines.get("confirmed").latest or 0, + deaths=timelines.get("deaths").latest or 0, + recovered=timelines.get("recovered").latest or 0, ) # Set timelines. self.timelines = timelines - def serialize(self, timelines = False): + def serialize(self, timelines=False): """ Serializes the location into a dict. @@ -87,10 +85,15 @@ def serialize(self, timelines = False): # Whether to include the timelines or not. if timelines: - serialized.update({ 'timelines': { - # Serialize all the timelines. - key: value.serialize() for (key, value) in self.timelines.items() - }}) + serialized.update( + { + "timelines": { + # Serialize all the timelines. + key: value.serialize() + for (key, value) in self.timelines.items() + } + } + ) # Return the serialized location. 
- return serialized \ No newline at end of file + return serialized diff --git a/app/location/csbs.py b/app/location/csbs.py index bab09e3d..0b7c27f8 100644 --- a/app/location/csbs.py +++ b/app/location/csbs.py @@ -1,23 +1,28 @@ from . import Location + class CSBSLocation(Location): """ A CSBS (county) location. """ + def __init__(self, id, state, county, coordinates, last_updated, confirmed, deaths): super().__init__( # General info. - id, 'US', state, coordinates, last_updated, - + id, + "US", + state, + coordinates, + last_updated, # Statistics. confirmed=confirmed, - deaths=deaths, - recovered=0 + deaths=deaths, + recovered=0, ) self.state = state self.county = county - + def serialize(self, timelines=False): """ Serializes the location into a dict. @@ -28,10 +33,9 @@ def serialize(self, timelines=False): serialized = super().serialize() # Update with new fields. - serialized.update({ - 'state': self.state, - 'county': self.county, - }) + serialized.update( + {"state": self.state, "county": self.county,} + ) # Return the serialized location. - return serialized \ No newline at end of file + return serialized diff --git a/app/main.py b/app/main.py index f44bd984..d174127b 100644 --- a/app/main.py +++ b/app/main.py @@ -26,14 +26,14 @@ # ############ # FastAPI App # ############ -LOGGER = logging.getLogger('api') +LOGGER = logging.getLogger("api") APP = FastAPI( - title='Coronavirus Tracker', - description='API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak. Project page: https://github.com/ExpDev07/coronavirus-tracker-api.', - version='2.0.1', - docs_url='/', - redoc_url='/docs', + title="Coronavirus Tracker", + description="API for tracking the global coronavirus (COVID-19, SARS-CoV-2) outbreak. Project page: https://github.com/ExpDev07/coronavirus-tracker-api.", + version="2.0.1", + docs_url="/", + redoc_url="/docs", ) # ##################### @@ -42,31 +42,27 @@ # Enable CORS. APP.add_middleware( - CORSMiddleware, - allow_credentials=True, - allow_origins=['*'], - allow_methods=['*'], - allow_headers=['*'], + CORSMiddleware, allow_credentials=True, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"], ) # TODO this could probably just be a FastAPI dependency. -@APP.middleware('http') +@APP.middleware("http") async def add_datasource(request: Request, call_next): """ Attach the data source to the request.state. """ # Retrieve the datas ource from query param. - source = data_source(request.query_params.get('source', default='jhu')) - + source = data_source(request.query_params.get("source", default="jhu")) + # Abort with 404 if source cannot be found. if not source: - return Response('The provided data-source was not found.', status_code=404) - + return Response("The provided data-source was not found.", status_code=404) + # Attach source to request. request.state.source = source - + # Move on... - LOGGER.info(f'source provided: {source.__class__.__name__}') + LOGGER.info(f"source provided: {source.__class__.__name__}") response = await call_next(request) return response @@ -77,13 +73,11 @@ async def add_datasource(request: Request, call_next): @APP.exception_handler(pydantic.error_wrappers.ValidationError) -async def handle_validation_error( - request: Request, exc: pydantic.error_wrappers.ValidationError -): +async def handle_validation_error(request: Request, exc: pydantic.error_wrappers.ValidationError): """ Handles validation errors. 
""" - return JSONResponse({'message': exc.errors()}, status_code=422) + return JSONResponse({"message": exc.errors()}, status_code=422) # ################ @@ -93,17 +87,14 @@ async def handle_validation_error( from .router import router # Include routers. -APP.include_router(router, prefix='/v2', tags=['v2']) +APP.include_router(router, prefix="/v2", tags=["v2"]) # mount the existing Flask app # v1 @ / -APP.mount('/', WSGIMiddleware(create_app())) +APP.mount("/", WSGIMiddleware(create_app())) # Running of app. -if __name__ == '__main__': +if __name__ == "__main__": uvicorn.run( - 'app.main:APP', - host='127.0.0.1', - port=int(os.getenv('PORT', 5000)), - log_level='info', + "app.main:APP", host="127.0.0.1", port=int(os.getenv("PORT", 5000)), log_level="info", ) diff --git a/app/models/latest.py b/app/models/latest.py index 90493156..6dcfd517 100644 --- a/app/models/latest.py +++ b/app/models/latest.py @@ -1,15 +1,19 @@ from pydantic import BaseModel + class Latest(BaseModel): """ Latest model. """ + confirmed: int deaths: int recovered: int + class LatestResponse(BaseModel): """ Response for latest. """ - latest: Latest \ No newline at end of file + + latest: Latest diff --git a/app/models/location.py b/app/models/location.py index e796fad8..8180f450 100644 --- a/app/models/location.py +++ b/app/models/location.py @@ -3,29 +3,35 @@ from .timeline import Timelines from .latest import Latest + class Location(BaseModel): """ Location model. """ + id: int country: str country_code: str - county: str = '' - province: str = '' - last_updated: str # TODO use datetime.datetime type. + county: str = "" + province: str = "" + last_updated: str # TODO use datetime.datetime type. coordinates: Dict latest: Latest timelines: Timelines = {} + class LocationResponse(BaseModel): """ Response for location. """ + location: Location + class LocationsResponse(BaseModel): """ Response for locations. """ + latest: Latest - locations: List[Location] = [] \ No newline at end of file + locations: List[Location] = [] diff --git a/app/models/timeline.py b/app/models/timeline.py index 33947493..75404d98 100644 --- a/app/models/timeline.py +++ b/app/models/timeline.py @@ -1,6 +1,7 @@ from pydantic import BaseModel from typing import Dict + class Timeline(BaseModel): """ Timeline model. @@ -9,10 +10,12 @@ class Timeline(BaseModel): latest: int timeline: Dict[str, int] = {} + class Timelines(BaseModel): """ Timelines model. """ + confirmed: Timeline deaths: Timeline - recovered: Timeline \ No newline at end of file + recovered: Timeline diff --git a/app/router/__init__.py b/app/router/__init__.py index 70ccb65d..04da8f26 100644 --- a/app/router/__init__.py +++ b/app/router/__init__.py @@ -4,4 +4,4 @@ router = APIRouter() # The routes. -from . import latest, sources, locations \ No newline at end of file +from . import latest, sources, locations diff --git a/app/router/latest.py b/app/router/latest.py index a3914c5e..1bdbe44c 100644 --- a/app/router/latest.py +++ b/app/router/latest.py @@ -3,16 +3,17 @@ from ..enums.sources import Sources from ..models.latest import LatestResponse as Latest -@router.get('/latest', response_model=Latest) -def get_latest(request: Request, source: Sources = 'jhu'): + +@router.get("/latest", response_model=Latest) +def get_latest(request: Request, source: Sources = "jhu"): """ Getting latest amount of total confirmed cases, deaths, and recoveries. 
""" locations = request.state.source.get_all() return { - 'latest': { - 'confirmed': sum(map(lambda location: location.confirmed, locations)), - 'deaths' : sum(map(lambda location: location.deaths, locations)), - 'recovered': sum(map(lambda location: location.recovered, locations)), + "latest": { + "confirmed": sum(map(lambda location: location.confirmed, locations)), + "deaths": sum(map(lambda location: location.deaths, locations)), + "recovered": sum(map(lambda location: location.recovered, locations)), } - } \ No newline at end of file + } diff --git a/app/router/locations.py b/app/router/locations.py index 4abb11fd..aed9eca4 100644 --- a/app/router/locations.py +++ b/app/router/locations.py @@ -3,12 +3,11 @@ from ..enums.sources import Sources from ..models.location import LocationResponse as Location, LocationsResponse as Locations -@router.get( - '/locations', response_model=Locations, response_model_exclude_unset=True -) + +@router.get("/locations", response_model=Locations, response_model_exclude_unset=True) def get_locations( request: Request, - source: Sources = 'jhu', + source: Sources = "jhu", country_code: str = None, province: str = None, county: str = None, @@ -21,8 +20,8 @@ def get_locations( params = dict(request.query_params) # Remove reserved params. - params.pop('source', None) - params.pop('timelines', None) + params.pop("source", None) + params.pop("timelines", None) # Retrieve all the locations. locations = request.state.source.get_all() @@ -30,8 +29,8 @@ def get_locations( # Attempt to filter out locations with properties matching the provided query params. for key, value in params.items(): # Clean keys for security purposes. - key = key.lower() - value = value.lower().strip('__') + key = key.lower() + value = value.lower().strip("__") # Do filtering. try: @@ -41,20 +40,18 @@ def get_locations( # Return final serialized data. return { - 'latest': { - 'confirmed': sum(map(lambda location: location.confirmed, locations)), - 'deaths' : sum(map(lambda location: location.deaths, locations)), - 'recovered': sum(map(lambda location: location.recovered, locations)), + "latest": { + "confirmed": sum(map(lambda location: location.confirmed, locations)), + "deaths": sum(map(lambda location: location.deaths, locations)), + "recovered": sum(map(lambda location: location.recovered, locations)), }, - 'locations': [location.serialize(timelines) for location in locations], + "locations": [location.serialize(timelines) for location in locations], } -@router.get('/locations/{id}', response_model=Location) -def get_location_by_id(request: Request, id: int, source: Sources = 'jhu', timelines: bool = True): +@router.get("/locations/{id}", response_model=Location) +def get_location_by_id(request: Request, id: int, source: Sources = "jhu", timelines: bool = True): """ Getting specific location by id. """ - return { - 'location': request.state.source.get(id).serialize(timelines) - } \ No newline at end of file + return {"location": request.state.source.get(id).serialize(timelines)} diff --git a/app/router/sources.py b/app/router/sources.py index 25674587..8b8c4778 100644 --- a/app/router/sources.py +++ b/app/router/sources.py @@ -1,11 +1,10 @@ from . import router from ..data import data_sources -@router.get('/sources') + +@router.get("/sources") async def sources(): """ Retrieves a list of data-sources that are availble to use. 
""" - return { - 'sources': list(data_sources.keys()) - } \ No newline at end of file + return {"sources": list(data_sources.keys())} diff --git a/app/routes/__init__.py b/app/routes/__init__.py index b61458e0..98d5f8fd 100644 --- a/app/routes/__init__.py +++ b/app/routes/__init__.py @@ -2,7 +2,7 @@ from ..data import data_source # Follow the import order to avoid circular dependency -api_v1 = Blueprint('api_v1', __name__, url_prefix='') +api_v1 = Blueprint("api_v1", __name__, url_prefix="") # API version 1. from .v1 import confirmed, deaths, recovered, all diff --git a/app/routes/v1/all.py b/app/routes/v1/all.py index 654ae35d..88ad1be2 100644 --- a/app/routes/v1/all.py +++ b/app/routes/v1/all.py @@ -2,23 +2,21 @@ from ...routes import api_v1 as api from ...services.location.jhu import get_category -@api.route('/all') + +@api.route("/all") def all(): # Get all the categories. - confirmed = get_category('confirmed') - deaths = get_category('deaths') - recovered = get_category('recovered') - - return jsonify({ - # Data. - 'confirmed': confirmed, - 'deaths': deaths, - 'recovered': recovered, + confirmed = get_category("confirmed") + deaths = get_category("deaths") + recovered = get_category("recovered") - # Latest. - 'latest': { - 'confirmed': confirmed['latest'], - 'deaths': deaths['latest'], - 'recovered': recovered['latest'], + return jsonify( + { + # Data. + "confirmed": confirmed, + "deaths": deaths, + "recovered": recovered, + # Latest. + "latest": {"confirmed": confirmed["latest"], "deaths": deaths["latest"], "recovered": recovered["latest"],}, } - }) + ) diff --git a/app/routes/v1/confirmed.py b/app/routes/v1/confirmed.py index 914ebdc9..78c47611 100644 --- a/app/routes/v1/confirmed.py +++ b/app/routes/v1/confirmed.py @@ -2,6 +2,7 @@ from ...routes import api_v1 as api from ...services.location.jhu import get_category -@api.route('/confirmed') + +@api.route("/confirmed") def confirmed(): - return jsonify(get_category('confirmed')) + return jsonify(get_category("confirmed")) diff --git a/app/routes/v1/deaths.py b/app/routes/v1/deaths.py index 9005bdca..becbed64 100644 --- a/app/routes/v1/deaths.py +++ b/app/routes/v1/deaths.py @@ -2,6 +2,7 @@ from ...routes import api_v1 as api from ...services.location.jhu import get_category -@api.route('/deaths') + +@api.route("/deaths") def deaths(): - return jsonify(get_category('deaths')) + return jsonify(get_category("deaths")) diff --git a/app/routes/v1/recovered.py b/app/routes/v1/recovered.py index d5a58731..3d571fed 100644 --- a/app/routes/v1/recovered.py +++ b/app/routes/v1/recovered.py @@ -2,6 +2,7 @@ from ...routes import api_v1 as api from ...services.location.jhu import get_category -@api.route('/recovered') + +@api.route("/recovered") def recovered(): - return jsonify(get_category('recovered')) + return jsonify(get_category("recovered")) diff --git a/app/services/location/__init__.py b/app/services/location/__init__.py index 3338b3d3..80b5e05c 100644 --- a/app/services/location/__init__.py +++ b/app/services/location/__init__.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod + class LocationService(ABC): """ Service for retrieving locations. @@ -23,4 +24,4 @@ def get(self, id): :returns: The location. 
:rtype: Location """ - raise NotImplementedError \ No newline at end of file + raise NotImplementedError diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py index b0f3a2e2..e8eae2a2 100644 --- a/app/services/location/csbs.py +++ b/app/services/location/csbs.py @@ -2,6 +2,7 @@ from ...coordinates import Coordinates from ...location.csbs import CSBSLocation + class CSBSLocationService(LocationService): """ Servive for retrieving locations from csbs @@ -10,17 +11,19 @@ class CSBSLocationService(LocationService): def get_all(self): # Get the locations return get_locations() - + def get(self, id): return self.get_all()[id] + import requests import csv from datetime import datetime from cachetools import cached, TTLCache # Base URL for fetching data -base_url = 'https://facts.csbs.org/covid-19/covid19_county.csv' +base_url = "https://facts.csbs.org/covid-19/covid19_county.csv" + @cached(cache=TTLCache(maxsize=1, ttl=3600)) def get_locations(): @@ -34,45 +37,40 @@ def get_locations(): text = request.text data = list(csv.DictReader(text.splitlines())) - + locations = [] for i, item in enumerate(data): # General info. - state = item['State Name'] - county = item['County Name'] + state = item["State Name"] + county = item["County Name"] # Ensure country is specified. if county == "Unassigned" or county == "Unknown": continue # Coordinates. - coordinates = Coordinates( - item['Latitude'], - item['Longitude'] - ) - + coordinates = Coordinates(item["Latitude"], item["Longitude"]) + # Date string without "EDT" at end. - last_update = ' '.join(item['Last Update'].split(' ')[0:2]) - + last_update = " ".join(item["Last Update"].split(" ")[0:2]) + # Append to locations. - locations.append(CSBSLocation( - # General info. - i, state, county, - - # Coordinates. - Coordinates( - item['Latitude'], - item['Longitude'] - ), - - # Last update (parse as ISO). - datetime.strptime(last_update, '%Y-%m-%d %H:%M').isoformat() + 'Z', - - # Statistics. - int(item['Confirmed'] or 0), - int(item['Death'] or 0) - )) + locations.append( + CSBSLocation( + # General info. + i, + state, + county, + # Coordinates. + Coordinates(item["Latitude"], item["Longitude"]), + # Last update (parse as ISO). + datetime.strptime(last_update, "%Y-%m-%d %H:%M").isoformat() + "Z", + # Statistics. + int(item["Confirmed"] or 0), + int(item["Death"] or 0), + ) + ) # Return the locations. return locations diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index 885c658e..75a3df4e 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -3,6 +3,7 @@ from ...coordinates import Coordinates from ...timeline import Timeline + class JhuLocationService(LocationService): """ Service for retrieving locations from Johns Hopkins CSSE (https://github.com/CSSEGISandData/COVID-19). @@ -11,11 +12,12 @@ class JhuLocationService(LocationService): def get_all(self): # Get the locations. return get_locations() - + def get(self, id): # Get location at the index equal to provided id. return self.get_all()[id] + # --------------------------------------------------------------- import requests @@ -27,7 +29,10 @@ def get(self, id): """ Base URL for fetching category. 
""" -base_url = 'https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/'; +base_url = ( + "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/" +) + @cached(cache=TTLCache(maxsize=1024, ttl=3600)) def get_category(category): @@ -39,19 +44,19 @@ def get_category(category): """ # Adhere to category naming standard. - category = category.lower(); + category = category.lower() # URL to request data from. - url = base_url + 'time_series_covid19_%s_global.csv' % category + url = base_url + "time_series_covid19_%s_global.csv" % category # Different URL is needed for recoveries. # Read about deprecation here: https://github.com/CSSEGISandData/COVID-19/tree/master/csse_covid_19_data/csse_covid_19_time_series. - if category == 'recovered': - url = base_url + 'time_series_19-covid-Recovered.csv' + if category == "recovered": + url = base_url + "time_series_19-covid-Recovered.csv" # Request the data request = requests.get(url) - text = request.text + text = request.text # Parse the CSV. data = list(csv.DictReader(text.splitlines())) @@ -64,45 +69,42 @@ def get_category(category): dates = dict(filter(lambda element: date_util.is_date(element[0]), item.items())) # Make location history from dates. - history = { date: int(amount or 0) for date, amount in dates.items() }; + history = {date: int(amount or 0) for date, amount in dates.items()} # Country for this location. - country = item['Country/Region'] + country = item["Country/Region"] # Latest data insert value. - latest = list(history.values())[-1]; + latest = list(history.values())[-1] # Normalize the item and append to locations. - locations.append({ - # General info. - 'country': country, - 'country_code': countrycodes.country_code(country), - 'province': item['Province/State'], - - # Coordinates. - 'coordinates': { - 'lat': item['Lat'], - 'long': item['Long'], - }, - - # History. - 'history': history, - - # Latest statistic. - 'latest': int(latest or 0), - }) + locations.append( + { + # General info. + "country": country, + "country_code": countrycodes.country_code(country), + "province": item["Province/State"], + # Coordinates. + "coordinates": {"lat": item["Lat"], "long": item["Long"],}, + # History. + "history": history, + # Latest statistic. + "latest": int(latest or 0), + } + ) # Latest total. - latest = sum(map(lambda location: location['latest'], locations)) + latest = sum(map(lambda location: location["latest"], locations)) # Return the final data. return { - 'locations': locations, - 'latest': latest, - 'last_updated': datetime.utcnow().isoformat() + 'Z', - 'source': 'https://github.com/ExpDev07/coronavirus-tracker-api', + "locations": locations, + "latest": latest, + "last_updated": datetime.utcnow().isoformat() + "Z", + "source": "https://github.com/ExpDev07/coronavirus-tracker-api", } + @cached(cache=TTLCache(maxsize=1024, ttl=3600)) def get_locations(): """ @@ -112,8 +114,8 @@ def get_locations(): :rtype: List[Location] """ # Get all of the data categories locations. - confirmed = get_category('confirmed')['locations'] - deaths = get_category('deaths')['locations'] + confirmed = get_category("confirmed")["locations"] + deaths = get_category("deaths")["locations"] # recovered = get_category('recovered')['locations'] # Final locations to return. @@ -123,36 +125,43 @@ def get_locations(): for index, location in enumerate(confirmed): # Get the timelines. 
timelines = { - 'confirmed' : confirmed[index]['history'], - 'deaths' : deaths[index]['history'], + "confirmed": confirmed[index]["history"], + "deaths": deaths[index]["history"], # 'recovered' : recovered[index]['history'], } # Grab coordinates. - coordinates = location['coordinates'] + coordinates = location["coordinates"] # Create location (supporting timelines) and append. - locations.append(TimelinedLocation( - # General info. - index, location['country'], location['province'], - - # Coordinates. - Coordinates( - coordinates['lat'], - coordinates['long'] - ), - - # Last update. - datetime.utcnow().isoformat() + 'Z', - - # Timelines (parse dates as ISO). - { - 'confirmed': Timeline({ datetime.strptime(date, '%m/%d/%y').isoformat() + 'Z': amount for date, amount in timelines['confirmed'].items() }), - 'deaths' : Timeline({ datetime.strptime(date, '%m/%d/%y').isoformat() + 'Z': amount for date, amount in timelines['deaths'].items() }), - 'recovered': Timeline({}) - } - )) - + locations.append( + TimelinedLocation( + # General info. + index, + location["country"], + location["province"], + # Coordinates. + Coordinates(coordinates["lat"], coordinates["long"]), + # Last update. + datetime.utcnow().isoformat() + "Z", + # Timelines (parse dates as ISO). + { + "confirmed": Timeline( + { + datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + for date, amount in timelines["confirmed"].items() + } + ), + "deaths": Timeline( + { + datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount + for date, amount in timelines["deaths"].items() + } + ), + "recovered": Timeline({}), + }, + ) + ) + # Finally, return the locations. return locations - \ No newline at end of file diff --git a/app/timeline.py b/app/timeline.py index 44e54c12..e1ee6e9b 100644 --- a/app/timeline.py +++ b/app/timeline.py @@ -1,12 +1,13 @@ from datetime import datetime from collections import OrderedDict + class Timeline: """ Timeline with history of data. """ - def __init__(self, history = {}): + def __init__(self, history={}): self.__timeline = history @property @@ -27,7 +28,7 @@ def latest(self): # Last item is the latest. if len(values): return values[-1] or 0 - + # Fallback value of 0. return 0 @@ -38,7 +39,4 @@ def serialize(self): :returns: The serialized timeline. :rtype: dict """ - return { - 'latest' : self.latest, - 'timeline': self.timeline - } \ No newline at end of file + return {"latest": self.latest, "timeline": self.timeline} diff --git a/app/utils/countrycodes.py b/app/utils/countrycodes.py index 666ccce7..af473a28 100644 --- a/app/utils/countrycodes.py +++ b/app/utils/countrycodes.py @@ -4,6 +4,7 @@ # Mapping of country names to alpha-2 codes according to # https://en.wikipedia.org/wiki/ISO_3166-1. # As a reference see also https://github.com/TakahikoKawasaki/nv-i18n (in Java) +# fmt: off is_3166_1 = { "Afghanistan" : "AF", "Ă…land Islands" : "AX", @@ -359,6 +360,7 @@ # "Cruise Ship" has no mapping, i.e. the default val is used } +# fmt: on def country_code(country): """ Return two letter country code (Alpha-2) according to https://en.wikipedia.org/wiki/ISO_3166-1 @@ -371,5 +373,5 @@ def country_code(country): synonym = synonyms[country] return is_3166_1[synonym] else: - print ("No country_code found for '" + country + "'. Using '" + default_code + "'") + print("No country_code found for '" + country + "'. 
Using '" + default_code + "'") return default_code diff --git a/app/utils/date.py b/app/utils/date.py index 42f75b06..3a18832e 100644 --- a/app/utils/date.py +++ b/app/utils/date.py @@ -1,5 +1,6 @@ from dateutil.parser import parse + def is_date(string, fuzzy=False): """ Return whether the string can be interpreted as a date. @@ -9,8 +10,8 @@ def is_date(string, fuzzy=False): :param fuzzy: bool, ignore unknown tokens in string if True """ - try: + try: parse(string, fuzzy=fuzzy) return True except ValueError: - return False \ No newline at end of file + return False diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..75e96b7e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,19 @@ +[tool.black] +line-length = 120 +target-version = ['py36', 'py37', 'py38'] +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | virtualenv + | _build + | buck-out + | build + | dist +)/ +''' \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index b1271106..9da7ec30 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,6 +9,7 @@ from app.main import APP + @pytest.fixture def api_client(): """ diff --git a/tests/test_coordinates.py b/tests/test_coordinates.py index 8cc56b4d..cf355594 100644 --- a/tests/test_coordinates.py +++ b/tests/test_coordinates.py @@ -2,18 +2,12 @@ from unittest import mock from app import coordinates -@pytest.mark.parametrize("latitude, longitude", [ - ("1", "2"), - (100, "2"), - (-3, 0), - (-10, -10000000)]) + +@pytest.mark.parametrize("latitude, longitude", [("1", "2"), (100, "2"), (-3, 0), (-10, -10000000)]) def test_coordinates_class(latitude, longitude): coord_obj = coordinates.Coordinates(latitude=latitude, longitude=longitude) - #validate serialize - check_obj = { - 'latitude' : latitude, - 'longitude': longitude - } + # validate serialize + check_obj = {"latitude": latitude, "longitude": longitude} assert coord_obj.serialize() == check_obj diff --git a/tests/test_countrycodes.py b/tests/test_countrycodes.py index 6d959e9b..a19595cc 100644 --- a/tests/test_countrycodes.py +++ b/tests/test_countrycodes.py @@ -6,17 +6,23 @@ * Test cases for capturing of stdout/stderr """ -@pytest.mark.parametrize("country_name,expected_country_code", [ - ("Germany", "DE"), - ("Bolivia, Plurinational State of", "BO"), - ("Korea, Democratic People's Republic of", "KP"), - ("BlaBla", "XX")]) + +@pytest.mark.parametrize( + "country_name,expected_country_code", + [ + ("Germany", "DE"), + ("Bolivia, Plurinational State of", "BO"), + ("Korea, Democratic People's Republic of", "KP"), + ("BlaBla", "XX"), + ], +) def test_countrycodes_is_3166_1(country_name, expected_country_code): assert countrycodes.country_code(country_name) == expected_country_code -@pytest.mark.parametrize("country_name_synonym, expected_country_code", [ - ("Deutschland", "DE"), - ("Iran (Islamic Republic of)", "IR"), - ("British Virgin Islands", "VG")]) + +@pytest.mark.parametrize( + "country_name_synonym, expected_country_code", + [("Deutschland", "DE"), ("Iran (Islamic Republic of)", "IR"), ("British Virgin Islands", "VG")], +) def test_countrycodes_synonym(country_name_synonym, expected_country_code): assert countrycodes.country_code(country_name_synonym) == expected_country_code diff --git a/tests/test_csbs.py b/tests/test_csbs.py index 3eaab01c..c1502976 100644 --- a/tests/test_csbs.py +++ b/tests/test_csbs.py @@ -4,12 +4,14 @@ from unittest import mock from app.services.location import csbs + def 
mocked_csbs_requests_get(*args, **kwargs): class FakeRequestsGetResponse: """ Returns instance of `FakeRequestsGetResponse` when calling `app.services.location.csbs.requests.get() """ + def __init__(self): self.text = self.read_file() @@ -21,10 +23,11 @@ def read_file(self): print("Try to read {}".format(filepath)) with open(filepath, "r") as file: return file.read() - + return FakeRequestsGetResponse() -@mock.patch('app.services.location.csbs.requests.get', side_effect=mocked_csbs_requests_get) + +@mock.patch("app.services.location.csbs.requests.get", side_effect=mocked_csbs_requests_get) def test_get_locations(mock_request_get): data = csbs.get_locations() assert isinstance(data, list) @@ -32,4 +35,4 @@ def test_get_locations(mock_request_get): # check to see that Unknown/Unassigned has been filtered for d in data: assert d.county != "Unknown" - assert d.county != "Unassigned" \ No newline at end of file + assert d.county != "Unassigned" diff --git a/tests/test_date.py b/tests/test_date.py index 08a2d5cb..efee143a 100644 --- a/tests/test_date.py +++ b/tests/test_date.py @@ -1,15 +1,20 @@ import pytest from app.utils import date -@pytest.mark.parametrize("str_date, fuzzy_bool, expected_value", [ - ("1990-12-1", False, True), - ("2005/3", False, True), - ("Jan 19, 1990", False, True), - ("today is 2019-03-27", False, False), - ("Monday at 12:01am", False, True), - ("xyz_not_a_date", False, False), - ("yesterday", False, False), - ("today is 2019-03-27", True, True)]) + +@pytest.mark.parametrize( + "str_date, fuzzy_bool, expected_value", + [ + ("1990-12-1", False, True), + ("2005/3", False, True), + ("Jan 19, 1990", False, True), + ("today is 2019-03-27", False, False), + ("Monday at 12:01am", False, True), + ("xyz_not_a_date", False, False), + ("yesterday", False, False), + ("today is 2019-03-27", True, True), + ], +) def test_is_date(str_date, fuzzy_bool, expected_value): """ Testdata from https://stackoverflow.com/a/25341965/7120095 diff --git a/tests/test_jhu.py b/tests/test_jhu.py index a503a1c2..a859270b 100644 --- a/tests/test_jhu.py +++ b/tests/test_jhu.py @@ -8,12 +8,14 @@ DATETIME_STRING = "2020-03-17T10:23:22.505550" + def mocked_requests_get(*args, **kwargs): class FakeRequestsGetResponse: """ Returns instance of `FakeRequestsGetResponse` when calling `app.services.location.jhu.requests.get()` """ + def __init__(self, url, filename, state): self.url = url self.filename = filename @@ -29,31 +31,33 @@ def read_file(self, state): # Determine filepath. filepath = "tests/example_data/{}.csv".format(state) - if state == 'recovered': - filepath = 'tests/example_data/time_series_19-covid-Recovered.csv' + if state == "recovered": + filepath = "tests/example_data/time_series_19-covid-Recovered.csv" # Return fake response. print("Try to read {}".format(filepath)) with open(filepath, "r") as file: return file.read() - #get url from `request.get` + # get url from `request.get` url = args[0] - #get filename from url + # get filename from url filename = url.split("/")[-1] - #clean up for id token (e.g. Deaths) + # clean up for id token (e.g. 
Deaths) state = filename.split("-")[-1].replace(".csv", "").lower().capitalize() return FakeRequestsGetResponse(url, filename, state) + def mocked_strptime_isoformat(*args, **kwargs): class DateTimeStrpTime: """ Returns instance of `DateTimeStrpTime` when calling `app.services.location.jhu.datetime.trptime(date, '%m/%d/%y').isoformat()` """ + def __init__(self, date, strformat): self.date = date self.strformat = strformat @@ -66,10 +70,11 @@ def isoformat(self): return DateTimeStrpTime(date, strformat) -@mock.patch('app.services.location.jhu.datetime') -@mock.patch('app.services.location.jhu.requests.get', side_effect=mocked_requests_get) + +@mock.patch("app.services.location.jhu.datetime") +@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get) def test_get_locations(mock_request_get, mock_datetime): - #mock app.services.location.jhu.datetime.utcnow().isoformat() + # mock app.services.location.jhu.datetime.utcnow().isoformat() mock_datetime.utcnow.return_value.isoformat.return_value = DATETIME_STRING mock_datetime.strptime.side_effect = mocked_strptime_isoformat @@ -77,6 +82,6 @@ def test_get_locations(mock_request_get, mock_datetime): assert isinstance(output, list) assert isinstance(output[0], location.Location) - #`jhu.get_locations()` creates id based on confirmed list + # `jhu.get_locations()` creates id based on confirmed list location_confirmed = jhu.get_category("confirmed") assert len(output) == len(location_confirmed["locations"]) diff --git a/tests/test_location.py b/tests/test_location.py index a8b05838..82f701ab 100644 --- a/tests/test_location.py +++ b/tests/test_location.py @@ -3,20 +3,36 @@ from unittest import mock from app import location, coordinates, timeline + def mocked_timeline(*args, **kwargs): class TestTimeline: def __init__(self, latest): self.latest = latest - + return TestTimeline(args[0]) -@pytest.mark.parametrize("test_id, country, country_code, province, latitude, longitude, confirmed_latest, deaths_latest, recovered_latest", [ - (0, "Thailand", "TH", "", 15, 100, 1000, 1111, 22222), - (1, "Deutschland", "DE", "", 15, 100, 1000, 1111, 22222), - (2, "Cruise Ship", "XX", "", 15, 100, 1000, 1111, 22222) -]) -@mock.patch('app.timeline.Timeline', side_effect=mocked_timeline) -def test_location_class(mocked_timeline, test_id, country, country_code, province, latitude, longitude, confirmed_latest, deaths_latest, recovered_latest): + +@pytest.mark.parametrize( + "test_id, country, country_code, province, latitude, longitude, confirmed_latest, deaths_latest, recovered_latest", + [ + (0, "Thailand", "TH", "", 15, 100, 1000, 1111, 22222), + (1, "Deutschland", "DE", "", 15, 100, 1000, 1111, 22222), + (2, "Cruise Ship", "XX", "", 15, 100, 1000, 1111, 22222), + ], +) +@mock.patch("app.timeline.Timeline", side_effect=mocked_timeline) +def test_location_class( + mocked_timeline, + test_id, + country, + country_code, + province, + latitude, + longitude, + confirmed_latest, + deaths_latest, + recovered_latest, +): # id, country, province, coordinates, confirmed, deaths, recovered coords = coordinates.Coordinates(latitude=latitude, longitude=longitude) @@ -27,33 +43,24 @@ def test_location_class(mocked_timeline, test_id, country, country_code, provinc recovered = timeline.Timeline(recovered_latest) # Date now. - now = datetime.utcnow().isoformat() + 'Z' + now = datetime.utcnow().isoformat() + "Z" # Location. 
- location_obj = location.TimelinedLocation(test_id, country, province, coords, now, { - 'confirmed': confirmed, - 'deaths' : deaths, - 'recovered': recovered, - }) + location_obj = location.TimelinedLocation( + test_id, country, province, coords, now, {"confirmed": confirmed, "deaths": deaths, "recovered": recovered,} + ) assert location_obj.country_code == country_code - #validate serialize + # validate serialize check_dict = { - 'id': test_id, - 'country': country, - 'country_code': country_code, - 'province': province, - 'last_updated': now, - 'coordinates': { - 'latitude': latitude, - 'longitude': longitude - }, - 'latest': { - 'confirmed': confirmed_latest, - 'deaths': deaths_latest, - 'recovered': recovered_latest - } + "id": test_id, + "country": country, + "country_code": country_code, + "province": province, + "last_updated": now, + "coordinates": {"latitude": latitude, "longitude": longitude}, + "latest": {"confirmed": confirmed_latest, "deaths": deaths_latest, "recovered": recovered_latest}, } assert location_obj.serialize() == check_dict diff --git a/tests/test_routes.py b/tests/test_routes.py index dee93465..1ea3005c 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -10,8 +10,9 @@ from .test_jhu import mocked_requests_get, mocked_strptime_isoformat, DATETIME_STRING -@mock.patch('app.services.location.jhu.datetime') -@mock.patch('app.services.location.jhu.requests.get', side_effect=mocked_requests_get) + +@mock.patch("app.services.location.jhu.datetime") +@mock.patch("app.services.location.jhu.requests.get", side_effect=mocked_requests_get) class FlaskRoutesTest(unittest.TestCase): """ Need to mock some objects to control testing data locally @@ -19,7 +20,7 @@ class FlaskRoutesTest(unittest.TestCase): Store all integration testcases in one class to ensure app context """ - #load app context only once. + # load app context only once. 
app = app.create_app() def setUp(self): @@ -38,7 +39,7 @@ def test_root_api(self, mock_request_get, mock_datetime): response = self.asgi_client.get("/") assert response.status_code == 200 - assert not response.is_redirect + assert not response.is_redirect def test_v1_confirmed(self, mock_request_get, mock_datetime): mock_datetime.utcnow.return_value.isoformat.return_value = self.date @@ -73,7 +74,7 @@ def test_v1_all(self, mock_request_get, mock_datetime): state = "all" expected_json_output = self.read_file_v1(state=state) return_data = self.client.get("/{}".format(state)).data.decode() - #print(return_data) + # print(return_data) assert return_data == expected_json_output def test_v2_latest(self, mock_request_get, mock_datetime): @@ -82,13 +83,7 @@ def test_v2_latest(self, mock_request_get, mock_datetime): state = "latest" return_data = self.asgi_client.get(f"/v2/{state}").json() - check_dict = { - 'latest': { - 'confirmed': 1940, - 'deaths': 1940, - 'recovered': 0 - } - } + check_dict = {"latest": {"confirmed": 1940, "deaths": 1940, "recovered": 0}} assert return_data == check_dict @@ -116,7 +111,7 @@ def test_v2_locations_id(self, mock_request_get, mock_datetime): with open(filepath, "r") as file: expected_json_output = file.read() - #assert return_data == expected_json_output + # assert return_data == expected_json_output def tearDown(self): pass diff --git a/tests/test_swagger.py b/tests/test_swagger.py index 3d71ae64..8dfa9060 100644 --- a/tests/test_swagger.py +++ b/tests/test_swagger.py @@ -1,7 +1,7 @@ - import pytest -@pytest.mark.parametrize("route",["/", "/docs", "/openapi.json"]) + +@pytest.mark.parametrize("route", ["/", "/docs", "/openapi.json"]) def test_swagger(api_client, route): """Test that the swagger ui, redoc and openapi json are available.""" response = api_client.get(route) diff --git a/tests/test_timeline.py b/tests/test_timeline.py index 728a2c69..d6d277e0 100644 --- a/tests/test_timeline.py +++ b/tests/test_timeline.py @@ -3,13 +3,14 @@ from collections import OrderedDict from app import timeline + def test_timeline_class(): # Unordered timeseries. 
timeseries = { - '1/24/20': 5, - '1/22/20': 2, - '1/25/20': 7, - '1/23/20': 3, + "1/24/20": 5, + "1/22/20": 2, + "1/25/20": 7, + "1/23/20": 3, } history_data = timeline.Timeline(history=timeseries) @@ -18,17 +19,12 @@ def test_timeline_class(): assert history_data.latest == 7 # validate order - assert list(dict(history_data.timeline).keys()) == ['1/22/20', '1/23/20', '1/24/20', '1/25/20'] + assert list(dict(history_data.timeline).keys()) == ["1/22/20", "1/23/20", "1/24/20", "1/25/20"] # validate serialize check_serialize = { - 'latest': 7, - 'timeline': OrderedDict([ - ('1/22/20', 2), - ('1/23/20', 3), - ('1/24/20', 5), - ('1/25/20', 7), - ]) + "latest": 7, + "timeline": OrderedDict([("1/22/20", 2), ("1/23/20", 3), ("1/24/20", 5), ("1/25/20", 7),]), } assert dict(history_data.serialize()) == check_serialize From 3c186063a9dd535e5cd2a6435bc1c8ab1697976e Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 17:53:00 -0400 Subject: [PATCH 02/12] isort --- app/data/__init__.py | 2 +- app/main.py | 18 +++++++----------- app/models/location.py | 6 ++++-- app/models/timeline.py | 3 ++- app/router/latest.py | 3 ++- app/router/locations.py | 6 ++++-- app/router/sources.py | 2 +- app/routes/v1/all.py | 1 + app/routes/v1/confirmed.py | 1 + app/routes/v1/deaths.py | 1 + app/routes/v1/recovered.py | 1 + app/services/location/csbs.py | 12 +++++++----- app/services/location/jhu.py | 17 ++++++++++------- app/timeline.py | 2 +- pyproject.toml | 9 ++++++++- tests/conftest.py | 1 - tests/test_coordinates.py | 4 +++- tests/test_countrycodes.py | 2 ++ tests/test_csbs.py | 6 ++++-- tests/test_date.py | 1 + tests/test_jhu.py | 8 +++++--- tests/test_location.py | 6 ++++-- tests/test_routes.py | 12 +++++++----- tests/test_timeline.py | 6 ++++-- 24 files changed, 81 insertions(+), 49 deletions(-) diff --git a/app/data/__init__.py b/app/data/__init__.py index fe8e25c7..73468add 100644 --- a/app/data/__init__.py +++ b/app/data/__init__.py @@ -1,5 +1,5 @@ -from ..services.location.jhu import JhuLocationService from ..services.location.csbs import CSBSLocationService +from ..services.location.jhu import JhuLocationService # Mapping of services to data-sources. data_sources = {"jhu": JhuLocationService(), "csbs": CSBSLocationService()} diff --git a/app/main.py b/app/main.py index d174127b..44876182 100644 --- a/app/main.py +++ b/app/main.py @@ -1,27 +1,24 @@ """ app.main.py """ +import datetime as dt import logging import os import reprlib -import datetime as dt import pydantic import uvicorn - -from fastapi import FastAPI -from fastapi import Request, Response - -from fastapi.responses import JSONResponse - -from fastapi.middleware.wsgi import WSGIMiddleware +from fastapi import FastAPI, Request, Response from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.wsgi import WSGIMiddleware +from fastapi.responses import JSONResponse from .core import create_app from .data import data_source - -from .models.location import LocationResponse as Location, LocationsResponse as Locations from .models.latest import LatestResponse as Latest +from .models.location import LocationResponse as Location +from .models.location import LocationsResponse as Locations +from .router import router # ############ # FastAPI App @@ -84,7 +81,6 @@ async def handle_validation_error(request: Request, exc: pydantic.error_wrappers # Routing # ################ -from .router import router # Include routers. 
APP.include_router(router, prefix="/v2", tags=["v2"]) diff --git a/app/models/location.py b/app/models/location.py index 8180f450..dc67c4ee 100644 --- a/app/models/location.py +++ b/app/models/location.py @@ -1,7 +1,9 @@ -from pydantic import BaseModel from typing import Dict, List -from .timeline import Timelines + +from pydantic import BaseModel + from .latest import Latest +from .timeline import Timelines class Location(BaseModel): diff --git a/app/models/timeline.py b/app/models/timeline.py index 75404d98..453dfb14 100644 --- a/app/models/timeline.py +++ b/app/models/timeline.py @@ -1,6 +1,7 @@ -from pydantic import BaseModel from typing import Dict +from pydantic import BaseModel + class Timeline(BaseModel): """ diff --git a/app/router/latest.py b/app/router/latest.py index 1bdbe44c..81b254cf 100644 --- a/app/router/latest.py +++ b/app/router/latest.py @@ -1,7 +1,8 @@ from fastapi import Request -from . import router + from ..enums.sources import Sources from ..models.latest import LatestResponse as Latest +from . import router @router.get("/latest", response_model=Latest) diff --git a/app/router/locations.py b/app/router/locations.py index aed9eca4..d0e03c46 100644 --- a/app/router/locations.py +++ b/app/router/locations.py @@ -1,7 +1,9 @@ from fastapi import Request -from . import router + from ..enums.sources import Sources -from ..models.location import LocationResponse as Location, LocationsResponse as Locations +from ..models.location import LocationResponse as Location +from ..models.location import LocationsResponse as Locations +from . import router @router.get("/locations", response_model=Locations, response_model_exclude_unset=True) diff --git a/app/router/sources.py b/app/router/sources.py index 8b8c4778..538921f4 100644 --- a/app/router/sources.py +++ b/app/router/sources.py @@ -1,5 +1,5 @@ -from . import router from ..data import data_sources +from . import router @router.get("/sources") diff --git a/app/routes/v1/all.py b/app/routes/v1/all.py index 88ad1be2..9638c4bd 100644 --- a/app/routes/v1/all.py +++ b/app/routes/v1/all.py @@ -1,4 +1,5 @@ from flask import jsonify + from ...routes import api_v1 as api from ...services.location.jhu import get_category diff --git a/app/routes/v1/confirmed.py b/app/routes/v1/confirmed.py index 78c47611..85cfe039 100644 --- a/app/routes/v1/confirmed.py +++ b/app/routes/v1/confirmed.py @@ -1,4 +1,5 @@ from flask import jsonify + from ...routes import api_v1 as api from ...services.location.jhu import get_category diff --git a/app/routes/v1/deaths.py b/app/routes/v1/deaths.py index becbed64..cb65874b 100644 --- a/app/routes/v1/deaths.py +++ b/app/routes/v1/deaths.py @@ -1,4 +1,5 @@ from flask import jsonify + from ...routes import api_v1 as api from ...services.location.jhu import get_category diff --git a/app/routes/v1/recovered.py b/app/routes/v1/recovered.py index 3d571fed..be5fe646 100644 --- a/app/routes/v1/recovered.py +++ b/app/routes/v1/recovered.py @@ -1,4 +1,5 @@ from flask import jsonify + from ...routes import api_v1 as api from ...services.location.jhu import get_category diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py index e8eae2a2..1983de7b 100644 --- a/app/services/location/csbs.py +++ b/app/services/location/csbs.py @@ -1,6 +1,12 @@ -from . import LocationService +import csv +from datetime import datetime + +import requests +from cachetools import TTLCache, cached + from ...coordinates import Coordinates from ...location.csbs import CSBSLocation +from . 
import LocationService class CSBSLocationService(LocationService): @@ -16,10 +22,6 @@ def get(self, id): return self.get_all()[id] -import requests -import csv -from datetime import datetime -from cachetools import cached, TTLCache # Base URL for fetching data base_url = "https://facts.csbs.org/covid-19/covid19_county.csv" diff --git a/app/services/location/jhu.py b/app/services/location/jhu.py index 75a3df4e..38b20c2e 100644 --- a/app/services/location/jhu.py +++ b/app/services/location/jhu.py @@ -1,7 +1,15 @@ -from . import LocationService -from ...location import TimelinedLocation +import csv +from datetime import datetime + +import requests +from cachetools import TTLCache, cached + from ...coordinates import Coordinates +from ...location import TimelinedLocation from ...timeline import Timeline +from ...utils import countrycodes +from ...utils import date as date_util +from . import LocationService class JhuLocationService(LocationService): @@ -20,11 +28,6 @@ def get(self, id): # --------------------------------------------------------------- -import requests -import csv -from datetime import datetime -from cachetools import cached, TTLCache -from ...utils import countrycodes, date as date_util """ Base URL for fetching category. diff --git a/app/timeline.py b/app/timeline.py index e1ee6e9b..4916ea2b 100644 --- a/app/timeline.py +++ b/app/timeline.py @@ -1,5 +1,5 @@ -from datetime import datetime from collections import OrderedDict +from datetime import datetime class Timeline: diff --git a/pyproject.toml b/pyproject.toml index 75e96b7e..f1226541 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,4 +16,11 @@ exclude = ''' | build | dist )/ -''' \ No newline at end of file +''' +[tool.isort] +known_third_party = "invoke,pkg_resources" +multi_line_output = 3 +include_trailing_comma = "True" +force_grid_wrap = 0 +use_parentheses = "True" +line_length = 120 diff --git a/tests/conftest.py b/tests/conftest.py index 9da7ec30..a9811d22 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,7 +6,6 @@ import pytest from fastapi.testclient import TestClient - from app.main import APP diff --git a/tests/test_coordinates.py b/tests/test_coordinates.py index cf355594..2ec0290a 100644 --- a/tests/test_coordinates.py +++ b/tests/test_coordinates.py @@ -1,5 +1,7 @@ -import pytest from unittest import mock + +import pytest + from app import coordinates diff --git a/tests/test_countrycodes.py b/tests/test_countrycodes.py index a19595cc..1b132266 100644 --- a/tests/test_countrycodes.py +++ b/tests/test_countrycodes.py @@ -1,6 +1,8 @@ import pytest + from app.utils import countrycodes + """ Todo: * Test cases for capturing of stdout/stderr diff --git a/tests/test_csbs.py b/tests/test_csbs.py index c1502976..64852102 100644 --- a/tests/test_csbs.py +++ b/tests/test_csbs.py @@ -1,7 +1,9 @@ -import app import datetime -import pytest from unittest import mock + +import pytest + +import app from app.services.location import csbs diff --git a/tests/test_date.py b/tests/test_date.py index efee143a..bde5516a 100644 --- a/tests/test_date.py +++ b/tests/test_date.py @@ -1,4 +1,5 @@ import pytest + from app.utils import date diff --git a/tests/test_jhu.py b/tests/test_jhu.py index a859270b..2db58140 100644 --- a/tests/test_jhu.py +++ b/tests/test_jhu.py @@ -1,10 +1,12 @@ -import app import datetime -import pytest from unittest import mock -from app.utils import date + +import pytest + +import app from app import location from app.services.location import jhu +from app.utils import date DATETIME_STRING = 
"2020-03-17T10:23:22.505550" diff --git a/tests/test_location.py b/tests/test_location.py index 82f701ab..2135212c 100644 --- a/tests/test_location.py +++ b/tests/test_location.py @@ -1,7 +1,9 @@ -import pytest from datetime import datetime from unittest import mock -from app import location, coordinates, timeline + +import pytest + +from app import coordinates, location, timeline def mocked_timeline(*args, **kwargs): diff --git a/tests/test_routes.py b/tests/test_routes.py index 1ea3005c..7c4b1f03 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -1,14 +1,16 @@ -import app -import unittest -from fastapi.testclient import TestClient import json -from unittest import mock +import unittest from pprint import pformat as pf +from unittest import mock + import pytest +from fastapi.testclient import TestClient + +import app from app import services from app.main import APP -from .test_jhu import mocked_requests_get, mocked_strptime_isoformat, DATETIME_STRING +from .test_jhu import DATETIME_STRING, mocked_requests_get, mocked_strptime_isoformat @mock.patch("app.services.location.jhu.datetime") diff --git a/tests/test_timeline.py b/tests/test_timeline.py index d6d277e0..056286aa 100644 --- a/tests/test_timeline.py +++ b/tests/test_timeline.py @@ -1,6 +1,8 @@ -import pytest -from unittest import mock from collections import OrderedDict +from unittest import mock + +import pytest + from app import timeline From a55dd782979c1a862e75c7b0ba5ad5d1e7059c54 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 17:53:12 -0400 Subject: [PATCH 03/12] isort exceptions --- app/router/__init__.py | 2 +- app/routes/__init__.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/app/router/__init__.py b/app/router/__init__.py index 04da8f26..3c10e956 100644 --- a/app/router/__init__.py +++ b/app/router/__init__.py @@ -4,4 +4,4 @@ router = APIRouter() # The routes. -from . import latest, sources, locations +from . import latest, sources, locations # isort:skip diff --git a/app/routes/__init__.py b/app/routes/__init__.py index 98d5f8fd..2a584490 100644 --- a/app/routes/__init__.py +++ b/app/routes/__init__.py @@ -1,3 +1,8 @@ +""" +app.routes + +isort:skip_file +""" from flask import Blueprint, redirect, request, abort, current_app as app from ..data import data_source From b50fb1fb3f68d51d8c011b9a06fc4f771e57490f Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 17:56:30 -0400 Subject: [PATCH 04/12] final fmt --- app/router/__init__.py | 2 +- app/services/location/csbs.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/app/router/__init__.py b/app/router/__init__.py index 3c10e956..eefb5f0a 100644 --- a/app/router/__init__.py +++ b/app/router/__init__.py @@ -4,4 +4,4 @@ router = APIRouter() # The routes. -from . import latest, sources, locations # isort:skip +from . 
import latest, sources, locations # isort:skip diff --git a/app/services/location/csbs.py b/app/services/location/csbs.py index 1983de7b..6a13f41e 100644 --- a/app/services/location/csbs.py +++ b/app/services/location/csbs.py @@ -22,7 +22,6 @@ def get(self, id): return self.get_all()[id] - # Base URL for fetching data base_url = "https://facts.csbs.org/covid-19/covid19_county.csv" From dd06505ee837bea996894ac96ecaff7d59e66616 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 18:00:17 -0400 Subject: [PATCH 05/12] make fmt & make check-fmt --- .travis.yml | 2 +- Makefile | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index a54844dd..39e16fce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,4 +7,4 @@ install: - "pip install pipenv" - "pipenv install --dev --skip-lock" script: - - "make test lint" + - "make test lint check-fmt" diff --git a/Makefile b/Makefile index 8b5fc47f..d2d3ad1b 100644 --- a/Makefile +++ b/Makefile @@ -19,3 +19,11 @@ test: lint: pylint $(APP) || true + +fmt: + isort -rc --atomic + black . + +check-fmt: + isort -rc --check + black . --check --diff From b97a60ffc52028911e10f1e879463cc338acc2ac Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 18:03:56 -0400 Subject: [PATCH 06/12] black style badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 0a212225..2352a114 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ Support multiple data-sources. [![GitHub last commit](https://img.shields.io/github/last-commit/ExpDev07/coronavirus-tracker-api)](https://github.com/ExpDev07/coronavirus-tracker-api/commits/master) [![GitHub pull requests](https://img.shields.io/github/issues-pr/ExpDev07/coronavirus-tracker-api)](https://github.com/ExpDev07/coronavirus-tracker-api/pulls) [![GitHub issues](https://img.shields.io/github/issues/ExpDev07/coronavirus-tracker-api)](https://github.com/ExpDev07/coronavirus-tracker-api/issues) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Tweet](https://img.shields.io/twitter/url?url=https%3A%2F%2Fgithub.com%2FExpDev07%2Fcoronavirus-tracker-api)](https://twitter.com/intent/tweet?text=COVID19%20Live%20Tracking%20API:%20&url=https%3A%2F%2Fgithub.com%2FExpDev07%2Fcoronavirus-tracker-api) **Live global stats (provided by [fight-covid19/bagdes](https://github.com/fight-covid19/bagdes)) from this API:** From 4a360999a9118fb9457ff53257250f9e45bc614b Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 18:27:39 -0400 Subject: [PATCH 07/12] add black and isort dev to Pipfile --- Pipfile | 2 ++ Pipfile.lock | 91 +++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 92 insertions(+), 1 deletion(-) diff --git a/Pipfile b/Pipfile index 029ca2fb..a5df7173 100644 --- a/Pipfile +++ b/Pipfile @@ -5,6 +5,8 @@ verify_ssl = true [dev-packages] bandit = "*" +black = "==19.10b0" +isort = "*" pytest = "*" pylint = "*" diff --git a/Pipfile.lock b/Pipfile.lock index b79d6b4b..c949f6cb 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "846c10a9cdea8ecb7482b41acd826e486578f42a7443022155bd6484f104376b" + "sha256": "ecd83aad2c3783fdaa5581f562d022a6b500b3f3b4beb7c3f63d3d5baff85813" }, "pipfile-spec": 6, "requires": { @@ -284,6 +284,13 @@ } }, "develop": { + "appdirs": { + "hashes": [ + "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", + 
"sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" + ], + "version": "==1.4.3" + }, "astroid": { "hashes": [ "sha256:71ea07f44df9568a75d0f354c49143a4575d90645e9fead6dfb52c26a85ed13a", @@ -306,6 +313,21 @@ "index": "pypi", "version": "==1.6.2" }, + "black": { + "hashes": [ + "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b", + "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539" + ], + "index": "pypi", + "version": "==19.10b0" + }, + "click": { + "hashes": [ + "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc", + "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a" + ], + "version": "==7.1.1" + }, "gitdb": { "hashes": [ "sha256:284a6a4554f954d6e737cddcff946404393e030b76a282c6640df8efd6b3da5e", @@ -325,6 +347,7 @@ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" ], + "index": "pypi", "version": "==4.3.21" }, "lazy-object-proxy": { @@ -374,6 +397,13 @@ ], "version": "==20.3" }, + "pathspec": { + "hashes": [ + "sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424", + "sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96" + ], + "version": "==0.7.0" + }, "pbr": { "hashes": [ "sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b", @@ -434,6 +464,32 @@ ], "version": "==5.3.1" }, + "regex": { + "hashes": [ + "sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431", + "sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242", + "sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1", + "sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d", + "sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045", + "sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b", + "sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400", + "sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa", + "sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0", + "sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69", + "sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74", + "sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb", + "sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26", + "sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5", + "sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2", + "sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce", + "sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab", + "sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e", + "sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70", + "sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc", + "sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0" + ], + "version": "==2020.2.20" + }, "six": { "hashes": [ "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", @@ -455,6 +511,39 @@ ], "version": "==1.32.0" }, + "toml": { + "hashes": [ + "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", + "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + ], + "version": "==0.10.0" + }, + "typed-ast": { + "hashes": [ + 
"sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", + "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", + "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", + "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", + "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", + "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", + "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", + "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", + "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", + "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", + "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", + "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", + "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", + "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", + "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", + "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", + "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", + "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", + "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", + "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", + "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" + ], + "version": "==1.4.1" + }, "wcwidth": { "hashes": [ "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1", From ef97e3fc6ecd9e6679a42d638e991a7cb6d9beca Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 19:48:02 -0400 Subject: [PATCH 08/12] pylintrc file disable `bad-continuation` up max-line-length to `120` --- pylintrc | 582 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 582 insertions(+) create mode 100644 pylintrc diff --git a/pylintrc b/pylintrc new file mode 100644 index 00000000..4db0f41f --- /dev/null +++ b/pylintrc @@ -0,0 +1,582 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. 
+suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + deprecated-string-function, + deprecated-str-translate-call, + deprecated-itertools-function, + deprecated-types-field, + next-method-defined, + dict-items-not-iterating, + dict-keys-not-iterating, + dict-values-not-iterating, + deprecated-operator-function, + deprecated-urllib-function, + xreadlines-attribute, + deprecated-sys-function, + exception-escape, + comprehension-escape, + bad-continuation # conflicts with black + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). 
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. 
Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=120 # matches black setting + +# Maximum number of lines in a module. +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether the implicit-str-concat-in-sequence should +# generate a warning on implicit string concatenation in sequences defined over +# several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. 
Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. 
+valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=BaseException, + Exception From 555c1ddcd488ebc93457e8ed519c09f8708715c9 Mon Sep 17 00:00:00 2001 From: ExpDev07 Date: Thu, 26 Mar 2020 02:32:55 +0100 Subject: [PATCH 09/12] formatted --- app/location/__init__.py | 10 +++++----- app/models/location.py | 6 +++--- app/utils/countrycodes.py | 2 +- app/utils/populations.py | 9 ++++----- tests/test_location.py | 5 +++-- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/app/location/__init__.py b/app/location/__init__.py index df98d9bf..cafcf547 100644 --- a/app/location/__init__.py +++ b/app/location/__init__.py @@ -52,11 +52,11 @@ def serialize(self): """ return { # General info. - 'id' : self.id, - 'country' : self.country, - 'country_code' : self.country_code, - 'country_population': self.country_population, - 'province' : self.province, + "id": self.id, + "country": self.country, + "country_code": self.country_code, + "country_population": self.country_population, + "province": self.province, # Coordinates. "coordinates": self.coordinates.serialize(), # Last updated. 
diff --git a/app/models/location.py b/app/models/location.py index a547abc2..48fa4d74 100644 --- a/app/models/location.py +++ b/app/models/location.py @@ -15,9 +15,9 @@ class Location(BaseModel): country: str country_code: str country_population: int = None - province: str = '' - county: str = '' - last_updated: str # TODO use datetime.datetime type. + province: str = "" + county: str = "" + last_updated: str # TODO use datetime.datetime type. coordinates: Dict latest: Latest timelines: Timelines = {} diff --git a/app/utils/countrycodes.py b/app/utils/countrycodes.py index 6df0d25a..f85031ec 100644 --- a/app/utils/countrycodes.py +++ b/app/utils/countrycodes.py @@ -377,5 +377,5 @@ def country_code(country): return is_3166_1[country] # Default to default_code. - print ("No country_code found for '" + country + "'. Using '" + default_code + "'") + print("No country_code found for '" + country + "'. Using '" + default_code + "'") return default_code diff --git a/app/utils/populations.py b/app/utils/populations.py index 03edb566..4df0cc55 100644 --- a/app/utils/populations.py +++ b/app/utils/populations.py @@ -12,21 +12,22 @@ def fetch_populations(): :returns: The mapping of populations. :rtype: dict """ - print ("Fetching populations...") + print("Fetching populations...") # Mapping of populations mappings = {} # Fetch the countries. - countries = requests.get("http://api.geonames.org/countryInfoJSON?username=dperic").json()['geonames'] + countries = requests.get("http://api.geonames.org/countryInfoJSON?username=dperic").json()["geonames"] # Go through all the countries and perform the mapping. for country in countries: - mappings.update({ country["countryCode"]: int(country["population"]) or None }) + mappings.update({country["countryCode"]: int(country["population"]) or None}) # Finally, return the mappings. return mappings + # Mapping of alpha-2 codes country codes to population. populations = fetch_populations() @@ -39,5 +40,3 @@ def country_population(country_code, default=None): :rtype: int """ return populations.get(country_code, default) - - diff --git a/tests/test_location.py b/tests/test_location.py index 9255f76e..567eddcd 100644 --- a/tests/test_location.py +++ b/tests/test_location.py @@ -13,6 +13,7 @@ def __init__(self, latest): return TestTimeline(args[0]) + @pytest.mark.parametrize( "test_id, country, country_code, province, latitude, longitude, confirmed_latest, deaths_latest, recovered_latest", [ @@ -39,7 +40,7 @@ def test_location_class( # Timelines confirmed = timeline.Timeline(confirmed_latest) - deaths = timeline.Timeline(deaths_latest) + deaths = timeline.Timeline(deaths_latest) recovered = timeline.Timeline(recovered_latest) # Date now. 
@@ -51,4 +52,4 @@ def test_location_class( ) assert location_obj.country_code == country_code - assert not location_obj.serialize() == None \ No newline at end of file + assert not location_obj.serialize() == None From dac5f2e9249687b9b263ac4d270b1015d4dc58bc Mon Sep 17 00:00:00 2001 From: ExpDev07 Date: Thu, 26 Mar 2020 02:37:46 +0100 Subject: [PATCH 10/12] ran isort --- app/utils/populations.py | 9 ++++++--- tests/conftest.py | 2 +- tests/test_routes.py | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/app/utils/populations.py b/app/utils/populations.py index 4df0cc55..60ceff3d 100644 --- a/app/utils/populations.py +++ b/app/utils/populations.py @@ -1,9 +1,12 @@ -import requests -from io import StringIO, BytesIO -from cachetools import cached, TTLCache +from io import BytesIO, StringIO from zipfile import ZipFile, ZipInfo + +import requests +from cachetools import TTLCache, cached + from .countrycodes import country_code + # Fetching of the populations. def fetch_populations(): """ diff --git a/tests/conftest.py b/tests/conftest.py index a9811d22..7e94a0c3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,9 +4,9 @@ Global conftest file for shared pytest fixtures """ import pytest -from fastapi.testclient import TestClient from app.main import APP +from fastapi.testclient import TestClient @pytest.fixture diff --git a/tests/test_routes.py b/tests/test_routes.py index 7c4b1f03..f4e7600a 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -4,11 +4,11 @@ from unittest import mock import pytest -from fastapi.testclient import TestClient import app from app import services from app.main import APP +from fastapi.testclient import TestClient from .test_jhu import DATETIME_STRING, mocked_requests_get, mocked_strptime_isoformat From bd05311761fd6687c878dea96dbdfd1dc406a50b Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 21:52:28 -0400 Subject: [PATCH 11/12] add fmt and sort pipenv run scripts --- Makefile | 4 ++-- Pipfile | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index d2d3ad1b..78f8f6b4 100644 --- a/Makefile +++ b/Makefile @@ -21,8 +21,8 @@ lint: pylint $(APP) || true fmt: - isort -rc --atomic - black . + isort --apply --atomic + black . -l 120 check-fmt: isort -rc --check diff --git a/Pipfile b/Pipfile index a5df7173..6b30ef90 100644 --- a/Pipfile +++ b/Pipfile @@ -27,3 +27,5 @@ python_version = "3.8" [scripts] dev = "uvicorn app.main:APP --reload" start = "uvicorn app.main:APP" +fmt = "black . 
-l 120" +sort = "isort --apply --atomic" From b884ffd98bffce27f80d1cf3d1df64b4443d08f7 Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 25 Mar 2020 21:55:05 -0400 Subject: [PATCH 12/12] sort --- tests/conftest.py | 2 +- tests/test_routes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7e94a0c3..a9811d22 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,9 +4,9 @@ Global conftest file for shared pytest fixtures """ import pytest +from fastapi.testclient import TestClient from app.main import APP -from fastapi.testclient import TestClient @pytest.fixture diff --git a/tests/test_routes.py b/tests/test_routes.py index f4e7600a..7c4b1f03 100644 --- a/tests/test_routes.py +++ b/tests/test_routes.py @@ -4,11 +4,11 @@ from unittest import mock import pytest +from fastapi.testclient import TestClient import app from app import services from app.main import APP -from fastapi.testclient import TestClient from .test_jhu import DATETIME_STRING, mocked_requests_get, mocked_strptime_isoformat