From 0bb037198cbc5b42914c41eaef690600b7eaf611 Mon Sep 17 00:00:00 2001 From: EliuX Date: Tue, 31 Mar 2020 18:49:20 -0500 Subject: [PATCH 001/387] Extract DAL from time_tracker_api #50 --- .env.template | 7 ++++-- Dockerfile | 2 +- README.md | 12 +++++++--- run.py => api.py | 0 {tests/activities => commons}/__init__.py | 0 .../data_access_layer}/__init__.py | 0 .../data_access_layer/azure}/__init__.py | 0 .../azure}/sql_repository.py | 0 requirements/sql_db_serverless.txt | 12 ++++++++++ requirements/{ => time_tracker_api}/dev.txt | 3 ++- requirements/{ => time_tracker_api}/prod.txt | 12 +++++----- tests/commons/__init__.py | 0 tests/commons/data_access_layer/__init__.py | 0 .../data_access_layer/azure/__init__.py | 0 .../data_access_layer/azure}/resources.py | 2 +- .../azure}/sql_repository_test.py | 0 tests/conftest.py | 22 +++++++++---------- tests/time_tracker_api/__init__.py | 0 tests/time_tracker_api/activities/__init__.py | 0 .../activities/activities_namespace_test.py | 0 tests/time_tracker_api/projects/__init__.py | 0 .../projects/projects_namespace_test.py | 0 tests/{ => time_tracker_api}/smoke_test.py | 0 .../time_tracker_api/time_entries/__init__.py | 0 .../time_entries_namespace_test.py | 0 .../activities/activities_model.py | 4 ++-- time_tracker_api/config.py | 5 +++-- time_tracker_api/database.py | 2 +- time_tracker_api/projects/projects_model.py | 3 ++- .../time_entries/time_entries_model.py | 4 ++-- 30 files changed, 56 insertions(+), 34 deletions(-) rename run.py => api.py (100%) rename {tests/activities => commons}/__init__.py (100%) rename {tests/projects => commons/data_access_layer}/__init__.py (100%) rename {tests/time_entries => commons/data_access_layer/azure}/__init__.py (100%) rename {time_tracker_api => commons/data_access_layer/azure}/sql_repository.py (100%) create mode 100644 requirements/sql_db_serverless.txt rename requirements/{ => time_tracker_api}/dev.txt (64%) rename requirements/{ => time_tracker_api}/prod.txt (74%) create mode 100644 tests/commons/__init__.py create mode 100644 tests/commons/data_access_layer/__init__.py create mode 100644 tests/commons/data_access_layer/azure/__init__.py rename tests/{ => commons/data_access_layer/azure}/resources.py (85%) rename tests/{ => commons/data_access_layer/azure}/sql_repository_test.py (100%) create mode 100644 tests/time_tracker_api/__init__.py create mode 100644 tests/time_tracker_api/activities/__init__.py rename tests/{ => time_tracker_api}/activities/activities_namespace_test.py (100%) create mode 100644 tests/time_tracker_api/projects/__init__.py rename tests/{ => time_tracker_api}/projects/projects_namespace_test.py (100%) rename tests/{ => time_tracker_api}/smoke_test.py (100%) create mode 100644 tests/time_tracker_api/time_entries/__init__.py rename tests/{ => time_tracker_api}/time_entries/time_entries_namespace_test.py (100%) diff --git a/.env.template b/.env.template index 96325fea..3ae757dd 100644 --- a/.env.template +++ b/.env.template @@ -1,5 +1,8 @@ -# Package where the app is located +# API +## Package where the app is located export FLASK_APP=time_tracker_api -# The database connection URI. Check out the README.md for more details + +# Common attributes +## The database connection URI. 
Check out the README.md for more details DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server diff --git a/Dockerfile b/Dockerfile index fe98d10a..a6482d82 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,4 +27,4 @@ ENV FLASK_APP time_tracker_api EXPOSE 5000 -CMD ["gunicorn", "-b 0.0.0.0:5000", "run:app"] +CMD ["gunicorn", "-b 0.0.0.0:5000", "api:app"] diff --git a/README.md b/README.md index 6d419955..11b4ec81 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # time-tracker-api -The API of the TSheets killer app. +This is the mono-repository for the backend services and common codebase ## Getting started Follow the following instructions to get the project ready to use ASAP. @@ -30,10 +30,16 @@ automatically [pip](https://pip.pypa.io/en/stable/) as well. - Install the requirements: ``` - python3 -m pip install -r requirements/.txt + python3 -m pip install -r requirements//.txt ``` - The `stage` can be `dev` or `prod`. + Where is one of the executable app namespace, e.g. `time_tracker_api`. + The `stage` can be + + * `dev`: Used for working locally + * `prod`: For anything deployed + + Remember to do it with Python 3. diff --git a/run.py b/api.py similarity index 100% rename from run.py rename to api.py diff --git a/tests/activities/__init__.py b/commons/__init__.py similarity index 100% rename from tests/activities/__init__.py rename to commons/__init__.py diff --git a/tests/projects/__init__.py b/commons/data_access_layer/__init__.py similarity index 100% rename from tests/projects/__init__.py rename to commons/data_access_layer/__init__.py diff --git a/tests/time_entries/__init__.py b/commons/data_access_layer/azure/__init__.py similarity index 100% rename from tests/time_entries/__init__.py rename to commons/data_access_layer/azure/__init__.py diff --git a/time_tracker_api/sql_repository.py b/commons/data_access_layer/azure/sql_repository.py similarity index 100% rename from time_tracker_api/sql_repository.py rename to commons/data_access_layer/azure/sql_repository.py diff --git a/requirements/sql_db_serverless.txt b/requirements/sql_db_serverless.txt new file mode 100644 index 00000000..d3eadf51 --- /dev/null +++ b/requirements/sql_db_serverless.txt @@ -0,0 +1,12 @@ +# requirements/sql_db_serverless.txt + +# For SQL database serverless (MS SQL) + + +# SQL Server driver +pyodbc==4.0.30 + +# ORM +SQLAlchemy==1.3.15 +SQLAlchemy-Utils==0.36.3 +flask_sqlalchemy==2.4.1 \ No newline at end of file diff --git a/requirements/dev.txt b/requirements/time_tracker_api/dev.txt similarity index 64% rename from requirements/dev.txt rename to requirements/time_tracker_api/dev.txt index e1d4d47d..2e5fd7d8 100644 --- a/requirements/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -1,5 +1,6 @@ -# requirements/dev.txt +# requirements/time_tracker_api/dev.txt +# Include the prod resources -r prod.txt # For development diff --git a/requirements/prod.txt b/requirements/time_tracker_api/prod.txt similarity index 74% rename from requirements/prod.txt rename to requirements/time_tracker_api/prod.txt index eb52998c..3ce670df 100644 --- a/requirements/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -1,4 +1,8 @@ -# requirements/prod.txt +# requirements/time_tracker_api/prod.txt + +# Dependencies +-r ../sql_db_serverless.txt + # For production releases @@ -21,12 +25,6 @@ Faker==4.0.2 #CLI support Flask-Script==2.0.6 -# SQL database (MS SQL) -pyodbc==4.0.30 -SQLAlchemy==1.3.15 -SQLAlchemy-Utils==0.36.3 -flask_sqlalchemy==2.4.1 - # Handling 
requests requests==2.23.0 diff --git a/tests/commons/__init__.py b/tests/commons/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/commons/data_access_layer/__init__.py b/tests/commons/data_access_layer/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/commons/data_access_layer/azure/__init__.py b/tests/commons/data_access_layer/azure/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/resources.py b/tests/commons/data_access_layer/azure/resources.py similarity index 85% rename from tests/resources.py rename to tests/commons/data_access_layer/azure/resources.py index 662f3215..4b2300a4 100644 --- a/tests/resources.py +++ b/tests/commons/data_access_layer/azure/resources.py @@ -1,4 +1,4 @@ -from time_tracker_api.sql_repository import db, AuditedSQLModel +from commons.data_access_layer.azure.sql_repository import db, AuditedSQLModel from sqlalchemy_utils import UUIDType import uuid diff --git a/tests/sql_repository_test.py b/tests/commons/data_access_layer/azure/sql_repository_test.py similarity index 100% rename from tests/sql_repository_test.py rename to tests/commons/data_access_layer/azure/sql_repository_test.py diff --git a/tests/conftest.py b/tests/conftest.py index 679c7ee8..1e481ced 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,4 @@ import pytest -from _pytest.fixtures import FixtureRequest from flask import Flask from flask.testing import FlaskClient @@ -7,7 +6,7 @@ @pytest.fixture(scope='session') -def app(request: FixtureRequest) -> Flask: +def app() -> Flask: return create_app("time_tracker_api.config.TestConfig") @@ -18,15 +17,16 @@ def client(app: Flask) -> FlaskClient: @pytest.fixture(scope="module") -def sql_repository(): - from .resources import PersonSQLModel - from time_tracker_api.sql_repository import db +def sql_repository(app: Flask): + with app.test_client(): + from tests.commons.data_access_layer.azure.resources import PersonSQLModel + from commons.data_access_layer.azure.sql_repository import db - db.metadata.create_all(bind=db.engine, tables=[PersonSQLModel.__table__]) - print("Test models created!") + db.metadata.create_all(bind=db.engine, tables=[PersonSQLModel.__table__]) + print("Test models created!") - from time_tracker_api.sql_repository import SQLRepository - yield SQLRepository(PersonSQLModel) + from commons.data_access_layer.azure.sql_repository import SQLRepository + yield SQLRepository(PersonSQLModel) - db.metadata.drop_all(bind=db.engine, tables=[PersonSQLModel.__table__]) - print("Test models removed!") + db.metadata.drop_all(bind=db.engine, tables=[PersonSQLModel.__table__]) + print("Test models removed!") diff --git a/tests/time_tracker_api/__init__.py b/tests/time_tracker_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/time_tracker_api/activities/__init__.py b/tests/time_tracker_api/activities/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py similarity index 100% rename from tests/activities/activities_namespace_test.py rename to tests/time_tracker_api/activities/activities_namespace_test.py diff --git a/tests/time_tracker_api/projects/__init__.py b/tests/time_tracker_api/projects/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py similarity index 100% rename from 
tests/projects/projects_namespace_test.py rename to tests/time_tracker_api/projects/projects_namespace_test.py diff --git a/tests/smoke_test.py b/tests/time_tracker_api/smoke_test.py similarity index 100% rename from tests/smoke_test.py rename to tests/time_tracker_api/smoke_test.py diff --git a/tests/time_tracker_api/time_entries/__init__.py b/tests/time_tracker_api/time_entries/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py similarity index 100% rename from tests/time_entries/time_entries_namespace_test.py rename to tests/time_tracker_api/time_entries/time_entries_namespace_test.py diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 86d91754..bfbb6c57 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -6,10 +6,10 @@ class ActivityDao(CRUDDao): def create_dao() -> ActivityDao: - from time_tracker_api.sql_repository import db - from time_tracker_api.sql_repository import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid + from commons.data_access_layer.azure.sql_repository import db + from commons.data_access_layer.azure.sql_repository import SQLCRUDDao class ActivitySQLModel(db.Model): __tablename__ = 'activity' diff --git a/time_tracker_api/config.py b/time_tracker_api/config.py index 3700adde..ebdddb81 100644 --- a/time_tracker_api/config.py +++ b/time_tracker_api/config.py @@ -21,9 +21,10 @@ class DevelopmentConfig(Config): class SQLConfig(Config): - SQLALCHEMY_DATABASE_URI = Config.DATABASE_URI SQLALCHEMY_COMMIT_ON_TEARDOWN = True SQLALCHEMY_TRACK_MODIFICATIONS = False + DATABASE_URI = os.environ.get('DATABASE_URI') + SQLALCHEMY_DATABASE_URI = DATABASE_URI class TestConfig(SQLConfig): @@ -41,7 +42,7 @@ class ProductionConfig(Config): class AzureConfig(SQLConfig): - DATABASE_URI = os.environ.get('DATABASE_URI', os.environ.get('SQLAZURECONNSTR_DATABASE_URI')) + DATABASE_URI = os.environ.get('SQLAZURECONNSTR_DATABASE_URI', SQLConfig.DATABASE_URI) SQLALCHEMY_DATABASE_URI = DATABASE_URI diff --git a/time_tracker_api/database.py b/time_tracker_api/database.py index 8079ec24..094c894d 100644 --- a/time_tracker_api/database.py +++ b/time_tracker_api/database.py @@ -52,7 +52,7 @@ def __call__(self, *args, **kwargs): def init_app(app: Flask) -> None: - from time_tracker_api.sql_repository import init_app, SQLSeeder + from commons.data_access_layer.azure.sql_repository import init_app, SQLSeeder init_app(app) global seeder seeder = SQLSeeder() diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index fbf1630f..5da7ddcf 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -17,11 +17,12 @@ class ProjectDao(CRUDDao): def create_dao() -> ProjectDao: - from time_tracker_api.sql_repository import db + from commons.data_access_layer.azure.sql_repository import db from time_tracker_api.database import COMMENTS_MAX_LENGTH from time_tracker_api.sql_repository import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid + from commons.data_access_layer.azure.sql_repository import SQLCRUDDao, AuditedSQLModel class ProjectSQLModel(db.Model): __tablename__ = 'project' diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 1460685b..80a3aef4 100644 
--- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -8,11 +8,11 @@ class TimeEntriesDao(CRUDDao): def create_dao() -> TimeEntriesDao: - from time_tracker_api.sql_repository import db + from commons.data_access_layer.azure.sql_repository import db from time_tracker_api.database import COMMENTS_MAX_LENGTH - from time_tracker_api.sql_repository import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid + from commons.data_access_layer.azure.sql_repository import SQLCRUDDao class TimeEntrySQLModel(db.Model): __tablename__ = 'time_entry' From 6b17a36f7ebac83abdfa3e20aca62add8213fef6 Mon Sep 17 00:00:00 2001 From: EliuX Date: Wed, 1 Apr 2020 19:13:44 -0500 Subject: [PATCH 002/387] Create and test Cosmos db repository #50 --- .env.template | 12 +- Dockerfile | 2 +- README.md | 4 +- commons/data_access_layer/azure/__init__.py | 0 commons/data_access_layer/cosmos_db.py | 149 +++++ .../{azure/sql_repository.py => sql.py} | 4 +- requirements/azure_cosmos.txt | 14 + requirements/commons.txt | 9 + .../{sql_db_serverless.txt => sql_db.txt} | 4 +- requirements/time_tracker_api/dev.txt | 1 - requirements/time_tracker_api/prod.txt | 11 +- setup.cfg | 1 + .../data_access_layer/azure/__init__.py | 0 .../data_access_layer/azure/resources.py | 15 - .../data_access_layer/cosmos_db_test.py | 509 ++++++++++++++++++ .../sql_repository_test.py => sql_test.py} | 0 tests/conftest.py | 99 +++- .../activities/activities_namespace_test.py | 10 +- .../projects/projects_namespace_test.py | 10 +- .../time_entries_namespace_test.py | 8 +- .../activities/activities_model.py | 8 +- .../activities/activities_namespace.py | 1 - time_tracker_api/config.py | 17 +- time_tracker_api/customers/customers_model.py | 4 +- time_tracker_api/database.py | 23 +- .../project_types/project_types_model.py | 4 +- time_tracker_api/projects/projects_model.py | 5 +- .../time_entries/time_entries_model.py | 4 +- 28 files changed, 850 insertions(+), 78 deletions(-) delete mode 100644 commons/data_access_layer/azure/__init__.py create mode 100644 commons/data_access_layer/cosmos_db.py rename commons/data_access_layer/{azure/sql_repository.py => sql.py} (97%) create mode 100644 requirements/azure_cosmos.txt create mode 100644 requirements/commons.txt rename requirements/{sql_db_serverless.txt => sql_db.txt} (59%) delete mode 100644 tests/commons/data_access_layer/azure/__init__.py delete mode 100644 tests/commons/data_access_layer/azure/resources.py create mode 100644 tests/commons/data_access_layer/cosmos_db_test.py rename tests/commons/data_access_layer/{azure/sql_repository_test.py => sql_test.py} (100%) diff --git a/.env.template b/.env.template index 3ae757dd..16549078 100644 --- a/.env.template +++ b/.env.template @@ -2,7 +2,13 @@ ## Package where the app is located export FLASK_APP=time_tracker_api - # Common attributes -## The database connection URI. Check out the README.md for more details -DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server +## In case you use an Azure SQL database, you must specify the database connection URI. 
Check out the README.md for more details +#export DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server + +## For Azure Cosmos DB +export DATABASE_ACCOUNT_URI=https://.documents.azure.com:443 +export DATABASE_MASTER_KEY= +export DATABASE_NAME= +### or +# export DATABASE_URI=AccountEndpoint=;AccountKey= diff --git a/Dockerfile b/Dockerfile index a6482d82..bf9bf448 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ COPY . . RUN apk update \ && apk add --no-cache $buildDeps gcc unixodbc-dev \ - && pip3 install --no-cache-dir -r requirements/prod.txt \ + && pip3 install --no-cache-dir -r requirements/time_tracker_api/prod.txt \ && curl -O https://download.microsoft.com/download/e/4/e/e4e67866-dffd-428c-aac7-8d28ddafb39b/msodbcsql17_17.5.2.1-1_amd64.apk \ && curl -O https://download.microsoft.com/download/e/4/e/e4e67866-dffd-428c-aac7-8d28ddafb39b/mssql-tools_17.5.2.1-1_amd64.apk \ && curl -O https://download.microsoft.com/download/e/4/e/e4e67866-dffd-428c-aac7-8d28ddafb39b/msodbcsql17_17.5.2.1-1_amd64.sig \ diff --git a/README.md b/README.md index 11b4ec81..0338ae95 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # time-tracker-api -This is the mono-repository for the backend services and common codebase +This is the mono-repository for the backend services and their common codebase ## Getting started Follow the following instructions to get the project ready to use ASAP. @@ -100,7 +100,7 @@ The [integrations tests](https://en.wikipedia.org/wiki/Integration_testing) veri are working well together. These are the default tests we should run: ```dotenv -python3 -m pytest -v --ignore=tests/sql_repository_test.py +python3 -m pytest -v --ignore=tests/commons/data_access_layer/azure/sql_repository_test.py ``` As you may have noticed we are ignoring the tests related with the repository. diff --git a/commons/data_access_layer/azure/__init__.py b/commons/data_access_layer/azure/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py new file mode 100644 index 00000000..13d9272d --- /dev/null +++ b/commons/data_access_layer/cosmos_db.py @@ -0,0 +1,149 @@ +import dataclasses +import uuid +from typing import Callable + +import azure.cosmos.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos import ContainerProxy +from flask import Flask + + +class CosmosDBFacade: + def __init__(self, app: Flask): # pragma: no cover + self.app = app + + db_uri = app.config.get('DATABASE_URI') + if db_uri is None: + app.logger.warn("DATABASE_URI was not found. 
Looking for alternative variables.") + account_uri = app.config.get('DATABASE_ACCOUNT_URI') + if account_uri is None: + raise EnvironmentError("DATABASE_ACCOUNT_URI is not defined in the environment") + + master_key = app.config.get('DATABASE_MASTER_KEY') + if master_key is None: + raise EnvironmentError("DATABASE_MASTER_KEY is not defined in the environment") + + self.client = cosmos_client.CosmosClient(account_uri, {'masterKey': master_key}, + user_agent="CosmosDBDotnetQuickstart", + user_agent_overwrite=True) + else: + self.client = cosmos_client.CosmosClient.from_connection_string(db_uri) + + db_id = app.config.get('DATABASE_NAME') + if db_id is None: + raise EnvironmentError("DATABASE_NAME is not defined in the environment") + + self.db = self.client.get_database_client(db_id) + + def create_container(self, container_definition: dict): + try: + return self.db.create_container(**container_definition) + + except exceptions.CosmosResourceExistsError: # pragma: no cover + self.app.logger.info('Container with id \'{0}\' was found'.format(container_definition["id"])) + + def delete_container(self, container_id: str): + try: + return self.db.delete_container(container_id) + + except exceptions.CosmosHttpResponseError: # pragma: no cover + self.app.logger.info('Container with id \'{0}\' was not deleted'.format(container_id)) + + +cosmos_helper: CosmosDBFacade = None + + +class CosmosDBModel(): + def __init__(self, data): + names = set([f.name for f in dataclasses.fields(self)]) + for k, v in data.items(): + if k in names: + setattr(self, k, v) + + +class CosmosDBRepository: + def __init__(self, container_id: str, + mapper: Callable = None, + custom_cosmos_helper: CosmosDBFacade = None): + global cosmos_helper + self.cosmos_helper = custom_cosmos_helper or cosmos_helper + if self.cosmos_helper is None: # pragma: no cover + raise ValueError("The cosmos_db module has not been initialized!") + self.mapper = mapper + self.container: ContainerProxy = self.cosmos_helper.db.get_container_client(container_id) + + @classmethod + def from_definition(cls, container_definition: dict, + mapper: Callable = None, + custom_cosmos_helper: CosmosDBFacade = None): + return cls(container_definition['id'], mapper, custom_cosmos_helper) + + def create(self, data: dict, mapper: Callable = None): + function_mapper = self.get_mapper_or_dict(mapper) + return function_mapper(self.container.create_item(body=data)) + + def find(self, id: str, partition_key_value, visible_only=True, mapper: Callable = None): + found_item = self.container.read_item(id, partition_key_value) + function_mapper = self.get_mapper_or_dict(mapper) + return function_mapper(self.check_visibility(found_item, visible_only)) + + def find_all(self, partition_key_value: str, max_count=None, offset=0, + visible_only=True, mapper: Callable = None): + # TODO Use the tenant_id param and change container alias + max_count = self.get_page_size_or(max_count) + result = self.container.query_items( + query=""" + SELECT * FROM c WHERE c.tenant_id=@tenant_id AND {visibility_condition} + OFFSET @offset LIMIT @max_count + """.format(visibility_condition=self.create_sql_condition_for_visibility(visible_only)), + parameters=[ + {"name": "@tenant_id", "value": partition_key_value}, + {"name": "@offset", "value": offset}, + {"name": "@max_count", "value": max_count}, + ], + partition_key=partition_key_value, + max_item_count=max_count) + + function_mapper = self.get_mapper_or_dict(mapper) + return list(map(function_mapper, result)) + + def partial_update(self, id: str, 
changes: dict, partition_key_value: str, + visible_only=True, mapper: Callable = None): + item_data = self.find(id, partition_key_value, visible_only=visible_only) + item_data.update(changes) + return self.update(id, item_data, mapper=mapper) + + def update(self, id: str, item_data: dict, mapper: Callable = None): + function_mapper = self.get_mapper_or_dict(mapper) + return function_mapper(self.container.replace_item(id, body=item_data)) + + def delete(self, id: str, partition_key_value: str, mapper: Callable = None): + return self.partial_update(id, { + 'deleted': str(uuid.uuid4()) + }, partition_key_value, visible_only=True, mapper=mapper) + + def check_visibility(self, item, throw_not_found_if_deleted): + if throw_not_found_if_deleted and item.get('deleted') is not None: + raise exceptions.CosmosResourceNotFoundError(message='Deleted item', + status_code=404) + + return item + + def create_sql_condition_for_visibility(self, visible_only: bool, container_name='c') -> str: + if visible_only: + # We are considering that `deleted == null` is not a choice + return 'NOT IS_DEFINED(%s.deleted)' % container_name + return 'true' + + def get_mapper_or_dict(self, alternative_mapper: Callable) -> Callable: + return alternative_mapper or self.mapper or dict + + def get_page_size_or(self, custom_page_size: int) -> int: + # TODO The default value should be taken from the Azure Feature Manager + # or any other repository for the settings + return custom_page_size or 100 + + +def init_app(app: Flask) -> None: + global cosmos_helper + cosmos_helper = CosmosDBFacade(app) diff --git a/commons/data_access_layer/azure/sql_repository.py b/commons/data_access_layer/sql.py similarity index 97% rename from commons/data_access_layer/azure/sql_repository.py rename to commons/data_access_layer/sql.py index 7bd5934a..e9a02491 100644 --- a/commons/data_access_layer/azure/sql_repository.py +++ b/commons/data_access_layer/sql.py @@ -14,7 +14,7 @@ def handle_commit_issues(f): def rollback_if_necessary(*args, **kw): try: return f(*args, **kw) - except: + except: # pragma: no cover db.session.rollback() raise @@ -92,7 +92,7 @@ def delete(self, id): self.repository.remove(id) -class SQLSeeder(Seeder): +class SQLSeeder(Seeder): # pragma: no cover def run(self): print("Provisioning database...") db.create_all() diff --git a/requirements/azure_cosmos.txt b/requirements/azure_cosmos.txt new file mode 100644 index 00000000..53ab3e98 --- /dev/null +++ b/requirements/azure_cosmos.txt @@ -0,0 +1,14 @@ +# requirements/azure_cosmos.txt + +# For Cosmos DB + +# Azure Cosmos DB official library +azure-core==1.1.1 +azure-cosmos==4.0.0b6 +certifi==2019.11.28 +chardet==3.0.4 +idna==2.8 +six==1.13.0 +urllib3==1.25.7 +virtualenv==16.7.9 +virtualenv-clone==0.5.3 \ No newline at end of file diff --git a/requirements/commons.txt b/requirements/commons.txt new file mode 100644 index 00000000..d6e85a2e --- /dev/null +++ b/requirements/commons.txt @@ -0,0 +1,9 @@ +# requirements/commons.txt + +# For Common dependencies + +# Handling requests +requests==2.23.0 + +# To create sample content in tests and API documentation +Faker==4.0.2 \ No newline at end of file diff --git a/requirements/sql_db_serverless.txt b/requirements/sql_db.txt similarity index 59% rename from requirements/sql_db_serverless.txt rename to requirements/sql_db.txt index d3eadf51..c3f62a2d 100644 --- a/requirements/sql_db_serverless.txt +++ b/requirements/sql_db.txt @@ -1,6 +1,6 @@ -# requirements/sql_db_serverless.txt +# requirements/sql_db.txt -# For SQL database serverless 
(MS SQL) +# For SQL database (MS SQL) # SQL Server driver diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index 2e5fd7d8..a9cc28ca 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -10,7 +10,6 @@ pytest==5.2.0 # Mocking pytest-mock==2.0.0 -Faker==4.0.2 # Coverage coverage==4.5.1 \ No newline at end of file diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index 3ce670df..f1221c4a 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -1,8 +1,9 @@ # requirements/time_tracker_api/prod.txt # Dependencies --r ../sql_db_serverless.txt - +-r ../commons.txt +-r ../azure_cosmos.txt +-r ../sql_db.txt # For production releases @@ -19,15 +20,9 @@ gunicorn==20.0.4 #Swagger support for Restful API flask-restplus==0.12.1 -#Mocking -Faker==4.0.2 - #CLI support Flask-Script==2.0.6 -# Handling requests -requests==2.23.0 - # The Debug Toolbar Flask-DebugToolbar==0.11.0 diff --git a/setup.cfg b/setup.cfg index f7333a82..824945c8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,3 +6,4 @@ addopts = -p no:warnings branch = True source = time_tracker_api + commons diff --git a/tests/commons/data_access_layer/azure/__init__.py b/tests/commons/data_access_layer/azure/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/commons/data_access_layer/azure/resources.py b/tests/commons/data_access_layer/azure/resources.py deleted file mode 100644 index 4b2300a4..00000000 --- a/tests/commons/data_access_layer/azure/resources.py +++ /dev/null @@ -1,15 +0,0 @@ -from commons.data_access_layer.azure.sql_repository import db, AuditedSQLModel - -from sqlalchemy_utils import UUIDType -import uuid - - -class PersonSQLModel(db.Model, AuditedSQLModel): - __tablename__ = 'test' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(80), unique=False, nullable=False) - email = db.Column(db.String(120), unique=True, nullable=False) - age = db.Column(db.Integer, nullable=False) - - def __repr__(self): - return '' % self.name diff --git a/tests/commons/data_access_layer/cosmos_db_test.py b/tests/commons/data_access_layer/cosmos_db_test.py new file mode 100644 index 00000000..d369cf20 --- /dev/null +++ b/tests/commons/data_access_layer/cosmos_db_test.py @@ -0,0 +1,509 @@ +from dataclasses import dataclass +from typing import Callable + +import pytest +from azure.cosmos.exceptions import CosmosResourceExistsError, CosmosResourceNotFoundError +from faker import Faker +from pytest import fail + +from commons.data_access_layer.cosmos_db import CosmosDBRepository, CosmosDBModel + +fake = Faker() +Faker.seed() + +existing_item: dict = None + + +@dataclass() +class Person(CosmosDBModel): + id: str + name: str + email: str + age: int + tenant_id: str + + def __init__(self, data): + super(Person, self).__init__(data) + + def is_adult(self): + return self.age >= 18 + + +def test_repository_exists(cosmos_db_repository): + assert cosmos_db_repository is not None + + +def test_create_should_succeed(cosmos_db_repository: CosmosDBRepository, tenant_id: str): + global existing_item + existing_item = dict(id=fake.uuid4(), + name=fake.name(), + email=fake.safe_email(), + age=fake.pyint(min_value=10, max_value=80), + tenant_id=tenant_id) + + created_item = cosmos_db_repository.create(existing_item) + + assert created_item is not None + assert all(item in created_item.items() for item in 
existing_item.items()) + + +def test_create_should_fail_if_user_is_same(cosmos_db_repository: CosmosDBRepository): + try: + global existing_elemen + cosmos_db_repository.create(existing_item) + + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceExistsError + assert e.status_code == 409 + + +def test_create_with_diff_unique_data_but_same_tenant_should_succeed( + cosmos_db_repository: CosmosDBRepository): + global existing_item + new_data = existing_item.copy() + new_data.update({ + 'id': fake.uuid4(), + 'email': fake.safe_email(), + }) + + result = cosmos_db_repository.create(new_data) + assert result["id"] != existing_item["id"], 'It should be a new element' + + +def test_create_with_same_id_should_fail( + cosmos_db_repository: CosmosDBRepository): + try: + global existing_item + new_data = existing_item.copy() + new_data.update({ + 'email': fake.safe_email(), + }) + + cosmos_db_repository.create(new_data) + + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceExistsError + assert e.status_code == 409 + + +def test_create_with_diff_id_but_same_unique_field_should_fail( + cosmos_db_repository: CosmosDBRepository): + try: + global existing_item + new_data = existing_item.copy() + new_data.update({ + 'id': fake.uuid4() + }) + + cosmos_db_repository.create(new_data) + + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceExistsError + assert e.status_code == 409 + + +def test_create_with_no_partition_key_attrib_should_pass( + cosmos_db_repository: CosmosDBRepository): + global existing_item + new_data = existing_item.copy() + + new_data.update({ + 'tenant_id': None, + }) + + result = cosmos_db_repository.create(new_data) + assert result["tenant_id"] is None, "A None value in a partition key is valid" + + +def test_create_with_same_id_but_diff_partition_key_attrib_should_succeed( + cosmos_db_repository: CosmosDBRepository, + another_tenant_id: str): + global existing_item + new_data = existing_item.copy() + + new_data.update({ + 'tenant_id': another_tenant_id, + }) + + result = cosmos_db_repository.create(new_data) + assert result["id"] == existing_item["id"], "Should have allowed same id" + + +def test_create_with_mapper_should_provide_calculated_fields( + cosmos_db_repository: CosmosDBRepository, tenant_id): + new_item = dict(id=fake.uuid4(), + name=fake.name(), + email=fake.safe_email(), + age=fake.pyint(min_value=10, max_value=80), + tenant_id=tenant_id) + + created_item: Person = cosmos_db_repository.create(new_item, mapper=Person) + + assert created_item is not None + assert all(item in created_item.__dict__.items() for item in new_item.items()) + assert type(created_item) is Person, "The result should be wrapped with a class" + assert created_item.is_adult() is (new_item["age"] >= 18) + + +def test_find_by_valid_id_should_succeed(cosmos_db_repository: CosmosDBRepository): + found_item = cosmos_db_repository.find(existing_item["id"], + existing_item['tenant_id']) + + assert all(item in found_item.items() for item in existing_item.items()) + + +def test_find_by_invalid_id_should_fail(cosmos_db_repository: CosmosDBRepository): + try: + cosmos_db_repository.find(fake.uuid4(), existing_item['tenant_id']) + + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_find_by_invalid_partition_key_value_should_fail(cosmos_db_repository: CosmosDBRepository): + try: + 
cosmos_db_repository.find(existing_item["id"], fake.uuid4()) + + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_find_by_valid_id_and_mapper_should_succeed(cosmos_db_repository: CosmosDBRepository): + found_item: Person = cosmos_db_repository.find(existing_item["id"], + existing_item['tenant_id'], + mapper=Person) + + assert all(item in found_item.__dict__.items() for item in existing_item.items()) + assert type(found_item) is Person, "The result should be wrapped with a class" + assert found_item.is_adult() is (existing_item["age"] >= 18) + + +@pytest.mark.parametrize( + 'mapper,expected_type', [(None, dict), (dict, dict), (Person, Person)] +) +def test_find_all_with_mapper(cosmos_db_repository: CosmosDBRepository, + tenant_id: str, + mapper: Callable, + expected_type: Callable): + result = cosmos_db_repository.find_all(tenant_id, mapper=mapper) + + assert result is not None + assert len(result) > 0 + assert type(result[0]) is expected_type, "The result type is not the expected" + + +def test_find_all_should_return_items_from_specified_partition_key_value( + cosmos_db_repository: CosmosDBRepository, + tenant_id: str, + another_tenant_id: str): + result_tenant_id = cosmos_db_repository.find_all(tenant_id) + + assert len(result_tenant_id) > 1 + assert all((i["tenant_id"] == tenant_id for i in result_tenant_id)) + + result_another_tenant_id = cosmos_db_repository.find_all(another_tenant_id) + + assert len(result_another_tenant_id) > 0 + assert all((i["tenant_id"] == another_tenant_id for i in result_another_tenant_id)) + + assert not any(item in result_another_tenant_id for item in result_tenant_id), \ + "There should be no interceptions" + + +def test_find_all_should_succeed_with_partition_key_value_with_no_items( + cosmos_db_repository: CosmosDBRepository): + no_items = cosmos_db_repository.find_all(fake.uuid4()) + + assert no_items is not None + assert len(no_items) == 0, "No items are expected" + + +def test_find_all_with_max_count(cosmos_db_repository: CosmosDBRepository, + tenant_id: str): + all_items = cosmos_db_repository.find_all(tenant_id) + + assert len(all_items) > 2 + + first_two_items = cosmos_db_repository.find_all(tenant_id, max_count=2) + assert len(first_two_items) == 2, "The result should be limited to 2" + + +def test_find_all_with_offset(cosmos_db_repository: CosmosDBRepository, + tenant_id: str): + result_all_items = cosmos_db_repository.find_all(tenant_id) + + assert len(result_all_items) >= 3 + + result_after_the_first_item = cosmos_db_repository.find_all(tenant_id, offset=1) + + assert result_after_the_first_item == result_all_items[1:] + + result_after_the_second_item = cosmos_db_repository.find_all(tenant_id, offset=2) + + assert result_after_the_second_item == result_all_items[2:] + + +@pytest.mark.parametrize( + 'mapper,expected_type', [(None, dict), (dict, dict), (Person, Person)] +) +def test_partial_update_with_mapper(cosmos_db_repository: CosmosDBRepository, + mapper: Callable, + expected_type: Callable): + changes = { + 'name': fake.name(), + 'email': fake.safe_email(), + } + + updated_item = cosmos_db_repository.partial_update(existing_item['id'], + changes, + existing_item['tenant_id'], + mapper=mapper) + + assert updated_item is not None + assert type(updated_item) is expected_type + + +def test_partial_update_with_new_partition_key_value_should_fail( + cosmos_db_repository: CosmosDBRepository, + another_tenant_id: str, + sample_item: dict): + changes = { + 
'name': fake.name(), + 'email': fake.safe_email(), + } + + try: + cosmos_db_repository.partial_update(sample_item['id'], changes, another_tenant_id) + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_partial_update_with_invalid_id_should_fail( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + changes = { + 'name': fake.name(), + 'email': fake.safe_email(), + } + + try: + cosmos_db_repository.partial_update(fake.uuid4(), changes, sample_item['tenant_id']) + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_partial_update_should_only_update_fields_in_changes( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + changes = { + 'name': fake.name(), + 'email': fake.safe_email(), + } + + updated_item = cosmos_db_repository.partial_update( + sample_item['id'], + changes, sample_item['tenant_id']) + + assert updated_item is not None + assert updated_item['name'] == changes["name"] != sample_item["name"] + assert updated_item['email'] == changes["email"] != sample_item["email"] + assert updated_item['id'] == sample_item["id"] + assert updated_item['tenant_id'] == sample_item["tenant_id"] + assert updated_item['age'] == sample_item["age"] + + +@pytest.mark.parametrize( + 'mapper,expected_type', [(None, dict), (dict, dict), (Person, Person)] +) +def test_update_with_mapper(cosmos_db_repository: CosmosDBRepository, + mapper: Callable, + expected_type: Callable): + changed_item = existing_item.copy() + changed_item.update({ + 'name': fake.name(), + 'email': fake.safe_email(), + }) + + updated_item = cosmos_db_repository.update(existing_item['id'], + changed_item, + mapper=mapper) + + assert updated_item is not None + assert type(updated_item) is expected_type + + +def test_update_with_invalid_id_should_fail(cosmos_db_repository: CosmosDBRepository): + changes = { + 'name': fake.name(), + 'email': fake.safe_email(), + } + + try: + cosmos_db_repository.update(fake.uuid4(), changes) + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_update_with_partial_changes_without_required_fields_it_should_fail( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + changes = { + 'id': sample_item['id'], + 'email': fake.safe_email(), + 'tenant_id': fake.uuid4(), + } + + try: + cosmos_db_repository.update(sample_item['id'], changes) + fail('It should have failed') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_update_with_partial_changes_with_required_fields_should_delete_the_missing_ones( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + changes = { + 'id': fake.uuid4(), + 'email': fake.safe_email(), + 'tenant_id': sample_item['tenant_id'], + } + + updated_item = cosmos_db_repository.update(sample_item['id'], changes) + + assert updated_item is not None + assert updated_item['id'] == changes["id"] != sample_item["id"] + assert updated_item['email'] == changes["email"] != sample_item["email"] + assert updated_item['tenant_id'] == changes["tenant_id"] + assert updated_item.get('name') is None + assert updated_item.get('age') is None + + try: + cosmos_db_repository.find(sample_item['id'], sample_item['tenant_id']) + fail('The previous version should not exist') + except Exception as e: + assert type(e) 
is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_delete_with_invalid_id_should_fail(cosmos_db_repository: CosmosDBRepository, + tenant_id: str): + try: + cosmos_db_repository.delete(fake.uuid4(), tenant_id) + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +@pytest.mark.parametrize( + 'mapper,expected_type', [(None, dict), (dict, dict), (Person, Person)] +) +def test_delete_with_mapper(cosmos_db_repository: CosmosDBRepository, + sample_item: dict, + mapper: Callable, + expected_type: Callable): + deleted_item = cosmos_db_repository.delete(sample_item['id'], + sample_item['tenant_id'], + mapper=mapper) + + assert deleted_item is not None + assert type(deleted_item) is expected_type + + try: + cosmos_db_repository.find(sample_item['id'], + sample_item['tenant_id'], + mapper=mapper) + fail('It should have not found the deleted item') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_find_can_find_deleted_item_only_if_visibile_only_is_true(cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + deleted_item = cosmos_db_repository.delete(sample_item['id'], sample_item['tenant_id']) + + assert deleted_item is not None + assert deleted_item['deleted'] is not None + + try: + cosmos_db_repository.find(sample_item['id'], sample_item['tenant_id']) + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + found_deleted_item = cosmos_db_repository.find(sample_item['id'], + sample_item['tenant_id'], + visible_only=False) + assert found_deleted_item is not None + + +def test_find_all_can_find_deleted_items_only_if_visibile_only_is_true( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + deleted_item = cosmos_db_repository.delete(sample_item['id'], sample_item['tenant_id']) + assert deleted_item is not None + assert deleted_item['deleted'] is not None + + visible_items = cosmos_db_repository.find_all(sample_item['tenant_id']) + + assert visible_items is not None + assert any(item['id'] == sample_item['id'] for item in visible_items) == False, \ + 'The deleted item should not be visible' + + all_items = cosmos_db_repository.find_all(sample_item['tenant_id'], visible_only=False) + + assert all_items is not None + assert any(item['id'] == sample_item['id'] for item in all_items), \ + 'Deleted item should be visible' + + +def test_delete_should_not_find_element_that_is_already_deleted( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + deleted_item = cosmos_db_repository.delete(sample_item['id'], sample_item['tenant_id']) + + assert deleted_item is not None + + try: + cosmos_db_repository.delete(deleted_item['id'], deleted_item['tenant_id']) + fail('It should have not found the deleted item') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_partial_update_should_not_find_element_that_is_already_deleted( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + deleted_item = cosmos_db_repository.delete(sample_item['id'], sample_item['tenant_id']) + + assert deleted_item is not None + + try: + changes = { + 'name': fake.name(), + 'email': fake.safe_email(), + } + cosmos_db_repository.partial_update(deleted_item['id'], + changes, + deleted_item['tenant_id']) + + fail('It should have not found the deleted item') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert 
e.status_code == 404 diff --git a/tests/commons/data_access_layer/azure/sql_repository_test.py b/tests/commons/data_access_layer/sql_test.py similarity index 100% rename from tests/commons/data_access_layer/azure/sql_repository_test.py rename to tests/commons/data_access_layer/sql_test.py diff --git a/tests/conftest.py b/tests/conftest.py index 1e481ced..78ec7584 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,14 @@ import pytest +from faker import Faker from flask import Flask from flask.testing import FlaskClient +from commons.data_access_layer.cosmos_db import CosmosDBRepository from time_tracker_api import create_app +fake = Faker() +Faker.seed() + @pytest.fixture(scope='session') def app() -> Flask: @@ -17,16 +22,90 @@ def client(app: Flask) -> FlaskClient: @pytest.fixture(scope="module") -def sql_repository(app: Flask): - with app.test_client(): - from tests.commons.data_access_layer.azure.resources import PersonSQLModel - from commons.data_access_layer.azure.sql_repository import db +def sql_model_class(): + from commons.data_access_layer.sql import db, AuditedSQLModel + class PersonSQLModel(db.Model, AuditedSQLModel): + __tablename__ = 'test' + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(80), unique=False, nullable=False) + email = db.Column(db.String(120), unique=True, nullable=False) + age = db.Column(db.Integer, nullable=False) + + def __repr__(self): + return '' % self.name + + return PersonSQLModel + + +@pytest.fixture(scope="module") +def sql_repository(app: Flask, sql_model_class): + with app.app_context(): + from commons.data_access_layer.sql import init_app, db + + if db is None: + init_app(app) + from commons.data_access_layer.sql import db + + db.metadata.create_all(bind=db.engine, tables=[sql_model_class.__table__]) + app.logger.info("SQl test models created!") + + from commons.data_access_layer.sql import SQLRepository + yield SQLRepository(sql_model_class) + + db.metadata.drop_all(bind=db.engine, tables=[sql_model_class.__table__]) + app.logger.info("SQL test models removed!") + + +@pytest.fixture(scope="module") +def cosmos_db_model(): + from azure.cosmos import PartitionKey + return { + 'id': 'tests', + 'partition_key': PartitionKey(path='/tenant_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/email']}, + ] + } + } + + +@pytest.yield_fixture(scope="module") +def cosmos_db_repository(app: Flask, cosmos_db_model) -> CosmosDBRepository: + with app.app_context(): + from commons.data_access_layer.cosmos_db import init_app, cosmos_helper + + if cosmos_helper is None: + init_app(app) + from commons.data_access_layer.cosmos_db import cosmos_helper + + app.logger.info("Creating Cosmos DB test models...") + cosmos_helper.create_container(cosmos_db_model) + app.logger.info("Cosmos DB test models created!") + + yield CosmosDBRepository.from_definition(cosmos_db_model) + + app.logger.info("Removing Cosmos DB test models...") + cosmos_helper.delete_container(cosmos_db_model["id"]) + app.logger.info("Cosmos DB test models removed!") + + +@pytest.fixture(scope="session") +def tenant_id() -> str: + return fake.uuid4() + + +@pytest.fixture(scope="session") +def another_tenant_id() -> str: + return fake.uuid4() - db.metadata.create_all(bind=db.engine, tables=[PersonSQLModel.__table__]) - print("Test models created!") - from commons.data_access_layer.azure.sql_repository import SQLRepository - yield SQLRepository(PersonSQLModel) +@pytest.fixture(scope="function") +def sample_item(cosmos_db_repository: CosmosDBRepository, 
tenant_id: str) -> dict: + sample_item_data = dict(id=fake.uuid4(), + name=fake.name(), + email=fake.safe_email(), + age=fake.pyint(min_value=10, max_value=80), + tenant_id=tenant_id) - db.metadata.drop_all(bind=db.engine, tables=[PersonSQLModel.__table__]) - print("Test models removed!") + return cosmos_db_repository.create(sample_item_data) diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index fb1045d3..735a884b 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -31,14 +31,11 @@ def test_create_activity_should_succeed_with_valid_request(client: FlaskClient, def test_create_activity_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.activities.activities_namespace import activity_dao - invalid_activity_data = valid_activity_data.copy().update({ - "invalid_field": 123, - }) repository_create_mock = mocker.patch.object(activity_dao.repository, 'create', return_value=fake_activity) - response = client.post("/activities", json=invalid_activity_data, follow_redirects=True) + response = client.post("/activities", json=None, follow_redirects=True) assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() @@ -123,15 +120,12 @@ def test_update_activity_should_succeed_with_valid_data(client: FlaskClient, moc def test_update_activity_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.activities.activities_namespace import activity_dao - invalid_activity_data = valid_activity_data.copy().update({ - "invalid_field": 123, - }) repository_update_mock = mocker.patch.object(activity_dao.repository, 'update', return_value=fake_activity) valid_id = fake.random_int(1, 9999) - response = client.put("/activities/%s" % valid_id, json=invalid_activity_data, follow_redirects=True) + response = client.put("/activities/%s" % valid_id, json=None, follow_redirects=True) assert HTTPStatus.BAD_REQUEST == response.status_code repository_update_mock.assert_not_called() diff --git a/tests/time_tracker_api/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py index 2e077640..5d5459a3 100644 --- a/tests/time_tracker_api/projects/projects_namespace_test.py +++ b/tests/time_tracker_api/projects/projects_namespace_test.py @@ -35,8 +35,9 @@ def test_create_project_should_succeed_with_valid_request(client: FlaskClient, m def test_create_project_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.projects.projects_namespace import project_dao - invalid_project_data = valid_project_data.copy().update({ - "type": 'anything', + invalid_project_data = valid_project_data.copy() + invalid_project_data.update({ + "project_type_id": fake.pyint(min_value=1, max_value=100), }) repository_create_mock = mocker.patch.object(project_dao.repository, 'create', @@ -125,8 +126,9 @@ def test_update_project_should_succeed_with_valid_data(client: FlaskClient, mock def test_update_project_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.projects.projects_namespace import project_dao - invalid_project_data = valid_project_data.copy().update({ - "type": 'anything', + invalid_project_data = valid_project_data.copy() + invalid_project_data.update({ + "project_type_id": fake.pyint(min_value=1, max_value=100), }) 
repository_update_mock = mocker.patch.object(project_dao.repository, 'update', diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index 09be8af9..e45e4510 100644 --- a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -35,7 +35,8 @@ def test_create_time_entry_should_succeed_with_valid_request(client: FlaskClient def test_create_time_entry_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao - invalid_time_entry_input = valid_time_entry_input.copy().update({ + invalid_time_entry_input = valid_time_entry_input.copy() + invalid_time_entry_input.update({ "project_id": None, }) repository_create_mock = mocker.patch.object(time_entries_dao.repository, @@ -110,8 +111,9 @@ def test_update_time_entry_should_succeed_with_valid_data(client: FlaskClient, m def test_update_time_entry_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao - invalid_time_entry_data = valid_time_entry_input.copy().update({ - "project_id": 'anything', + invalid_time_entry_data = valid_time_entry_input.copy() + invalid_time_entry_data.update({ + "project_id": fake.pyint(min_value=1, max_value=100), }) repository_update_mock = mocker.patch.object(time_entries_dao.repository, 'update', diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index bfbb6c57..7305a0b9 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -1,3 +1,5 @@ +import abc + from time_tracker_api.database import CRUDDao @@ -8,8 +10,8 @@ class ActivityDao(CRUDDao): def create_dao() -> ActivityDao: from sqlalchemy_utils import UUIDType import uuid - from commons.data_access_layer.azure.sql_repository import db - from commons.data_access_layer.azure.sql_repository import SQLCRUDDao + from commons.data_access_layer.sql import db + from commons.data_access_layer.sql import SQLCRUDDao class ActivitySQLModel(db.Model): __tablename__ = 'activity' @@ -25,7 +27,7 @@ def __repr__(self): def __str___(self): return "the activity \"%s\"" % self.name - class ActivitySQLDao(SQLCRUDDao): + class ActivitySQLDao(ActivityDao, SQLCRUDDao): def __init__(self): SQLCRUDDao.__init__(self, ActivitySQLModel) diff --git a/time_tracker_api/activities/activities_namespace.py b/time_tracker_api/activities/activities_namespace.py index c63b5f56..e87f2ccd 100644 --- a/time_tracker_api/activities/activities_namespace.py +++ b/time_tracker_api/activities/activities_namespace.py @@ -4,7 +4,6 @@ from time_tracker_api.activities.activities_model import create_dao from time_tracker_api.api import audit_fields -from time_tracker_api.activities.activities_model import create_dao faker = Faker() diff --git a/time_tracker_api/config.py b/time_tracker_api/config.py index ebdddb81..7376b08c 100644 --- a/time_tracker_api/config.py +++ b/time_tracker_api/config.py @@ -27,12 +27,19 @@ class SQLConfig(Config): SQLALCHEMY_DATABASE_URI = DATABASE_URI -class TestConfig(SQLConfig): +class CosmosDB(Config): + DATABASE_URI = os.environ.get('DATABASE_URI') + DATABASE_ACCOUNT_URI = os.environ.get('DATABASE_ACCOUNT_URI') + DATABASE_MASTER_KEY = os.environ.get('DATABASE_MASTER_KEY') + DATABASE_NAME = 
os.environ.get('DATABASE_NAME') + + +class TestConfig(CosmosDB, SQLConfig): TESTING = True FLASK_DEBUG = True TEST_TABLE = 'tests' - DATABASE_URI = os.environ.get('DATABASE_URI', 'sqlite:///:memory:') - SQLALCHEMY_DATABASE_URI = DATABASE_URI + DATABASE_URI = os.environ.get('DATABASE_URI') + SQLALCHEMY_DATABASE_URI = DATABASE_URI or 'sqlite:///:memory:' class ProductionConfig(Config): @@ -41,8 +48,8 @@ class ProductionConfig(Config): FLASK_ENV = 'production' -class AzureConfig(SQLConfig): - DATABASE_URI = os.environ.get('SQLAZURECONNSTR_DATABASE_URI', SQLConfig.DATABASE_URI) +class AzureConfig(CosmosDB): + DATABASE_URI = os.environ.get('DATABASE_URI', os.environ.get('SQLAZURECONNSTR_DATABASE_URI')) SQLALCHEMY_DATABASE_URI = DATABASE_URI diff --git a/time_tracker_api/customers/customers_model.py b/time_tracker_api/customers/customers_model.py index 9659ec5a..cc994bfe 100644 --- a/time_tracker_api/customers/customers_model.py +++ b/time_tracker_api/customers/customers_model.py @@ -6,9 +6,9 @@ class CustomerDao(CRUDDao): def create_dao() -> CustomerDao: - from time_tracker_api.sql_repository import db + from commons.data_access_layer.sql import db from time_tracker_api.database import COMMENTS_MAX_LENGTH - from time_tracker_api.sql_repository import SQLCRUDDao + from commons.data_access_layer.sql import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid diff --git a/time_tracker_api/database.py b/time_tracker_api/database.py index 094c894d..c9a21b54 100644 --- a/time_tracker_api/database.py +++ b/time_tracker_api/database.py @@ -52,7 +52,28 @@ def __call__(self, *args, **kwargs): def init_app(app: Flask) -> None: - from commons.data_access_layer.azure.sql_repository import init_app, SQLSeeder + init_sql(app) + + +def init_sql(app: Flask) -> None: + from commons.data_access_layer.sql import init_app, SQLSeeder init_app(app) global seeder seeder = SQLSeeder() + + +def init_cosmos_db(app: Flask) -> None: + # from commons.data_access_layer.azure.cosmos_db import cosmos_helper + class CosmosSeeder(Seeder): + def run(self): + print("Provisioning namespace(database)...") + # cosmos_helper.create_container() + print("Database seeded!") + + def fresh(self): + print("Removing namespace(database)...") + # cosmos_helper.remove_container() + self.run() + + global seeder + seeder = CosmosSeeder() diff --git a/time_tracker_api/project_types/project_types_model.py b/time_tracker_api/project_types/project_types_model.py index abab5477..589ce5ef 100644 --- a/time_tracker_api/project_types/project_types_model.py +++ b/time_tracker_api/project_types/project_types_model.py @@ -6,9 +6,9 @@ class ProjectTypeDao(CRUDDao): def create_dao() -> ProjectTypeDao: - from time_tracker_api.sql_repository import db + from commons.data_access_layer.sql import db from time_tracker_api.database import COMMENTS_MAX_LENGTH - from time_tracker_api.sql_repository import SQLCRUDDao + from commons.data_access_layer.sql import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 5da7ddcf..2c580ca6 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -17,12 +17,11 @@ class ProjectDao(CRUDDao): def create_dao() -> ProjectDao: - from commons.data_access_layer.azure.sql_repository import db + from commons.data_access_layer.sql import db from time_tracker_api.database import COMMENTS_MAX_LENGTH - from time_tracker_api.sql_repository import SQLCRUDDao from 
sqlalchemy_utils import UUIDType import uuid - from commons.data_access_layer.azure.sql_repository import SQLCRUDDao, AuditedSQLModel + from commons.data_access_layer.sql import SQLCRUDDao class ProjectSQLModel(db.Model): __tablename__ = 'project' diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 80a3aef4..7759ace8 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -8,11 +8,11 @@ class TimeEntriesDao(CRUDDao): def create_dao() -> TimeEntriesDao: - from commons.data_access_layer.azure.sql_repository import db + from commons.data_access_layer.sql import db from time_tracker_api.database import COMMENTS_MAX_LENGTH from sqlalchemy_utils import UUIDType import uuid - from commons.data_access_layer.azure.sql_repository import SQLCRUDDao + from commons.data_access_layer.sql import SQLCRUDDao class TimeEntrySQLModel(db.Model): __tablename__ = 'time_entry' From ea21a76c9ddb07d08532f0a9c7b76f685e679001 Mon Sep 17 00:00:00 2001 From: roberto Date: Wed, 8 Apr 2020 14:53:41 -0500 Subject: [PATCH 003/387] feat: add python-semantic-release library --- requirements/time_tracker_api/prod.txt | 3 +++ setup.cfg | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index f1221c4a..aad33b0d 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -23,6 +23,9 @@ flask-restplus==0.12.1 #CLI support Flask-Script==2.0.6 +#Semantic versioning +python-semantic-release==5.1.0 + # The Debug Toolbar Flask-DebugToolbar==0.11.0 diff --git a/setup.cfg b/setup.cfg index 824945c8..dd7700da 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,6 +2,10 @@ testpaths = tests addopts = -p no:warnings +[semantic_release] +version_variable = time_tracker_api/__init__.py:__version__ +upload_to_pypi = false + [coverage:run] branch = True source = From 8942f62ac300960a49ac80e7650d5637a8beacf1 Mon Sep 17 00:00:00 2001 From: roberto Date: Wed, 8 Apr 2020 14:55:18 -0500 Subject: [PATCH 004/387] docs: add instructions of semantic versioning --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index 0338ae95..f7364100 100644 --- a/README.md +++ b/README.md @@ -166,6 +166,18 @@ as well as its correspondent options. python cli.py gen_swagger_json -f ~/Downloads/swagger.json ``` +## Semantic versioning + +### Style +We use [angular commit message style](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits) as the standard commit message style. + +### Release +1. A Release can be done manually or automatically using CI, in either case the variable `GH_TOKEN` is required +to post releases to Github. The `GH_TOKEN` can be generated following [these steps](https://help.github.com/es/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) + +2. We use the command `semantic-release publish` after a successful PR to make a release. Check the library +[python-semantic-release](https://python-semantic-release.readthedocs.io/en/latest/commands.html#publish) for details of underlying operations. + ## Run as docker container 1. 
Build image ```bash From 5adc3186c23608c8e11cb3dde4e0870c67198fd6 Mon Sep 17 00:00:00 2001 From: roberto Date: Wed, 8 Apr 2020 14:56:17 -0500 Subject: [PATCH 005/387] refactor: update app version --- time_tracker_api/__init__.py | 2 ++ time_tracker_api/api.py | 9 ++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/time_tracker_api/__init__.py b/time_tracker_api/__init__.py index 7b8489e1..ce7c881f 100644 --- a/time_tracker_api/__init__.py +++ b/time_tracker_api/__init__.py @@ -3,6 +3,8 @@ from flask import Flask +__version__ = '0.0.2' + flask_app: Flask = None diff --git a/time_tracker_api/api.py b/time_tracker_api/api.py index 18d19b97..90dec35f 100644 --- a/time_tracker_api/api.py +++ b/time_tracker_api/api.py @@ -5,12 +5,15 @@ from flask import current_app as app from flask_restplus import Api, fields from flask_restplus._http import HTTPStatus +from time_tracker_api import __version__ faker = Faker() -api = Api(version='1.0.1', - title="TimeTracker API", - description="API for the TimeTracker project") +api = Api( + version=__version__, + title="TimeTracker API", + description="API for the TimeTracker project" +) # Common models structure audit_fields = { From 6b55e0555608b4714202ecb0dc5fe9c4f104a969 Mon Sep 17 00:00:00 2001 From: roberto Date: Wed, 8 Apr 2020 17:23:42 -0500 Subject: [PATCH 006/387] fix: add missing dependency in dockerfile --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index bf9bf448..3fb32d00 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.8-alpine -ARG buildDeps='g++ gnupg curl' +ARG buildDeps='g++ gnupg curl libffi-dev openssl-dev' WORKDIR /usr/src/app From 65fe7766b4b6129e1c8b50bd983f75c3be9d65da Mon Sep 17 00:00:00 2001 From: roberto Date: Wed, 8 Apr 2020 17:49:45 -0500 Subject: [PATCH 007/387] docs: add CI link in README.md --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index f7364100..9581e61e 100644 --- a/README.md +++ b/README.md @@ -172,8 +172,7 @@ python cli.py gen_swagger_json -f ~/Downloads/swagger.json We use [angular commit message style](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits) as the standard commit message style. ### Release -1. A Release can be done manually or automatically using CI, in either case the variable `GH_TOKEN` is required -to post releases to Github. The `GH_TOKEN` can be generated following [these steps](https://help.github.com/es/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) +1. The release is automatically done by the [TimeTracker CI](https://dev.azure.com/IOET-DevOps/TimeTracker-API/_build?definitionId=1&_a=summary) although can also be done manually. The variable `GH_TOKEN` is required to post releases to Github. The `GH_TOKEN` can be generated following [these steps](https://help.github.com/es/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line). 2. We use the command `semantic-release publish` after a successful PR to make a release. Check the library [python-semantic-release](https://python-semantic-release.readthedocs.io/en/latest/commands.html#publish) for details of underlying operations. 
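Because python-semantic-release derives the next version number from the commit history, following the angular style described above is what actually determines the release type. A purely illustrative commit message (not taken from this repository) that would trigger a minor release could look like:

```
feat(projects): store projects in Cosmos DB

Adds a Cosmos DB backed repository for the project model.
```
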
From c2e2be1aaab6e68aaa8a1186f86f07579f9ea1b1 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 8 Apr 2020 18:05:22 -0500 Subject: [PATCH 008/387] 0.1.0 Automatically generated by python-semantic-release --- time_tracker_api/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/time_tracker_api/__init__.py b/time_tracker_api/__init__.py index ce7c881f..b76493d4 100644 --- a/time_tracker_api/__init__.py +++ b/time_tracker_api/__init__.py @@ -3,7 +3,7 @@ from flask import Flask -__version__ = '0.0.2' +__version__ = '0.1.0' flask_app: Flask = None From ae9f93114204e1f0f01911263b5b1f0d65c9797d Mon Sep 17 00:00:00 2001 From: roberto Date: Thu, 9 Apr 2020 13:18:39 -0500 Subject: [PATCH 009/387] chore: update python-semantic-release version to 5.2.0 --- requirements/time_tracker_api/prod.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index aad33b0d..d72284f5 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -24,7 +24,7 @@ flask-restplus==0.12.1 Flask-Script==2.0.6 #Semantic versioning -python-semantic-release==5.1.0 +python-semantic-release==5.2.0 # The Debug Toolbar Flask-DebugToolbar==0.11.0 From 56cc8cf49bfa451c60bc1dbf6a8f8559d6628553 Mon Sep 17 00:00:00 2001 From: roberto Date: Thu, 9 Apr 2020 13:20:52 -0500 Subject: [PATCH 010/387] chore: add variable for releases in windows --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index dd7700da..a6500ee6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,6 +5,7 @@ addopts = -p no:warnings [semantic_release] version_variable = time_tracker_api/__init__.py:__version__ upload_to_pypi = false +remove_dist = false [coverage:run] branch = True From b8d2aa197b8821b86bd5c883a69895a8f731cc27 Mon Sep 17 00:00:00 2001 From: EliuX Date: Thu, 9 Apr 2020 18:06:41 -0500 Subject: [PATCH 011/387] Closes #36 Allow to run migrations --- .env.template | 4 +- .gitignore | 5 +- README.md | 36 ++++++++- commons/data_access_layer/cosmos_db.py | 64 +++++++++------ migrations/01-initialize-db.py | 35 +++++++++ migrations/__init__.py | 77 +++++++++++++++++++ requirements/migrations.txt | 6 ++ .../data_access_layer/cosmos_db_test.py | 29 +++++++ tests/conftest.py | 2 +- .../activities/activities_model.py | 14 +++- time_tracker_api/config.py | 17 ++-- time_tracker_api/customers/customers_model.py | 14 ++++ .../project_types/project_types_model.py | 14 ++++ time_tracker_api/projects/projects_model.py | 14 ++++ .../projects/projects_namespace.py | 2 +- .../time_entries/time_entries_model.py | 13 ++++ 16 files changed, 305 insertions(+), 41 deletions(-) create mode 100644 migrations/01-initialize-db.py create mode 100644 migrations/__init__.py create mode 100644 requirements/migrations.txt diff --git a/.env.template b/.env.template index 16549078..31a135a5 100644 --- a/.env.template +++ b/.env.template @@ -4,11 +4,11 @@ export FLASK_APP=time_tracker_api # Common attributes ## In case you use an Azure SQL database, you must specify the database connection URI. 
Check out the README.md for more details -#export DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server +#export SQL_DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server ## For Azure Cosmos DB export DATABASE_ACCOUNT_URI=https://.documents.azure.com:443 export DATABASE_MASTER_KEY= export DATABASE_NAME= ### or -# export DATABASE_URI=AccountEndpoint=;AccountKey= +# export COSMOS_DATABASE_URI=AccountEndpoint=;AccountKey= diff --git a/.gitignore b/.gitignore index e5f5178c..54bf6d95 100644 --- a/.gitignore +++ b/.gitignore @@ -28,5 +28,8 @@ htmlcov/ timetracker-api-postman-collection.json swagger.json -# Ignore any SQLite generated database +# SQLite databases *.db + +# Local migration files +migration_status.csv diff --git a/README.md b/README.md index 0338ae95..b9036183 100644 --- a/README.md +++ b/README.md @@ -60,10 +60,10 @@ Driver=/usr/local/lib/libmsodbcsql.17.dylib UsageCount=2 ``` -Then specify the driver name, in this case _DBC Driver 17 for SQL Server_ in the `DATABASE_URI`, e.g.: +Then specify the driver name, in this case _DBC Driver 17 for SQL Server_ in the `SQL_DATABASE_URI`, e.g.: ```.dotenv -DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server +SQL_DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server ``` To troubleshoot issues regarding this part please check out: @@ -115,7 +115,7 @@ tests as [system testing](https://en.wikipedia.org/wiki/System_testing): python3 -m pytest -v ``` -The database tests will be done in the table `tests` of the database specified by the variable `DATABASE_URI`. If this +The database tests will be done in the table `tests` of the database specified by the variable `SQL_DATABASE_URI`. If this variable is not specified it will automatically connect to SQLite database in-memory. This will do, because we are using [SQL Alchemy](https://www.sqlalchemy.org/features.html) to be able connect to any SQL database maintaining the same codebase. @@ -179,6 +179,36 @@ docker run -p 5000:5000 time_tracker_api:local 3. Visit `127.0.0.1:5000` +## Migrations +Looking for a DB-agnostic migration tool, the only choice I found was [migrate-anything](https://pypi.org/project/migrate-anything/). +An specific requirement file was created to run the migrations in `requirements/migrations.txt`. This way we do not mix +any possible vulnerable dependency brought by these dependencies to the environment `prod`. Therefore the dependencies +to run the migrations shall be installed this way: + +```bash +pip install -r requirements//prod.txt +pip install -r requirements/migrations.txt +``` + +All the migrations will be handled and created in the python package `migrations`. In order to create a migration we +must do it manually (for now) and prefixed by a number, e.g. `migrations/01-initialize-db.py` in order to warranty the +order of execution alphabetically. +Inside every migration there is an `up` and `down` method. The `down` method is executed from the persisted migration in +the database. Whe a `down` logic that used external dependencies was tested it failed, whilst I put that same logic in +the an `up` method it run correctly. In general the library seems to present [design issues](https://github.com/Lieturd/migrate-anything/issues/3). +Therefore, it is recommended to apply changes just in one direction: `up`. 
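As a rough sketch of what such a migration module can look like under those conventions (the file name `02-create-sample-container.py` and the `sample` container definition below are invented for illustration; the real initial migration in this repository is `migrations/01-initialize-db.py`):

```python
# migrations/02-create-sample-container.py (hypothetical example)
from azure.cosmos import PartitionKey


def up():
    # cosmos_helper is initialized by migrations/__init__.py when the Flask app is created
    from commons.data_access_layer.cosmos_db import cosmos_helper

    # Create the container only if it does not exist yet, so re-runs stay idempotent
    cosmos_helper.create_container_if_not_exists({
        'id': 'sample',
        'partition_key': PartitionKey(path='/tenant_id'),
    })


def down():
    # Changes are applied in one direction only, as recommended above
    print("Not implemented!")
```
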
+For more information, please check out [some examples](https://github.com/Lieturd/migrate-anything/tree/master/examples) +that illustrates the usage of this migration tool. + +Basically, for running the migrations you must execute + +```bash +migrate-anything migrations +``` + +They will be automatically run during the Continuous Deployment process. + + ## Built with - [Python version 3](https://www.python.org/download/releases/3.0/) as backend programming language. Strong typing for the win. diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py index 13d9272d..b0b494e8 100644 --- a/commons/data_access_layer/cosmos_db.py +++ b/commons/data_access_layer/cosmos_db.py @@ -1,20 +1,28 @@ import dataclasses +import logging import uuid from typing import Callable import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions -from azure.cosmos import ContainerProxy +from azure.cosmos import ContainerProxy, PartitionKey from flask import Flask class CosmosDBFacade: - def __init__(self, app: Flask): # pragma: no cover - self.app = app + def __init__(self, client, db_id: str, logger=None): # pragma: no cover + self.client = client + self.db = self.client.get_database_client(db_id) + if logger is None: + self.logger = logging.getLogger(CosmosDBFacade.__name__) + else: + self.logger = logger - db_uri = app.config.get('DATABASE_URI') + @classmethod + def from_flask_config(cls, app: Flask): + db_uri = app.config.get('COSMOS_DATABASE_URI') if db_uri is None: - app.logger.warn("DATABASE_URI was not found. Looking for alternative variables.") + app.logger.warn("COSMOS_DATABASE_URI was not found. Looking for alternative variables.") account_uri = app.config.get('DATABASE_ACCOUNT_URI') if account_uri is None: raise EnvironmentError("DATABASE_ACCOUNT_URI is not defined in the environment") @@ -23,31 +31,26 @@ def __init__(self, app: Flask): # pragma: no cover if master_key is None: raise EnvironmentError("DATABASE_MASTER_KEY is not defined in the environment") - self.client = cosmos_client.CosmosClient(account_uri, {'masterKey': master_key}, - user_agent="CosmosDBDotnetQuickstart", - user_agent_overwrite=True) + client = cosmos_client.CosmosClient(account_uri, {'masterKey': master_key}, + user_agent="CosmosDBDotnetQuickstart", + user_agent_overwrite=True) else: - self.client = cosmos_client.CosmosClient.from_connection_string(db_uri) + client = cosmos_client.CosmosClient.from_connection_string(db_uri) db_id = app.config.get('DATABASE_NAME') if db_id is None: raise EnvironmentError("DATABASE_NAME is not defined in the environment") - self.db = self.client.get_database_client(db_id) + return cls(client, db_id, logger=app.logger) def create_container(self, container_definition: dict): - try: - return self.db.create_container(**container_definition) + return self.db.create_container(**container_definition) - except exceptions.CosmosResourceExistsError: # pragma: no cover - self.app.logger.info('Container with id \'{0}\' was found'.format(container_definition["id"])) + def create_container_if_not_exists(self, container_definition: dict): + return self.db.create_container_if_not_exists(**container_definition) def delete_container(self, container_id: str): - try: - return self.db.delete_container(container_id) - - except exceptions.CosmosHttpResponseError: # pragma: no cover - self.app.logger.info('Container with id \'{0}\' was not deleted'.format(container_id)) + return self.db.delete_container(container_id) cosmos_helper: CosmosDBFacade = 
None @@ -61,8 +64,13 @@ def __init__(self, data): setattr(self, k, v) +def partition_key_attribute(pk: PartitionKey) -> str: + return pk.path.strip('/') + + class CosmosDBRepository: def __init__(self, container_id: str, + partition_key_attribute: str, mapper: Callable = None, custom_cosmos_helper: CosmosDBFacade = None): global cosmos_helper @@ -71,12 +79,16 @@ def __init__(self, container_id: str, raise ValueError("The cosmos_db module has not been initialized!") self.mapper = mapper self.container: ContainerProxy = self.cosmos_helper.db.get_container_client(container_id) + self.partition_key_attribute: str = partition_key_attribute @classmethod def from_definition(cls, container_definition: dict, mapper: Callable = None, custom_cosmos_helper: CosmosDBFacade = None): - return cls(container_definition['id'], mapper, custom_cosmos_helper) + pk_attrib = partition_key_attribute(container_definition['partition_key']) + return cls(container_definition['id'], pk_attrib, + mapper=mapper, + custom_cosmos_helper=custom_cosmos_helper) def create(self, data: dict, mapper: Callable = None): function_mapper = self.get_mapper_or_dict(mapper) @@ -93,11 +105,12 @@ def find_all(self, partition_key_value: str, max_count=None, offset=0, max_count = self.get_page_size_or(max_count) result = self.container.query_items( query=""" - SELECT * FROM c WHERE c.tenant_id=@tenant_id AND {visibility_condition} + SELECT * FROM c WHERE c.{partition_key_attribute}=@partition_key_value AND {visibility_condition} OFFSET @offset LIMIT @max_count - """.format(visibility_condition=self.create_sql_condition_for_visibility(visible_only)), + """.format(partition_key_attribute=self.partition_key_attribute, + visibility_condition=self.create_sql_condition_for_visibility(visible_only)), parameters=[ - {"name": "@tenant_id", "value": partition_key_value}, + {"name": "@partition_key_value", "value": partition_key_value}, {"name": "@offset", "value": offset}, {"name": "@max_count", "value": max_count}, ], @@ -122,6 +135,9 @@ def delete(self, id: str, partition_key_value: str, mapper: Callable = None): 'deleted': str(uuid.uuid4()) }, partition_key_value, visible_only=True, mapper=mapper) + def delete_permanently(self, id: str, partition_key_value: str) -> None: + self.container.delete_item(id, partition_key_value) + def check_visibility(self, item, throw_not_found_if_deleted): if throw_not_found_if_deleted and item.get('deleted') is not None: raise exceptions.CosmosResourceNotFoundError(message='Deleted item', @@ -146,4 +162,4 @@ def get_page_size_or(self, custom_page_size: int) -> int: def init_app(app: Flask) -> None: global cosmos_helper - cosmos_helper = CosmosDBFacade(app) + cosmos_helper = CosmosDBFacade.from_flask_config(app) diff --git a/migrations/01-initialize-db.py b/migrations/01-initialize-db.py new file mode 100644 index 00000000..3df78199 --- /dev/null +++ b/migrations/01-initialize-db.py @@ -0,0 +1,35 @@ +def up(): + from commons.data_access_layer.cosmos_db import cosmos_helper + import azure.cosmos.exceptions as exceptions + from . 
import app + + app.logger.info("Creating TimeTracker initial containers...") + + try: + app.logger.info('- Project') + from time_tracker_api.projects.projects_model import container_definition as project_definition + cosmos_helper.create_container(project_definition) + + app.logger.info('- Project type') + from time_tracker_api.project_types.project_types_model import container_definition as project_type_definition + cosmos_helper.create_container(project_type_definition) + + app.logger.info('- Activity') + from time_tracker_api.activities.activities_model import container_definition as activity_definition + cosmos_helper.create_container(activity_definition) + + app.logger.info('- Customer') + from time_tracker_api.customers.customers_model import container_definition as customer_definition + cosmos_helper.create_container(customer_definition) + + app.logger.info('- Time entry') + from time_tracker_api.time_entries.time_entries_model import container_definition as time_entry_definition + cosmos_helper.create_container(time_entry_definition) + except exceptions.CosmosResourceExistsError as e: + app.logger.warning("Unexpected error while creating initial database schema: %s" % e.message) + + app.logger.info("Done!") + + +def down(): + print("Not implemented!") diff --git a/migrations/__init__.py b/migrations/__init__.py new file mode 100644 index 00000000..65cf435b --- /dev/null +++ b/migrations/__init__.py @@ -0,0 +1,77 @@ +from azure.cosmos import PartitionKey +from migrate_anything import configure +from migrate_anything.storage import Storage + +from time_tracker_api import create_app + + +class CustomStorage(object): + def __init__(self, file): + self.file = file + + def save_migration(self, name, code): + with open(self.file, "a", encoding="utf-8") as file: + file.write("{},{}\n".format(name, code)) + + def list_migrations(self): + try: + with open(self.file, encoding="utf-8") as file: + return [ + line.split(",") + for line in file.readlines() + if line.strip() # Skip empty lines + ] + except FileNotFoundError: + return [] + + def remove_migration(self, name): + migrations = [ + migration for migration in self.list_migrations() if migration[0] != name + ] + + with open(self.file, "w", encoding="utf-8") as file: + for row in migrations: + file.write("{},{}\n".format(*row)) + + +app = create_app('time_tracker_api.config.CLIConfig') +from commons.data_access_layer.cosmos_db import cosmos_helper, init_app, CosmosDBRepository + +if cosmos_helper is None: + init_app(app) + from commons.data_access_layer.cosmos_db import cosmos_helper + + +class CosmosDBStorage(Storage): + def __init__(self, collection_id, app_id): + self.collection_id = collection_id + self.app_id = app_id + migrations_definition = { + 'id': collection_id, + 'partition_key': PartitionKey(path='/app_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/name']}, + ] + } + } + cosmos_helper.create_container_if_not_exists(migrations_definition) + self.repository = CosmosDBRepository.from_definition(migrations_definition) + + def save_migration(self, name, code): + self.repository.create({"id": name, + "name": name, + "code": code, + "app_id": self.app_id}) + + def list_migrations(self): + migrations = self.repository.find_all(self.app_id) + return [ + [item['name'], item['code']] for item in migrations + ] + + def remove_migration(self, name): + self.repository.delete_permanently(name, self.app_id) + + +configure(storage=CosmosDBStorage("migrations", "time-tracker-api")) diff --git a/requirements/migrations.txt 
b/requirements/migrations.txt new file mode 100644 index 00000000..6b95ae8e --- /dev/null +++ b/requirements/migrations.txt @@ -0,0 +1,6 @@ +# requirements/migrations.txt + +# For running any kind of data migration + +# Migration tool +migrate-anything==0.1.6 \ No newline at end of file diff --git a/tests/commons/data_access_layer/cosmos_db_test.py b/tests/commons/data_access_layer/cosmos_db_test.py index d369cf20..b126df89 100644 --- a/tests/commons/data_access_layer/cosmos_db_test.py +++ b/tests/commons/data_access_layer/cosmos_db_test.py @@ -507,3 +507,32 @@ def test_partial_update_should_not_find_element_that_is_already_deleted( except Exception as e: assert type(e) is CosmosResourceNotFoundError assert e.status_code == 404 + + +def test_delete_permanently_with_invalid_id_should_fail( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + try: + cosmos_db_repository.delete_permanently(fake.uuid4(), sample_item['tenant_id']) + fail('It should have not found the deleted item') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 + + +def test_delete_permanently_with_valid_id_should_succeed( + cosmos_db_repository: CosmosDBRepository, + sample_item: dict): + found_item = cosmos_db_repository.find(sample_item['id'], sample_item['tenant_id']) + + assert found_item is not None + assert found_item['id'] == sample_item['id'] + + cosmos_db_repository.delete_permanently(sample_item['id'], sample_item['tenant_id']) + + try: + cosmos_db_repository.find(sample_item['id'], sample_item['tenant_id']) + fail('It should have not found the deleted item') + except Exception as e: + assert type(e) is CosmosResourceNotFoundError + assert e.status_code == 404 diff --git a/tests/conftest.py b/tests/conftest.py index 78ec7584..ed9034e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -80,7 +80,7 @@ def cosmos_db_repository(app: Flask, cosmos_db_model) -> CosmosDBRepository: from commons.data_access_layer.cosmos_db import cosmos_helper app.logger.info("Creating Cosmos DB test models...") - cosmos_helper.create_container(cosmos_db_model) + cosmos_helper.create_container_if_not_exists(cosmos_db_model) app.logger.info("Cosmos DB test models created!") yield CosmosDBRepository.from_definition(cosmos_db_model) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 7305a0b9..ecb954fc 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -1,4 +1,4 @@ -import abc +from azure.cosmos import PartitionKey from time_tracker_api.database import CRUDDao @@ -32,3 +32,15 @@ def __init__(self): SQLCRUDDao.__init__(self, ActivitySQLModel) return ActivitySQLDao() + + +container_definition = { + 'id': 'activity', + 'partition_key': PartitionKey(path='/tenant_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/name']}, + {'paths': ['/deleted']}, + ] + } +} diff --git a/time_tracker_api/config.py b/time_tracker_api/config.py index 7376b08c..c91f50b0 100644 --- a/time_tracker_api/config.py +++ b/time_tracker_api/config.py @@ -7,7 +7,7 @@ class Config: SECRET_KEY = generate_dev_secret_key() - DATABASE_URI = os.environ.get('DATABASE_URI') + SQL_DATABASE_URI = os.environ.get('SQL_DATABASE_URI') PROPAGATE_EXCEPTIONS = True RESTPLUS_VALIDATE = True DEBUG = True @@ -23,12 +23,12 @@ class DevelopmentConfig(Config): class SQLConfig(Config): SQLALCHEMY_COMMIT_ON_TEARDOWN = True SQLALCHEMY_TRACK_MODIFICATIONS = False - DATABASE_URI = 
os.environ.get('DATABASE_URI') - SQLALCHEMY_DATABASE_URI = DATABASE_URI + SQL_DATABASE_URI = os.environ.get('SQL_DATABASE_URI') + SQLALCHEMY_DATABASE_URI = SQL_DATABASE_URI class CosmosDB(Config): - DATABASE_URI = os.environ.get('DATABASE_URI') + COSMOS_DATABASE_URI = os.environ.get('COSMOS_DATABASE_URI') DATABASE_ACCOUNT_URI = os.environ.get('DATABASE_ACCOUNT_URI') DATABASE_MASTER_KEY = os.environ.get('DATABASE_MASTER_KEY') DATABASE_NAME = os.environ.get('DATABASE_NAME') @@ -38,8 +38,8 @@ class TestConfig(CosmosDB, SQLConfig): TESTING = True FLASK_DEBUG = True TEST_TABLE = 'tests' - DATABASE_URI = os.environ.get('DATABASE_URI') - SQLALCHEMY_DATABASE_URI = DATABASE_URI or 'sqlite:///:memory:' + SQL_DATABASE_URI = os.environ.get('SQL_DATABASE_URI') + SQLALCHEMY_DATABASE_URI = SQL_DATABASE_URI or 'sqlite:///:memory:' class ProductionConfig(Config): @@ -49,8 +49,9 @@ class ProductionConfig(Config): class AzureConfig(CosmosDB): - DATABASE_URI = os.environ.get('DATABASE_URI', os.environ.get('SQLAZURECONNSTR_DATABASE_URI')) - SQLALCHEMY_DATABASE_URI = DATABASE_URI + SQL_DATABASE_URI = os.environ.get('SQL_DATABASE_URI', os.environ.get('SQLCONNSTR_DATABASE_URI')) + COSMOS_DATABASE_URI = os.environ.get('COSMOS_DATABASE_URI', os.environ.get('CUSTOMCONNSTR_COSMOS_DATABASE_URI')) + SQLALCHEMY_DATABASE_URI = SQL_DATABASE_URI class AzureDevelopmentConfig(DevelopmentConfig, AzureConfig): diff --git a/time_tracker_api/customers/customers_model.py b/time_tracker_api/customers/customers_model.py index cc994bfe..2f4e568c 100644 --- a/time_tracker_api/customers/customers_model.py +++ b/time_tracker_api/customers/customers_model.py @@ -1,3 +1,5 @@ +from azure.cosmos import PartitionKey + from time_tracker_api.database import CRUDDao @@ -31,3 +33,15 @@ def __init__(self): SQLCRUDDao.__init__(self, CustomerSQLModel) return CustomerSQLDao() + + +container_definition = { + 'id': 'customer', + 'partition_key': PartitionKey(path='/tenant_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/name']}, + {'paths': ['/deleted']}, + ] + } +} diff --git a/time_tracker_api/project_types/project_types_model.py b/time_tracker_api/project_types/project_types_model.py index 589ce5ef..136be198 100644 --- a/time_tracker_api/project_types/project_types_model.py +++ b/time_tracker_api/project_types/project_types_model.py @@ -1,3 +1,5 @@ +from azure.cosmos import PartitionKey + from time_tracker_api.database import CRUDDao @@ -33,3 +35,15 @@ def __init__(self): SQLCRUDDao.__init__(self, ProjectTypeSQLModel) return ProjectTypeSQLDao() + + +container_definition = { + 'id': 'project_type', + 'partition_key': PartitionKey(path='/customer_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/name']}, + {'paths': ['/deleted']}, + ] + } +} diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 2c580ca6..23c1c055 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -1,5 +1,7 @@ import enum +from azure.cosmos import PartitionKey + from time_tracker_api.database import CRUDDao @@ -44,3 +46,15 @@ def __init__(self): SQLCRUDDao.__init__(self, ProjectSQLModel) return ProjectSQLDao() + + +container_definition = { + 'id': 'project', + 'partition_key': PartitionKey(path='/tenant_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/name']}, + {'paths': ['/deleted']}, + ] + } +} diff --git a/time_tracker_api/projects/projects_namespace.py b/time_tracker_api/projects/projects_namespace.py index 
5e236a3f..9c4cdd3d 100644 --- a/time_tracker_api/projects/projects_namespace.py +++ b/time_tracker_api/projects/projects_namespace.py @@ -3,7 +3,7 @@ from flask_restplus._http import HTTPStatus from time_tracker_api.api import audit_fields -from time_tracker_api.projects.projects_model import PROJECT_TYPE, create_dao +from time_tracker_api.projects.projects_model import create_dao faker = Faker() diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 7759ace8..e876e884 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -1,3 +1,4 @@ +from azure.cosmos import PartitionKey from sqlalchemy_utils import ScalarListType from time_tracker_api.database import CRUDDao @@ -47,3 +48,15 @@ def __init__(self): SQLCRUDDao.__init__(self, TimeEntrySQLModel) return TimeEntriesSQLDao() + + +container_definition = { + 'id': 'time_entry', + 'partition_key': PartitionKey(path='/tenant_id'), + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/owner_id', '/end_date']}, + {'paths': ['/deleted']}, + ] + } +} From 8e287d87706c2496be1effe387fafb107a2ebc42 Mon Sep 17 00:00:00 2001 From: EliuX Date: Mon, 13 Apr 2020 21:11:35 -0500 Subject: [PATCH 012/387] Close #52 Implement project model in Cosmos DB --- README.md | 4 + cli.py | 17 ---- commons/data_access_layer/cosmos_db.py | 33 +++++++- .../data_access_layer}/database.py | 39 ++------- commons/data_access_layer/sql.py | 17 +--- migrations/__init__.py | 2 +- requirements/migrations.txt | 2 +- requirements/time_tracker_api/dev.txt | 2 +- .../projects/projects_namespace_test.py | 41 ++++++---- tests/time_tracker_api/smoke_test.py | 5 +- time_tracker_api/__init__.py | 2 +- .../activities/activities_model.py | 3 +- time_tracker_api/api.py | 34 +++----- time_tracker_api/customers/customers_model.py | 4 +- .../project_types/project_types_model.py | 5 +- time_tracker_api/projects/projects_model.py | 80 +++++++++---------- .../projects/projects_namespace.py | 14 ++-- time_tracker_api/security.py | 8 +- .../time_entries/time_entries_model.py | 5 +- .../time_entries/time_entries_namespace.py | 2 +- 20 files changed, 149 insertions(+), 170 deletions(-) rename {time_tracker_api => commons/data_access_layer}/database.py (50%) diff --git a/README.md b/README.md index 54011751..545c8473 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,11 @@ # time-tracker-api +[![Build status](https://dev.azure.com/IOET-DevOps/TimeTracker-API/_apis/build/status/TimeTracker-API%20-%20CI)](https://dev.azure.com/IOET-DevOps/TimeTracker-API/_build/latest?definitionId=1) + This is the mono-repository for the backend services and their common codebase + + ## Getting started Follow the following instructions to get the project ready to use ASAP. 
diff --git a/cli.py b/cli.py index 6cf2a08b..2cfa0600 100644 --- a/cli.py +++ b/cli.py @@ -46,23 +46,6 @@ def gen_postman_collection(filename='timetracker-api-postman-collection.json', save_data(parsed_json, filename) -@cli_manager.command -def seed(): - from time_tracker_api.database import seeder as seed - seed() - - -@cli_manager.command -def re_create_db(): - print('This is going to drop all tables and seed again the database') - confirm_answer = input('Do you confirm (Y) you want to remove all your data?\n') - if confirm_answer.upper() == 'Y': - from time_tracker_api.database import seeder - seeder.fresh() - else: - print('\nThis action was cancelled!') - - def save_data(data: str, filename: str) -> None: """ Save text content to a file """ if filename: diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py index b0b494e8..1d13d6c4 100644 --- a/commons/data_access_layer/cosmos_db.py +++ b/commons/data_access_layer/cosmos_db.py @@ -8,6 +8,9 @@ from azure.cosmos import ContainerProxy, PartitionKey from flask import Flask +from commons.data_access_layer.database import CRUDDao +from time_tracker_api.security import current_user_tenant_id + class CosmosDBFacade: def __init__(self, client, db_id: str, logger=None): # pragma: no cover @@ -122,7 +125,7 @@ def find_all(self, partition_key_value: str, max_count=None, offset=0, def partial_update(self, id: str, changes: dict, partition_key_value: str, visible_only=True, mapper: Callable = None): - item_data = self.find(id, partition_key_value, visible_only=visible_only) + item_data = self.find(id, partition_key_value, visible_only=visible_only, mapper=dict) item_data.update(changes) return self.update(id, item_data, mapper=mapper) @@ -160,6 +163,34 @@ def get_page_size_or(self, custom_page_size: int) -> int: return custom_page_size or 100 +class CosmosDBDao(CRUDDao): + def __init__(self, repository: CosmosDBRepository): + self.repository = repository + + def get_all(self) -> list: + tenant_id: str = current_user_tenant_id() + return self.repository.find_all(partition_key_value=tenant_id) + + def get(self, id): + tenant_id: str = current_user_tenant_id() + return self.repository.find(id, partition_key_value=tenant_id) + + def create(self, data: dict): + data['id'] = str(uuid.uuid4()) + data['tenant_id'] = current_user_tenant_id() + return self.repository.create(data) + + def update(self, id, data: dict): + tenant_id: str = current_user_tenant_id() + return self.repository.partial_update(id, + changes=data, + partition_key_value=tenant_id) + + def delete(self, id): + tenant_id: str = current_user_tenant_id() + self.repository.delete(id, partition_key_value=tenant_id) + + def init_app(app: Flask) -> None: global cosmos_helper cosmos_helper = CosmosDBFacade.from_flask_config(app) diff --git a/time_tracker_api/database.py b/commons/data_access_layer/database.py similarity index 50% rename from time_tracker_api/database.py rename to commons/data_access_layer/database.py index c9a21b54..077780cf 100644 --- a/time_tracker_api/database.py +++ b/commons/data_access_layer/database.py @@ -35,45 +35,16 @@ def delete(self, id): raise NotImplementedError # pragma: no cover -class Seeder(abc.ABC): - @abc.abstractmethod - def run(self): - raise NotImplementedError # pragma: no cover - - @abc.abstractmethod - def fresh(self): - raise NotImplementedError # pragma: no cover - - def __call__(self, *args, **kwargs): - self.run() # pragma: no cover - - -seeder: Seeder = None - - def init_app(app: Flask) -> None: - init_sql(app) + 
init_sql(app) # TODO Delete after the migration to Cosmos DB has finished. + init_cosmos_db(app) def init_sql(app: Flask) -> None: - from commons.data_access_layer.sql import init_app, SQLSeeder + from commons.data_access_layer.sql import init_app init_app(app) - global seeder - seeder = SQLSeeder() def init_cosmos_db(app: Flask) -> None: - # from commons.data_access_layer.azure.cosmos_db import cosmos_helper - class CosmosSeeder(Seeder): - def run(self): - print("Provisioning namespace(database)...") - # cosmos_helper.create_container() - print("Database seeded!") - - def fresh(self): - print("Removing namespace(database)...") - # cosmos_helper.remove_container() - self.run() - - global seeder - seeder = CosmosSeeder() + from commons.data_access_layer.cosmos_db import init_app + init_app(app) diff --git a/commons/data_access_layer/sql.py b/commons/data_access_layer/sql.py index e9a02491..c1afb0e2 100644 --- a/commons/data_access_layer/sql.py +++ b/commons/data_access_layer/sql.py @@ -3,7 +3,7 @@ from flask import Flask from flask_sqlalchemy import SQLAlchemy -from time_tracker_api.database import CRUDDao, Seeder, ID_MAX_LENGTH +from commons.data_access_layer.database import CRUDDao, ID_MAX_LENGTH from time_tracker_api.security import current_user_id db: SQLAlchemy = None @@ -14,7 +14,7 @@ def handle_commit_issues(f): def rollback_if_necessary(*args, **kw): try: return f(*args, **kw) - except: # pragma: no cover + except: # pragma: no cover db.session.rollback() raise @@ -90,16 +90,3 @@ def update(self, id, data: dict): def delete(self, id): self.repository.remove(id) - - -class SQLSeeder(Seeder): # pragma: no cover - def run(self): - print("Provisioning database...") - db.create_all() - print("Database seeded!") - - def fresh(self): - print("Removing all existing data...") - db.drop_all() - - self.run() diff --git a/migrations/__init__.py b/migrations/__init__.py index 65cf435b..ece224aa 100644 --- a/migrations/__init__.py +++ b/migrations/__init__.py @@ -74,4 +74,4 @@ def remove_migration(self, name): self.repository.delete_permanently(name, self.app_id) -configure(storage=CosmosDBStorage("migrations", "time-tracker-api")) +configure(storage=CosmosDBStorage("migration", "time-tracker-api")) diff --git a/requirements/migrations.txt b/requirements/migrations.txt index 6b95ae8e..5cce99ba 100644 --- a/requirements/migrations.txt +++ b/requirements/migrations.txt @@ -3,4 +3,4 @@ # For running any kind of data migration # Migration tool -migrate-anything==0.1.6 \ No newline at end of file +migrate-anything==0.1.6 diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index a9cc28ca..c242e4a1 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -12,4 +12,4 @@ pytest==5.2.0 pytest-mock==2.0.0 # Coverage -coverage==4.5.1 \ No newline at end of file +coverage==4.5.1 diff --git a/tests/time_tracker_api/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py index 5d5459a3..91786f4b 100644 --- a/tests/time_tracker_api/projects/projects_namespace_test.py +++ b/tests/time_tracker_api/projects/projects_namespace_test.py @@ -4,7 +4,7 @@ from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture -from time_tracker_api.projects.projects_model import PROJECT_TYPE +from time_tracker_api.security import current_user_tenant_id fake = Faker() @@ -12,7 +12,6 @@ "name": fake.company(), "description": fake.paragraph(), 'customer_id': fake.uuid4(), - 'tenant_id': 
fake.uuid4(), 'project_type_id': fake.uuid4() } @@ -30,7 +29,7 @@ def test_create_project_should_succeed_with_valid_request(client: FlaskClient, m response = client.post("/projects", json=valid_project_data, follow_redirects=True) assert HTTPStatus.CREATED == response.status_code - repository_create_mock.assert_called_once_with(valid_project_data) + repository_create_mock.assert_called_once() def test_create_project_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): @@ -73,7 +72,8 @@ def test_get_project_should_succeed_with_valid_id(client: FlaskClient, mocker: M assert HTTPStatus.OK == response.status_code fake_project == json.loads(response.data) - repository_find_mock.assert_called_once_with(str(valid_id)) + repository_find_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) def test_get_project_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): @@ -89,7 +89,8 @@ def test_get_project_should_return_not_found_with_invalid_id(client: FlaskClient response = client.get("/projects/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_find_mock.assert_called_once_with(str(invalid_id)) + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_get_project_should_response_with_unprocessable_entity_for_invalid_id_format(client: FlaskClient, @@ -106,14 +107,15 @@ def test_get_project_should_response_with_unprocessable_entity_for_invalid_id_fo response = client.get("/projects/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_find_mock.assert_called_once_with(str(invalid_id)) + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_update_project_should_succeed_with_valid_data(client: FlaskClient, mocker: MockFixture): from time_tracker_api.projects.projects_namespace import project_dao repository_update_mock = mocker.patch.object(project_dao.repository, - 'update', + 'partial_update', return_value=fake_project) valid_id = fake.random_int(1, 9999) @@ -121,7 +123,9 @@ def test_update_project_should_succeed_with_valid_data(client: FlaskClient, mock assert HTTPStatus.OK == response.status_code fake_project == json.loads(response.data) - repository_update_mock.assert_called_once_with(str(valid_id), valid_project_data) + repository_update_mock.assert_called_once_with(str(valid_id), + changes=valid_project_data, + partition_key_value=current_user_tenant_id()) def test_update_project_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): @@ -148,7 +152,7 @@ def test_update_project_should_return_not_found_with_invalid_id(client: FlaskCli invalid_id = fake.random_int(1, 9999) repository_update_mock = mocker.patch.object(project_dao.repository, - 'update', + 'partial_update', side_effect=NotFound) response = client.put("/projects/%s" % invalid_id, @@ -156,7 +160,9 @@ def test_update_project_should_return_not_found_with_invalid_id(client: FlaskCli follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_update_mock.assert_called_once_with(str(invalid_id), valid_project_data) + repository_update_mock.assert_called_once_with(str(invalid_id), + changes=valid_project_data, + partition_key_value=current_user_tenant_id()) def test_delete_project_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): @@ -165,14 +171,15 @@ def 
test_delete_project_should_succeed_with_valid_id(client: FlaskClient, mocker valid_id = fake.random_int(1, 9999) repository_remove_mock = mocker.patch.object(project_dao.repository, - 'remove', + 'delete', return_value=None) response = client.delete("/projects/%s" % valid_id, follow_redirects=True) assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data - repository_remove_mock.assert_called_once_with(str(valid_id)) + repository_remove_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) def test_delete_project_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): @@ -182,13 +189,14 @@ def test_delete_project_should_return_not_found_with_invalid_id(client: FlaskCli invalid_id = fake.random_int(1, 9999) repository_remove_mock = mocker.patch.object(project_dao.repository, - 'remove', + 'delete', side_effect=NotFound) response = client.delete("/projects/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_delete_project_should_return_unprocessable_entity_for_invalid_id_format(client: FlaskClient, @@ -199,10 +207,11 @@ def test_delete_project_should_return_unprocessable_entity_for_invalid_id_format invalid_id = fake.company() repository_remove_mock = mocker.patch.object(project_dao.repository, - 'remove', + 'delete', side_effect=UnprocessableEntity) response = client.delete("/projects/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) diff --git a/tests/time_tracker_api/smoke_test.py b/tests/time_tracker_api/smoke_test.py index e2a6e35d..aa1a75d1 100644 --- a/tests/time_tracker_api/smoke_test.py +++ b/tests/time_tracker_api/smoke_test.py @@ -1,11 +1,10 @@ -import pyodbc - import pytest +from azure.cosmos.exceptions import CosmosHttpResponseError, CosmosResourceExistsError, CosmosResourceNotFoundError from flask.testing import FlaskClient from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture -unexpected_errors_to_be_handled = [pyodbc.OperationalError] +unexpected_errors_to_be_handled = [CosmosHttpResponseError, CosmosResourceNotFoundError, CosmosResourceExistsError] def test_app_exists(app): diff --git a/time_tracker_api/__init__.py b/time_tracker_api/__init__.py index b76493d4..b584fff8 100644 --- a/time_tracker_api/__init__.py +++ b/time_tracker_api/__init__.py @@ -37,7 +37,7 @@ def init_app_config(app: Flask, config_path: str, config_data: dict = None): def init_app(app: Flask): - from time_tracker_api.database import init_app as init_database + from commons.data_access_layer.database import init_app as init_database init_database(app) from time_tracker_api.api import api diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index ecb954fc..2d5d0f14 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -1,6 +1,6 @@ from azure.cosmos import PartitionKey -from time_tracker_api.database import CRUDDao +from commons.data_access_layer.database import CRUDDao class ActivityDao(CRUDDao): @@ -40,7 +40,6 @@ def 
__init__(self): 'unique_key_policy': { 'uniqueKeys': [ {'paths': ['/name']}, - {'paths': ['/deleted']}, ] } } diff --git a/time_tracker_api/api.py b/time_tracker_api/api.py index 90dec35f..a20fea46 100644 --- a/time_tracker_api/api.py +++ b/time_tracker_api/api.py @@ -1,10 +1,9 @@ -import pyodbc - -import sqlalchemy +from azure.cosmos.exceptions import CosmosResourceExistsError, CosmosResourceNotFoundError, CosmosHttpResponseError from faker import Faker from flask import current_app as app from flask_restplus import Api, fields from flask_restplus._http import HTTPStatus + from time_tracker_api import __version__ faker = Faker() @@ -22,7 +21,6 @@ required=True, title='Last event Identifier', description='Last event over this resource', - example=faker.uuid4(), ), } @@ -52,28 +50,22 @@ """ -@api.errorhandler(sqlalchemy.exc.IntegrityError) -def handle_db_integrity_error(e): - """Handles errors related to data consistency""" - if e.code == 'gkpj': - return {'message': 'It already exists or references data that does not exist.'}, HTTPStatus.CONFLICT - else: - return {'message': 'Data integrity issues.'}, HTTPStatus.CONFLICT +@api.errorhandler(CosmosResourceExistsError) +def handle_cosmos_resource_exists_error(error): + return {'message': 'This item already exists'}, HTTPStatus.CONFLICT -@api.errorhandler(sqlalchemy.exc.DataError) -def handle_invalid_data_error(e): - """Return a 422 because the user entered data of an invalid type""" - return {'message': 'The processed data was invalid. Please correct it.'}, HTTPStatus.UNPROCESSABLE_ENTITY +@api.errorhandler(CosmosResourceNotFoundError) +def handle_cosmos_resource_not_found_error(error): + return {'message': 'This item was not found'}, HTTPStatus.NOT_FOUND -@api.errorhandler(pyodbc.OperationalError) -def handle_connection_error(e): - """Return a 500 due to a issue in the connection to a 3rd party service""" - return {'message': 'Connection issues. Please try again in a few minutes.'}, HTTPStatus.SERVICE_UNAVAILABLE +@api.errorhandler(CosmosHttpResponseError) +def handle_cosmos_http_response_error(error): + return {'message': 'Invalid request. 
Please verify your data.'}, HTTPStatus.BAD_REQUEST @api.errorhandler -def generic_exception_handler(e): - app.logger.error(e) +def default_error_handler(error): + app.logger.error(error) return {'message': 'An unhandled exception occurred.'}, HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/time_tracker_api/customers/customers_model.py b/time_tracker_api/customers/customers_model.py index 2f4e568c..a91d965c 100644 --- a/time_tracker_api/customers/customers_model.py +++ b/time_tracker_api/customers/customers_model.py @@ -1,6 +1,6 @@ from azure.cosmos import PartitionKey -from time_tracker_api.database import CRUDDao +from commons.data_access_layer.database import CRUDDao class CustomerDao(CRUDDao): @@ -9,7 +9,7 @@ class CustomerDao(CRUDDao): def create_dao() -> CustomerDao: from commons.data_access_layer.sql import db - from time_tracker_api.database import COMMENTS_MAX_LENGTH + from commons.data_access_layer.database import COMMENTS_MAX_LENGTH from commons.data_access_layer.sql import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid diff --git a/time_tracker_api/project_types/project_types_model.py b/time_tracker_api/project_types/project_types_model.py index 136be198..936456ef 100644 --- a/time_tracker_api/project_types/project_types_model.py +++ b/time_tracker_api/project_types/project_types_model.py @@ -1,6 +1,6 @@ from azure.cosmos import PartitionKey -from time_tracker_api.database import CRUDDao +from commons.data_access_layer.database import CRUDDao class ProjectTypeDao(CRUDDao): @@ -9,7 +9,7 @@ class ProjectTypeDao(CRUDDao): def create_dao() -> ProjectTypeDao: from commons.data_access_layer.sql import db - from time_tracker_api.database import COMMENTS_MAX_LENGTH + from commons.data_access_layer.database import COMMENTS_MAX_LENGTH from commons.data_access_layer.sql import SQLCRUDDao from sqlalchemy_utils import UUIDType import uuid @@ -43,7 +43,6 @@ def __init__(self): 'unique_key_policy': { 'uniqueKeys': [ {'paths': ['/name']}, - {'paths': ['/deleted']}, ] } } diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 23c1c055..38d36874 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -1,52 +1,22 @@ -import enum +from dataclasses import dataclass from azure.cosmos import PartitionKey -from time_tracker_api.database import CRUDDao +from commons.data_access_layer.cosmos_db import CosmosDBModel, CosmosDBDao, CosmosDBRepository +from commons.data_access_layer.database import CRUDDao - -class PROJECT_TYPE(enum.Enum): - CUSTOMER = 'CUSTOMER' - TRAINING = 'TRAINING' - - @classmethod - def valid_type_values(self): - return list(map(lambda x: x.value, PROJECT_TYPE._member_map_.values())) +""" +Protocols +""" class ProjectDao(CRUDDao): pass -def create_dao() -> ProjectDao: - from commons.data_access_layer.sql import db - from time_tracker_api.database import COMMENTS_MAX_LENGTH - from sqlalchemy_utils import UUIDType - import uuid - from commons.data_access_layer.sql import SQLCRUDDao - - class ProjectSQLModel(db.Model): - __tablename__ = 'project' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(50), unique=True, nullable=False) - description = db.Column(db.String(COMMENTS_MAX_LENGTH), unique=False, nullable=False) - project_type_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - customer_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - deleted = db.Column(UUIDType(binary=False), 
default=uuid.uuid4) - tenant_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - - def __repr__(self): - return '' % self.name - - def __str___(self): - return "the project \"%s\"" % self.name - - class ProjectSQLDao(SQLCRUDDao): - def __init__(self): - SQLCRUDDao.__init__(self, ProjectSQLModel) - - return ProjectSQLDao() - +""" +Cosmos DB +""" container_definition = { 'id': 'project', @@ -54,7 +24,37 @@ def __init__(self): 'unique_key_policy': { 'uniqueKeys': [ {'paths': ['/name']}, - {'paths': ['/deleted']}, ] } } + + +@dataclass() +class ProjectCosmosDBModel(CosmosDBModel): + id: str + name: str + description: str + project_type_id: int + customer_id: str + deleted: str + tenant_id: str + + def __init__(self, data): + super(ProjectCosmosDBModel, self).__init__(data) + + def __repr__(self): + return '' % self.name + + def __str___(self): + return "the project \"%s\"" % self.name + + +def create_dao() -> ProjectDao: + repository = CosmosDBRepository.from_definition(container_definition, + mapper=ProjectCosmosDBModel) + + class ProjectCosmosDBDao(CosmosDBDao, ProjectDao): + def __init__(self): + CosmosDBDao.__init__(self, repository) + + return ProjectCosmosDBDao() diff --git a/time_tracker_api/projects/projects_namespace.py b/time_tracker_api/projects/projects_namespace.py index 9c4cdd3d..a90b8697 100644 --- a/time_tracker_api/projects/projects_namespace.py +++ b/time_tracker_api/projects/projects_namespace.py @@ -30,12 +30,6 @@ description='Customer this project belongs to', example=faker.uuid4(), ), - 'tenant_id': fields.String( - required=True, - title='Identifier of Tenant', - description='Tenant this project belongs to', - example=faker.uuid4(), - ), 'project_type_id': fields.String( title='Identifier of Project type', description='Type of the project. 
Used for grouping', @@ -50,7 +44,13 @@ title='Identifier', description='The unique identifier', example=faker.uuid4(), - ) + ), + 'tenant_id': fields.String( + required=False, + title='Identifier of Tenant', + description='Tenant this project belongs to', + example=faker.uuid4(), + ), } project_response_fields.update(audit_fields) diff --git a/time_tracker_api/security.py b/time_tracker_api/security.py index b8ae1124..0309a13e 100644 --- a/time_tracker_api/security.py +++ b/time_tracker_api/security.py @@ -9,7 +9,7 @@ dev_secret_key: str = None -def current_user_id(): +def current_user_id() -> str: """ Returns the id of the authenticated user in Azure Active Directory @@ -17,6 +17,11 @@ def current_user_id(): return 'anonymous' +def current_user_tenant_id() -> str: + # TODO Get this from the JWT + return "ioet" + + def generate_dev_secret_key(): from time_tracker_api import flask_app as app """ @@ -30,3 +35,4 @@ def generate_dev_secret_key(): print("The generated secret is \"%s\"" % dev_secret_key) print('*********************************************************') return dev_secret_key + diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index e876e884..9cdb9024 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -1,7 +1,7 @@ from azure.cosmos import PartitionKey from sqlalchemy_utils import ScalarListType -from time_tracker_api.database import CRUDDao +from commons.data_access_layer.database import CRUDDao class TimeEntriesDao(CRUDDao): @@ -10,7 +10,7 @@ class TimeEntriesDao(CRUDDao): def create_dao() -> TimeEntriesDao: from commons.data_access_layer.sql import db - from time_tracker_api.database import COMMENTS_MAX_LENGTH + from commons.data_access_layer.database import COMMENTS_MAX_LENGTH from sqlalchemy_utils import UUIDType import uuid from commons.data_access_layer.sql import SQLCRUDDao @@ -56,7 +56,6 @@ def __init__(self): 'unique_key_policy': { 'uniqueKeys': [ {'paths': ['/owner_id', '/end_date']}, - {'paths': ['/deleted']}, ] } } diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index a5d71ca5..7fd6861b 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -5,7 +5,7 @@ from flask_restplus._http import HTTPStatus from time_tracker_api.api import audit_fields -from time_tracker_api.database import COMMENTS_MAX_LENGTH +from commons.data_access_layer.database import COMMENTS_MAX_LENGTH from time_tracker_api.time_entries.time_entries_model import create_dao faker = Faker() From 819ca72f70cf53310c3d37429c770ab3ca7ae101 Mon Sep 17 00:00:00 2001 From: EliuX Date: Tue, 14 Apr 2020 13:45:26 -0500 Subject: [PATCH 013/387] Close #55 Create activity model for Cosmos DB --- commons/data_access_layer/cosmos_db.py | 13 ++-- .../activities/activities_namespace_test.py | 46 +++++++++------ .../projects/projects_namespace_test.py | 2 +- .../activities/activities_model.py | 59 ++++++++++--------- .../activities/activities_namespace.py | 12 ++-- time_tracker_api/projects/projects_model.py | 8 --- 6 files changed, 76 insertions(+), 64 deletions(-) diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py index 1d13d6c4..73ae4731 100644 --- a/commons/data_access_layer/cosmos_db.py +++ b/commons/data_access_layer/cosmos_db.py @@ -168,28 +168,31 @@ def __init__(self, 
repository: CosmosDBRepository): self.repository = repository def get_all(self) -> list: - tenant_id: str = current_user_tenant_id() + tenant_id: str = self.partition_key_value return self.repository.find_all(partition_key_value=tenant_id) def get(self, id): - tenant_id: str = current_user_tenant_id() + tenant_id: str = self.partition_key_value return self.repository.find(id, partition_key_value=tenant_id) def create(self, data: dict): data['id'] = str(uuid.uuid4()) - data['tenant_id'] = current_user_tenant_id() + data['tenant_id'] = self.partition_key_value return self.repository.create(data) def update(self, id, data: dict): - tenant_id: str = current_user_tenant_id() return self.repository.partial_update(id, changes=data, - partition_key_value=tenant_id) + partition_key_value=self.partition_key_value) def delete(self, id): tenant_id: str = current_user_tenant_id() self.repository.delete(id, partition_key_value=tenant_id) + @property + def partition_key_value(self): + return current_user_tenant_id() + def init_app(app: Flask) -> None: global cosmos_helper diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 735a884b..1b69fd74 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -1,8 +1,10 @@ from faker import Faker from flask import json from flask.testing import FlaskClient -from pytest_mock import MockFixture from flask_restplus._http import HTTPStatus +from pytest_mock import MockFixture + +from time_tracker_api.security import current_user_tenant_id fake = Faker() @@ -26,7 +28,7 @@ def test_create_activity_should_succeed_with_valid_request(client: FlaskClient, response = client.post("/activities", json=valid_activity_data, follow_redirects=True) assert HTTPStatus.CREATED == response.status_code - repository_create_mock.assert_called_once_with(valid_activity_data) + repository_create_mock.assert_called_once() def test_create_activity_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): @@ -54,7 +56,7 @@ def test_list_all_activities(client: FlaskClient, mocker: MockFixture): assert [] == json_data repository_find_all_mock.assert_called_once() -#HEY + def test_get_activity_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): from time_tracker_api.activities.activities_namespace import activity_dao @@ -68,7 +70,8 @@ def test_get_activity_should_succeed_with_valid_id(client: FlaskClient, mocker: assert HTTPStatus.OK == response.status_code fake_activity == json.loads(response.data) - repository_find_mock.assert_called_once_with(str(valid_id)) + repository_find_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) def test_get_activity_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): @@ -84,7 +87,8 @@ def test_get_activity_should_return_not_found_with_invalid_id(client: FlaskClien response = client.get("/activities/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_find_mock.assert_called_once_with(str(invalid_id)) + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_get_activity_should_return_422_for_invalid_id_format(client: FlaskClient, mocker: MockFixture): @@ -100,14 +104,15 @@ def test_get_activity_should_return_422_for_invalid_id_format(client: FlaskClien response = 
client.get("/activities/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_find_mock.assert_called_once_with(str(invalid_id)) + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_update_activity_should_succeed_with_valid_data(client: FlaskClient, mocker: MockFixture): from time_tracker_api.activities.activities_namespace import activity_dao repository_update_mock = mocker.patch.object(activity_dao.repository, - 'update', + 'partial_update', return_value=fake_activity) valid_id = fake.random_int(1, 9999) @@ -115,13 +120,15 @@ def test_update_activity_should_succeed_with_valid_data(client: FlaskClient, moc assert HTTPStatus.OK == response.status_code fake_activity == json.loads(response.data) - repository_update_mock.assert_called_once_with(str(valid_id), valid_activity_data) + repository_update_mock.assert_called_once_with(str(valid_id), + changes=valid_activity_data, + partition_key_value=current_user_tenant_id()) def test_update_activity_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.activities.activities_namespace import activity_dao repository_update_mock = mocker.patch.object(activity_dao.repository, - 'update', + 'partial_update', return_value=fake_activity) valid_id = fake.random_int(1, 9999) @@ -138,7 +145,7 @@ def test_update_activity_should_return_not_found_with_invalid_id(client: FlaskCl invalid_id = fake.random_int(1, 9999) repository_update_mock = mocker.patch.object(activity_dao.repository, - 'update', + 'partial_update', side_effect=NotFound) response = client.put("/activities/%s" % invalid_id, @@ -146,7 +153,9 @@ def test_update_activity_should_return_not_found_with_invalid_id(client: FlaskCl follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_update_mock.assert_called_once_with(str(invalid_id), valid_activity_data) + repository_update_mock.assert_called_once_with(str(invalid_id), + changes=valid_activity_data, + partition_key_value=current_user_tenant_id()) def test_delete_activity_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): @@ -155,14 +164,15 @@ def test_delete_activity_should_succeed_with_valid_id(client: FlaskClient, mocke valid_id = fake.random_int(1, 9999) repository_remove_mock = mocker.patch.object(activity_dao.repository, - 'remove', + 'delete', return_value=None) response = client.delete("/activities/%s" % valid_id, follow_redirects=True) assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data - repository_remove_mock.assert_called_once_with(str(valid_id)) + repository_remove_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) def test_delete_activity_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): @@ -172,13 +182,14 @@ def test_delete_activity_should_return_not_found_with_invalid_id(client: FlaskCl invalid_id = fake.random_int(1, 9999) repository_remove_mock = mocker.patch.object(activity_dao.repository, - 'remove', + 'delete', side_effect=NotFound) response = client.delete("/activities/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_delete_activity_should_return_422_for_invalid_id_format(client: 
FlaskClient, mocker: MockFixture): @@ -188,10 +199,11 @@ def test_delete_activity_should_return_422_for_invalid_id_format(client: FlaskCl invalid_id = fake.company() repository_remove_mock = mocker.patch.object(activity_dao.repository, - 'remove', + 'delete', side_effect=UnprocessableEntity) response = client.delete("/activities/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) diff --git a/tests/time_tracker_api/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py index 91786f4b..e8707dc3 100644 --- a/tests/time_tracker_api/projects/projects_namespace_test.py +++ b/tests/time_tracker_api/projects/projects_namespace_test.py @@ -135,7 +135,7 @@ def test_update_project_should_reject_bad_request(client: FlaskClient, mocker: M "project_type_id": fake.pyint(min_value=1, max_value=100), }) repository_update_mock = mocker.patch.object(project_dao.repository, - 'update', + 'partial_update', return_value=fake_project) valid_id = fake.random_int(1, 9999) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 2d5d0f14..36e8742c 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -1,5 +1,8 @@ +from dataclasses import dataclass + from azure.cosmos import PartitionKey +from commons.data_access_layer.cosmos_db import CosmosDBModel, CosmosDBDao, CosmosDBRepository from commons.data_access_layer.database import CRUDDao @@ -7,33 +10,6 @@ class ActivityDao(CRUDDao): pass -def create_dao() -> ActivityDao: - from sqlalchemy_utils import UUIDType - import uuid - from commons.data_access_layer.sql import db - from commons.data_access_layer.sql import SQLCRUDDao - - class ActivitySQLModel(db.Model): - __tablename__ = 'activity' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(50), unique=True, nullable=False) - description = db.Column(db.String(250), unique=False, nullable=False) - deleted = db.Column(UUIDType(binary=False), default=uuid.uuid4) - tenant_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - - def __repr__(self): - return '' % self.name - - def __str___(self): - return "the activity \"%s\"" % self.name - - class ActivitySQLDao(ActivityDao, SQLCRUDDao): - def __init__(self): - SQLCRUDDao.__init__(self, ActivitySQLModel) - - return ActivitySQLDao() - - container_definition = { 'id': 'activity', 'partition_key': PartitionKey(path='/tenant_id'), @@ -43,3 +19,32 @@ def __init__(self): ] } } + + +@dataclass() +class ActivityCosmosDBModel(CosmosDBModel): + id: str + name: str + description: str + deleted: str + tenant_id: str + + def __init__(self, data): + super(ActivityCosmosDBModel, self).__init__(data) + + def __repr__(self): + return '' % self.name + + def __str___(self): + return "the activity \"%s\"" % self.name + + +def create_dao() -> ActivityDao: + repository = CosmosDBRepository.from_definition(container_definition, + mapper=ActivityCosmosDBModel) + + class ActivityCosmosDBDao(CosmosDBDao, ActivityDao): + def __init__(self): + CosmosDBDao.__init__(self, repository) + + return ActivityCosmosDBDao() diff --git a/time_tracker_api/activities/activities_namespace.py b/time_tracker_api/activities/activities_namespace.py index 
e87f2ccd..3c5ccf8b 100644 --- a/time_tracker_api/activities/activities_namespace.py +++ b/time_tracker_api/activities/activities_namespace.py @@ -22,12 +22,6 @@ title='Description', description='Comments about the activity', example=faker.paragraph(), - ), - 'tenant_id': fields.String( - required=True, - title='Identifier of Tenant', - description='Tenant this activity belongs to', - example=faker.uuid4(), ) }) @@ -39,6 +33,12 @@ description='The unique identifier', example=faker.uuid4(), ), + 'tenant_id': fields.String( + required=True, + title='Identifier of Tenant', + description='Tenant this activity belongs to', + example=faker.uuid4(), + ), } activity_response_fields.update(audit_fields) diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 38d36874..33ce9ce0 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -5,19 +5,11 @@ from commons.data_access_layer.cosmos_db import CosmosDBModel, CosmosDBDao, CosmosDBRepository from commons.data_access_layer.database import CRUDDao -""" -Protocols -""" - class ProjectDao(CRUDDao): pass -""" -Cosmos DB -""" - container_definition = { 'id': 'project', 'partition_key': PartitionKey(path='/tenant_id'), From a9aca4c023cd29405d0394b32b7536895931d16e Mon Sep 17 00:00:00 2001 From: EliuX Date: Tue, 14 Apr 2020 15:02:20 -0500 Subject: [PATCH 014/387] Close #56 Create customer model for Cosmos DB --- setup.cfg | 6 + tests/time_tracker_api/customers/__init__.py | 0 .../customers/customers_namespace_test.py | 209 ++++++++++++++++++ .../activities/activities_model.py | 6 +- .../activities/activities_namespace.py | 4 +- time_tracker_api/api.py | 2 +- time_tracker_api/customers/customers_model.py | 61 ++--- .../customers/customers_namespace.py | 18 +- .../project_types/project_types_namespace.py | 4 +- time_tracker_api/projects/projects_model.py | 6 +- .../projects/projects_namespace.py | 4 +- time_tracker_api/security.py | 3 +- .../time_entries/time_entries_namespace.py | 4 +- 13 files changed, 272 insertions(+), 55 deletions(-) create mode 100644 tests/time_tracker_api/customers/__init__.py create mode 100644 tests/time_tracker_api/customers/customers_namespace_test.py diff --git a/setup.cfg b/setup.cfg index a6500ee6..a4097447 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,3 +12,9 @@ branch = True source = time_tracker_api commons + +[report] +exclude_lines = + pragma: no cover + @dataclass() + raise EnvironmentError diff --git a/tests/time_tracker_api/customers/__init__.py b/tests/time_tracker_api/customers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/time_tracker_api/customers/customers_namespace_test.py b/tests/time_tracker_api/customers/customers_namespace_test.py new file mode 100644 index 00000000..707df28a --- /dev/null +++ b/tests/time_tracker_api/customers/customers_namespace_test.py @@ -0,0 +1,209 @@ +from faker import Faker +from flask import json +from flask.testing import FlaskClient +from flask_restplus._http import HTTPStatus +from pytest_mock import MockFixture + +from time_tracker_api.security import current_user_tenant_id + +fake = Faker() + +valid_customer_data = { + "name": fake.company(), + "description": fake.paragraph(), + "tenant_id": fake.uuid4() +} + +fake_customer = ({ + "id": fake.random_int(1, 9999) +}).update(valid_customer_data) + + +def test_create_customer_should_succeed_with_valid_request(client: FlaskClient, mocker: MockFixture): + from 
time_tracker_api.customers.customers_namespace import customer_dao + repository_create_mock = mocker.patch.object(customer_dao.repository, + 'create', + return_value=fake_customer) + + response = client.post("/customers", json=valid_customer_data, follow_redirects=True) + + assert HTTPStatus.CREATED == response.status_code + repository_create_mock.assert_called_once() + + +def test_create_customer_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + repository_create_mock = mocker.patch.object(customer_dao.repository, + 'create', + return_value=fake_customer) + + response = client.post("/customers", json=None, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_create_mock.assert_not_called() + + +def test_list_all_customers(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + repository_find_all_mock = mocker.patch.object(customer_dao.repository, + 'find_all', + return_value=[]) + + response = client.get("/customers", follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + json_data = json.loads(response.data) + assert [] == json_data + repository_find_all_mock.assert_called_once() + + +def test_get_customer_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + + valid_id = fake.random_int(1, 9999) + + repository_find_mock = mocker.patch.object(customer_dao.repository, + 'find', + return_value=fake_customer) + + response = client.get("/customers/%s" % valid_id, follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + fake_customer == json.loads(response.data) + repository_find_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) + + +def test_get_customer_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_find_mock = mocker.patch.object(customer_dao.repository, + 'find', + side_effect=NotFound) + + response = client.get("/customers/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_get_customer_should_return_422_for_invalid_id_format(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + from werkzeug.exceptions import UnprocessableEntity + + invalid_id = fake.company() + + repository_find_mock = mocker.patch.object(customer_dao.repository, + 'find', + side_effect=UnprocessableEntity) + + response = client.get("/customers/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_update_customer_should_succeed_with_valid_data(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + + repository_update_mock = mocker.patch.object(customer_dao.repository, + 'partial_update', + return_value=fake_customer) + + valid_id = fake.random_int(1, 9999) + response = 
client.put("/customers/%s" % valid_id, json=valid_customer_data, follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + fake_customer == json.loads(response.data) + repository_update_mock.assert_called_once_with(str(valid_id), + changes=valid_customer_data, + partition_key_value=current_user_tenant_id()) + + +def test_update_customer_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + repository_update_mock = mocker.patch.object(customer_dao.repository, + 'partial_update', + return_value=fake_customer) + + valid_id = fake.random_int(1, 9999) + response = client.put("/customers/%s" % valid_id, json=None, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_update_mock.assert_not_called() + + +def test_update_customer_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_update_mock = mocker.patch.object(customer_dao.repository, + 'partial_update', + side_effect=NotFound) + + response = client.put("/customers/%s" % invalid_id, + json=valid_customer_data, + follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_update_mock.assert_called_once_with(str(invalid_id), + changes=valid_customer_data, + partition_key_value=current_user_tenant_id()) + + +def test_delete_customer_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + + valid_id = fake.random_int(1, 9999) + + repository_remove_mock = mocker.patch.object(customer_dao.repository, + 'delete', + return_value=None) + + response = client.delete("/customers/%s" % valid_id, follow_redirects=True) + + assert HTTPStatus.NO_CONTENT == response.status_code + assert b'' == response.data + repository_remove_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) + + +def test_delete_customer_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_remove_mock = mocker.patch.object(customer_dao.repository, + 'delete', + side_effect=NotFound) + + response = client.delete("/customers/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_delete_customer_should_return_422_for_invalid_id_format(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.customers.customers_namespace import customer_dao + from werkzeug.exceptions import UnprocessableEntity + + invalid_id = fake.company() + + repository_remove_mock = mocker.patch.object(customer_dao.repository, + 'delete', + side_effect=UnprocessableEntity) + + response = client.delete("/customers/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py 
index 36e8742c..9a14c110 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -30,13 +30,13 @@ class ActivityCosmosDBModel(CosmosDBModel): tenant_id: str def __init__(self, data): - super(ActivityCosmosDBModel, self).__init__(data) + super(ActivityCosmosDBModel, self).__init__(data) # pragma: no cover def __repr__(self): - return '' % self.name + return '' % self.name # pragma: no cover def __str___(self): - return "the activity \"%s\"" % self.name + return "the activity \"%s\"" % self.name # pragma: no cover def create_dao() -> ActivityDao: diff --git a/time_tracker_api/activities/activities_namespace.py b/time_tracker_api/activities/activities_namespace.py index 3c5ccf8b..4e6b3f6d 100644 --- a/time_tracker_api/activities/activities_namespace.py +++ b/time_tracker_api/activities/activities_namespace.py @@ -3,7 +3,7 @@ from flask_restplus._http import HTTPStatus from time_tracker_api.activities.activities_model import create_dao -from time_tracker_api.api import audit_fields +from time_tracker_api.api import common_fields faker = Faker() @@ -40,7 +40,7 @@ example=faker.uuid4(), ), } -activity_response_fields.update(audit_fields) +activity_response_fields.update(common_fields) activity = ns.inherit( 'Activity', diff --git a/time_tracker_api/api.py b/time_tracker_api/api.py index a20fea46..339d54c1 100644 --- a/time_tracker_api/api.py +++ b/time_tracker_api/api.py @@ -15,7 +15,7 @@ ) # Common models structure -audit_fields = { +common_fields = { 'deleted': fields.String( readOnly=True, required=True, diff --git a/time_tracker_api/customers/customers_model.py b/time_tracker_api/customers/customers_model.py index a91d965c..11e1af05 100644 --- a/time_tracker_api/customers/customers_model.py +++ b/time_tracker_api/customers/customers_model.py @@ -1,5 +1,8 @@ +from dataclasses import dataclass + from azure.cosmos import PartitionKey +from commons.data_access_layer.cosmos_db import CosmosDBModel, CosmosDBRepository, CosmosDBDao from commons.data_access_layer.database import CRUDDao @@ -7,41 +10,41 @@ class CustomerDao(CRUDDao): pass -def create_dao() -> CustomerDao: - from commons.data_access_layer.sql import db - from commons.data_access_layer.database import COMMENTS_MAX_LENGTH - from commons.data_access_layer.sql import SQLCRUDDao - from sqlalchemy_utils import UUIDType - import uuid - - class CustomerSQLModel(db.Model): - __tablename__ = 'customer' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(50), unique=True, nullable=False) - description = db.Column(db.String(COMMENTS_MAX_LENGTH), unique=False, nullable=False) - deleted = db.Column(UUIDType(binary=False), default=uuid.uuid4) - tenant_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - - def __repr__(self): - return '' % self.name - - def __str___(self): - return "the customer \"%s\"" % self.name - - class CustomerSQLDao(SQLCRUDDao): - def __init__(self): - SQLCRUDDao.__init__(self, CustomerSQLModel) - - return CustomerSQLDao() - - container_definition = { 'id': 'customer', 'partition_key': PartitionKey(path='/tenant_id'), 'unique_key_policy': { 'uniqueKeys': [ {'paths': ['/name']}, - {'paths': ['/deleted']}, ] } } + + +@dataclass() +class CustomerCosmosDBModel(CosmosDBModel): + id: str + name: str + description: str + deleted: str + tenant_id: str + + def __init__(self, data): + super(CustomerCosmosDBModel, self).__init__(data) # pragma: no cover + + def __repr__(self): + return '' % self.name # pragma: no 
cover + + def __str___(self): + return "the customer \"%s\"" % self.name # pragma: no cover + + +def create_dao() -> CustomerDao: + repository = CosmosDBRepository.from_definition(container_definition, + mapper=CustomerCosmosDBModel) + + class CustomerCosmosDBDao(CosmosDBDao, CustomerDao): + def __init__(self): + CosmosDBDao.__init__(self, repository) + + return CustomerCosmosDBDao() diff --git a/time_tracker_api/customers/customers_namespace.py b/time_tracker_api/customers/customers_namespace.py index 53dac000..f8064f95 100644 --- a/time_tracker_api/customers/customers_namespace.py +++ b/time_tracker_api/customers/customers_namespace.py @@ -2,7 +2,7 @@ from flask_restplus import Namespace, Resource, fields from flask_restplus._http import HTTPStatus -from time_tracker_api.api import audit_fields +from time_tracker_api.api import common_fields from time_tracker_api.customers.customers_model import create_dao faker = Faker() @@ -24,12 +24,6 @@ description='Description about the customer', example=faker.paragraph(), ), - 'tenant_id': fields.String( - required=True, - title='Identifier of Tenant', - description='Tenant this customer belongs to', - example=faker.uuid4(), - ), }) customer_response_fields = { @@ -39,9 +33,15 @@ title='Identifier', description='The unique identifier', example=faker.uuid4(), - ) + ), + 'tenant_id': fields.String( + required=True, + title='Identifier of Tenant', + description='Tenant this customer belongs to', + example=faker.uuid4(), + ), } -customer_response_fields.update(audit_fields) +customer_response_fields.update(common_fields) customer = ns.inherit( 'Customer', diff --git a/time_tracker_api/project_types/project_types_namespace.py b/time_tracker_api/project_types/project_types_namespace.py index 397821e8..c73a436a 100644 --- a/time_tracker_api/project_types/project_types_namespace.py +++ b/time_tracker_api/project_types/project_types_namespace.py @@ -2,7 +2,7 @@ from flask_restplus import Namespace, Resource, fields from flask_restplus._http import HTTPStatus -from time_tracker_api.api import audit_fields +from time_tracker_api.api import common_fields from time_tracker_api.project_types.project_types_model import create_dao faker = Faker() @@ -51,7 +51,7 @@ example=faker.uuid4(), ) } -project_type_response_fields.update(audit_fields) +project_type_response_fields.update(common_fields) project_type = ns.inherit( 'ProjectType', diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 33ce9ce0..b119f07e 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -32,13 +32,13 @@ class ProjectCosmosDBModel(CosmosDBModel): tenant_id: str def __init__(self, data): - super(ProjectCosmosDBModel, self).__init__(data) + super(ProjectCosmosDBModel, self).__init__(data) # pragma: no cover def __repr__(self): - return '' % self.name + return '' % self.name # pragma: no cover def __str___(self): - return "the project \"%s\"" % self.name + return "the project \"%s\"" % self.name # pragma: no cover def create_dao() -> ProjectDao: diff --git a/time_tracker_api/projects/projects_namespace.py b/time_tracker_api/projects/projects_namespace.py index a90b8697..82a0fb8e 100644 --- a/time_tracker_api/projects/projects_namespace.py +++ b/time_tracker_api/projects/projects_namespace.py @@ -2,7 +2,7 @@ from flask_restplus import Namespace, Resource, fields from flask_restplus._http import HTTPStatus -from time_tracker_api.api import audit_fields +from time_tracker_api.api import 
common_fields from time_tracker_api.projects.projects_model import create_dao faker = Faker() @@ -52,7 +52,7 @@ example=faker.uuid4(), ), } -project_response_fields.update(audit_fields) +project_response_fields.update(common_fields) project = ns.inherit( 'Project', diff --git a/time_tracker_api/security.py b/time_tracker_api/security.py index 0309a13e..684f1dfc 100644 --- a/time_tracker_api/security.py +++ b/time_tracker_api/security.py @@ -30,9 +30,8 @@ def generate_dev_secret_key(): """ global dev_secret_key dev_secret_key = fake.password(length=16, special_chars=True, digits=True, upper_case=True, lower_case=True) - if app.config.get("FLASK_DEBUG", False): + if app.config.get("FLASK_DEBUG", False): # pragma: no cover print('*********************************************************') print("The generated secret is \"%s\"" % dev_secret_key) print('*********************************************************') return dev_secret_key - diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index 7fd6861b..f919d2e1 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -4,8 +4,8 @@ from flask_restplus import fields, Resource, Namespace from flask_restplus._http import HTTPStatus -from time_tracker_api.api import audit_fields from commons.data_access_layer.database import COMMENTS_MAX_LENGTH +from time_tracker_api.api import common_fields from time_tracker_api.time_entries.time_entries_model import create_dao faker = Faker() @@ -83,7 +83,7 @@ example=faker.boolean(), ), } -time_entry_response_fields.update(audit_fields) +time_entry_response_fields.update(common_fields) time_entry = ns.inherit( 'TimeEntry', From 3a3ae47f4ba3dc7930eee0104070600b1305e9b9 Mon Sep 17 00:00:00 2001 From: EliuX Date: Tue, 14 Apr 2020 16:16:06 -0500 Subject: [PATCH 015/387] Close #57 Create project type model for Cosmos DB --- .../project_types/__init__.py | 0 .../project_types_namespace_test.py | 218 ++++++++++++++++++ .../project_types/project_types_model.py | 68 +++--- .../project_types/project_types_namespace.py | 18 +- time_tracker_api/projects/projects_model.py | 2 +- 5 files changed, 264 insertions(+), 42 deletions(-) create mode 100644 tests/time_tracker_api/project_types/__init__.py create mode 100644 tests/time_tracker_api/project_types/project_types_namespace_test.py diff --git a/tests/time_tracker_api/project_types/__init__.py b/tests/time_tracker_api/project_types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/time_tracker_api/project_types/project_types_namespace_test.py b/tests/time_tracker_api/project_types/project_types_namespace_test.py new file mode 100644 index 00000000..613b0cdc --- /dev/null +++ b/tests/time_tracker_api/project_types/project_types_namespace_test.py @@ -0,0 +1,218 @@ +from faker import Faker +from flask import json +from flask.testing import FlaskClient +from flask_restplus._http import HTTPStatus +from pytest_mock import MockFixture + +from time_tracker_api.security import current_user_tenant_id + +fake = Faker() + +valid_project_type_data = { + "name": fake.company(), + "description": fake.paragraph(), + 'customer_id': fake.uuid4(), + 'parent_id': fake.uuid4(), +} + +fake_project_type = ({ + "id": fake.random_int(1, 9999), + "tenant_id": fake.uuid4(), +}).update(valid_project_type_data) + + +def test_create_project_type_should_succeed_with_valid_request(client: FlaskClient, mocker: MockFixture): + from 
time_tracker_api.project_types.project_types_namespace import project_type_dao + repository_create_mock = mocker.patch.object(project_type_dao.repository, + 'create', + return_value=fake_project_type) + + response = client.post("/project-types", json=valid_project_type_data, follow_redirects=True) + + assert HTTPStatus.CREATED == response.status_code + repository_create_mock.assert_called_once() + + +def test_create_project_type_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + invalid_project_type_data = valid_project_type_data.copy() + invalid_project_type_data.update({ + "parent_id": None, + }) + repository_create_mock = mocker.patch.object(project_type_dao.repository, + 'create', + return_value=fake_project_type) + + response = client.post("/project-types", json=invalid_project_type_data, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_create_mock.assert_not_called() + + +def test_list_all_project_types(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + repository_find_all_mock = mocker.patch.object(project_type_dao.repository, + 'find_all', + return_value=[]) + + response = client.get("/project-types", follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + assert [] == json.loads(response.data) + repository_find_all_mock.assert_called_once() + + +def test_get_project_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + valid_id = fake.random_int(1, 9999) + repository_find_mock = mocker.patch.object(project_type_dao.repository, + 'find', + return_value=fake_project_type) + + response = client.get("/project-types/%s" % valid_id, follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + fake_project_type == json.loads(response.data) + repository_find_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) + + +def test_get_project_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_find_mock = mocker.patch.object(project_type_dao.repository, + 'find', + side_effect=NotFound) + + response = client.get("/project-types/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_get_project_should_response_with_unprocessable_entity_for_invalid_id_format(client: FlaskClient, + mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + from werkzeug.exceptions import UnprocessableEntity + + invalid_id = fake.company() + + repository_find_mock = mocker.patch.object(project_type_dao.repository, + 'find', + side_effect=UnprocessableEntity) + + response = client.get("/project-types/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code + repository_find_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_update_project_should_succeed_with_valid_data(client: 
FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + + repository_update_mock = mocker.patch.object(project_type_dao.repository, + 'partial_update', + return_value=fake_project_type) + + valid_id = fake.random_int(1, 9999) + response = client.put("/project-types/%s" % valid_id, json=valid_project_type_data, follow_redirects=True) + + assert HTTPStatus.OK == response.status_code + fake_project_type == json.loads(response.data) + repository_update_mock.assert_called_once_with(str(valid_id), + changes=valid_project_type_data, + partition_key_value=current_user_tenant_id()) + + +def test_update_project_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + invalid_project_type_data = valid_project_type_data.copy() + invalid_project_type_data.update({ + "parent_id": None, + }) + repository_update_mock = mocker.patch.object(project_type_dao.repository, + 'partial_update', + return_value=fake_project_type) + + valid_id = fake.random_int(1, 9999) + response = client.put("/project-types/%s" % valid_id, json=invalid_project_type_data, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_update_mock.assert_not_called() + + +def test_update_project_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_update_mock = mocker.patch.object(project_type_dao.repository, + 'partial_update', + side_effect=NotFound) + + response = client.put("/project-types/%s" % invalid_id, + json=valid_project_type_data, + follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_update_mock.assert_called_once_with(str(invalid_id), + changes=valid_project_type_data, + partition_key_value=current_user_tenant_id()) + + +def test_delete_project_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + + valid_id = fake.random_int(1, 9999) + + repository_remove_mock = mocker.patch.object(project_type_dao.repository, + 'delete', + return_value=None) + + response = client.delete("/project-types/%s" % valid_id, follow_redirects=True) + + assert HTTPStatus.NO_CONTENT == response.status_code + assert b'' == response.data + repository_remove_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) + + +def test_delete_project_should_return_not_found_with_invalid_id(client: FlaskClient, mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import project_type_dao + from werkzeug.exceptions import NotFound + + invalid_id = fake.random_int(1, 9999) + + repository_remove_mock = mocker.patch.object(project_type_dao.repository, + 'delete', + side_effect=NotFound) + + response = client.delete("/project-types/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.NOT_FOUND == response.status_code + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) + + +def test_delete_project_should_return_unprocessable_entity_for_invalid_id_format(client: FlaskClient, + mocker: MockFixture): + from time_tracker_api.project_types.project_types_namespace import 
project_type_dao + from werkzeug.exceptions import UnprocessableEntity + + invalid_id = fake.company() + + repository_remove_mock = mocker.patch.object(project_type_dao.repository, + 'delete', + side_effect=UnprocessableEntity) + + response = client.delete("/project-types/%s" % invalid_id, follow_redirects=True) + + assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) diff --git a/time_tracker_api/project_types/project_types_model.py b/time_tracker_api/project_types/project_types_model.py index 936456ef..9ff226d8 100644 --- a/time_tracker_api/project_types/project_types_model.py +++ b/time_tracker_api/project_types/project_types_model.py @@ -1,5 +1,8 @@ +from dataclasses import dataclass + from azure.cosmos import PartitionKey +from commons.data_access_layer.cosmos_db import CosmosDBModel, CosmosDBDao, CosmosDBRepository from commons.data_access_layer.database import CRUDDao @@ -7,42 +10,43 @@ class ProjectTypeDao(CRUDDao): pass -def create_dao() -> ProjectTypeDao: - from commons.data_access_layer.sql import db - from commons.data_access_layer.database import COMMENTS_MAX_LENGTH - from commons.data_access_layer.sql import SQLCRUDDao - from sqlalchemy_utils import UUIDType - import uuid - - class ProjectTypeSQLModel(db.Model): - __tablename__ = 'project_type' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(50), unique=True, nullable=False) - description = db.Column(db.String(COMMENTS_MAX_LENGTH), unique=False, nullable=False) - parent_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - customer_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - deleted = db.Column(UUIDType(binary=False), default=uuid.uuid4) - tenant_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - - def __repr__(self): - return '' % self.name - - def __str___(self): - return "the project type \"%s\"" % self.name - - class ProjectTypeSQLDao(SQLCRUDDao): - def __init__(self): - SQLCRUDDao.__init__(self, ProjectTypeSQLModel) - - return ProjectTypeSQLDao() - - container_definition = { 'id': 'project_type', - 'partition_key': PartitionKey(path='/customer_id'), + 'partition_key': PartitionKey(path='/tenant_id'), 'unique_key_policy': { 'uniqueKeys': [ - {'paths': ['/name']}, + {'paths': ['/name', '/customer_id']}, ] } } + + +@dataclass() +class ProjectTypeCosmosDBModel(CosmosDBModel): + id: str + name: str + description: str + parent_id: str + customer_id: str + deleted: str + tenant_id: str + + def __init__(self, data): + super(ProjectTypeCosmosDBModel, self).__init__(data) # pragma: no cover + + def __repr__(self): + return '' % self.name # pragma: no cover + + def __str___(self): + return "the project type \"%s\"" % self.name # pragma: no cover + + +def create_dao() -> ProjectTypeDao: + repository = CosmosDBRepository.from_definition(container_definition, + mapper=ProjectTypeCosmosDBModel) + + class ProjectTypeCosmosDBDao(CosmosDBDao, ProjectTypeDao): + def __init__(self): + CosmosDBDao.__init__(self, repository) + + return ProjectTypeCosmosDBDao() diff --git a/time_tracker_api/project_types/project_types_namespace.py b/time_tracker_api/project_types/project_types_namespace.py index c73a436a..2ed93054 100644 --- a/time_tracker_api/project_types/project_types_namespace.py +++ b/time_tracker_api/project_types/project_types_namespace.py @@ -16,7 +16,7 @@ title='Name', max_length=50, description='Name of the project 
type', - example=faker.company(), + example=faker.random_element(["Customer","Training","Internal"]), ), 'description': fields.String( title='Description', @@ -29,17 +29,11 @@ description='Customer this project type belongs to', example=faker.uuid4(), ), - 'tenant_id': fields.String( - required=True, - title='Identifier of Tenant', - description='Tenant this project type belongs to', - example=faker.uuid4(), - ), 'parent_id': fields.String( title='Identifier of Parent of the project type', description='Defines a self reference of the model ProjectType', example=faker.uuid4(), - ) + ), }) project_type_response_fields = { @@ -49,7 +43,13 @@ title='Identifier', description='The unique identifier', example=faker.uuid4(), - ) + ), + 'tenant_id': fields.String( + required=True, + title='Identifier of Tenant', + description='Tenant this project type belongs to', + example=faker.uuid4(), + ), } project_type_response_fields.update(common_fields) diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index b119f07e..6700005e 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -15,7 +15,7 @@ class ProjectDao(CRUDDao): 'partition_key': PartitionKey(path='/tenant_id'), 'unique_key_policy': { 'uniqueKeys': [ - {'paths': ['/name']}, + {'paths': ['/name', '/customer_id']}, ] } } From d23529e2805f23cf039b7e80a78c5248a9168bef Mon Sep 17 00:00:00 2001 From: EliuX Date: Tue, 14 Apr 2020 21:23:11 -0500 Subject: [PATCH 016/387] Close #58 Create time entry model for Cosmos DB --- README.md | 37 +--- commons/data_access_layer/cosmos_db.py | 95 +++++++--- requirements/azure_cosmos.txt | 5 +- tests/conftest.py | 13 +- tests/time_tracker_api/smoke_test.py | 6 +- .../time_entries/time_entries_model_test.py | 80 +++++++++ .../time_entries_namespace_test.py | 115 ++++++++---- time_tracker_api/api.py | 15 +- .../time_entries/time_entries_model.py | 165 +++++++++++++----- .../time_entries/time_entries_namespace.py | 75 +++++--- 10 files changed, 443 insertions(+), 163 deletions(-) create mode 100644 tests/time_tracker_api/time_entries/time_entries_model_test.py diff --git a/README.md b/README.md index 545c8473..a4fdaff7 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,7 @@ This is the mono-repository for the backend services and their common codebase ## Getting started Follow the following instructions to get the project ready to use ASAP. + ### Requirements Be sure you have installed in your system @@ -46,35 +47,6 @@ automatically [pip](https://pip.pypa.io/en/stable/) as well. Remember to do it with Python 3. - -- Install the [Microsoft ODBC Driver for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/microsoft-odbc-driver-for-sql-server?view=sql-server-ver15) -in your operative system. Then you have to check out what is the name of the SQL Driver installation. 
-Check it out with: - -```bash -vim /usr/local/etc/odbcinst.ini -``` - -It may display something like - -```.ini -[ODBC Driver 17 for SQL Server] -Description=Microsoft ODBC Driver 17 for SQL Server -Driver=/usr/local/lib/libmsodbcsql.17.dylib -UsageCount=2 -``` - -Then specify the driver name, in this case _DBC Driver 17 for SQL Server_ in the `SQL_DATABASE_URI`, e.g.: - -```.dotenv -SQL_DATABASE_URI=mssql+pyodbc://:@time-tracker-srv.database.windows.net/?driver\=ODBC Driver 17 for SQL Server -``` - -To troubleshoot issues regarding this part please check out: -- [Install the Microsoft ODBC driver for SQL Server (macOS)](https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/install-microsoft-odbc-driver-sql-server-macos?view=sql-server-ver15). -- Github issue [odbcinst: SQLRemoveDriver failed with Unable to find component name](https://github.com/Microsoft/homebrew-mssql-preview/issues/2). -- Stack overflow solution to [Can't open lib 'ODBC Driver 13 for SQL Server'? Sym linking issue?](https://stackoverflow.com/questions/44527452/cant-open-lib-odbc-driver-13-for-sql-server-sym-linking-issue). - ### How to use it - Set the env var `FLASK_APP` to `time_tracker_api` and start the app: @@ -93,6 +65,13 @@ To troubleshoot issues regarding this part please check out: a link to the swagger.json with the definition of the api. +### Important notes +Due to the technology used and the particularities of this API's implementation, it is important that you respect the +following notes regarding how data is sent to and retrieved from the API: + +- The [recommended](https://docs.microsoft.com/en-us/azure/cosmos-db/working-with-dates#storing-datetimes) format for +DateTime strings in Azure Cosmos DB is `YYYY-MM-DDThh:mm:ss.fffffffZ` which follows the ISO 8601 **UTC standard**.
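+
+For reference, a timestamp in this format can be produced with the Python standard
+library alone (a minimal sketch, not necessarily the helper the API uses internally):
+
+```python
+from datetime import datetime, timezone
+
+# ISO 8601 UTC timestamp, e.g. "2020-04-14T21:23:11.123456Z".
+# isoformat() ends UTC values with "+00:00", so swap in the "Z" suffix
+# recommended by the Cosmos DB documentation.
+utc_now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+print(utc_now)
+```
+
+A value like this can be sent in DateTime fields such as `start_date` and `end_date` of a time entry.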
+ ## Development ### Test diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py index 73ae4731..5e5e3ff6 100644 --- a/commons/data_access_layer/cosmos_db.py +++ b/commons/data_access_layer/cosmos_db.py @@ -1,15 +1,17 @@ import dataclasses import logging import uuid +from datetime import datetime from typing import Callable import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions from azure.cosmos import ContainerProxy, PartitionKey from flask import Flask +from werkzeug.exceptions import HTTPException from commons.data_access_layer.database import CRUDDao -from time_tracker_api.security import current_user_tenant_id +from time_tracker_api.security import current_user_tenant_id, current_user_id class CosmosDBFacade: @@ -75,12 +77,14 @@ class CosmosDBRepository: def __init__(self, container_id: str, partition_key_attribute: str, mapper: Callable = None, + order_fields: list = [], custom_cosmos_helper: CosmosDBFacade = None): global cosmos_helper self.cosmos_helper = custom_cosmos_helper or cosmos_helper if self.cosmos_helper is None: # pragma: no cover raise ValueError("The cosmos_db module has not been initialized!") self.mapper = mapper + self.order_fields = order_fields self.container: ContainerProxy = self.cosmos_helper.db.get_container_client(container_id) self.partition_key_attribute: str = partition_key_attribute @@ -93,7 +97,29 @@ def from_definition(cls, container_definition: dict, mapper=mapper, custom_cosmos_helper=custom_cosmos_helper) + @staticmethod + def create_sql_condition_for_visibility(visible_only: bool, container_name='c') -> str: + if visible_only: + # We are considering that `deleted == null` is not a choice + return 'AND NOT IS_DEFINED(%s.deleted)' % container_name + return '' + + @staticmethod + def create_sql_condition_for_owner_id(owner_id: str, container_name='c') -> str: + if owner_id: + return 'AND %s.owner_id=@owner_id' % container_name + return '' + + @staticmethod + def check_visibility(item, throw_not_found_if_deleted): + if throw_not_found_if_deleted and item.get('deleted') is not None: + raise exceptions.CosmosResourceNotFoundError(message='Deleted item', + status_code=404) + + return item + def create(self, data: dict, mapper: Callable = None): + self.on_create(data) function_mapper = self.get_mapper_or_dict(mapper) return function_mapper(self.container.create_item(body=data)) @@ -102,20 +128,24 @@ def find(self, id: str, partition_key_value, visible_only=True, mapper: Callable function_mapper = self.get_mapper_or_dict(mapper) return function_mapper(self.check_visibility(found_item, visible_only)) - def find_all(self, partition_key_value: str, max_count=None, offset=0, + def find_all(self, partition_key_value: str, owner_id=None, max_count=None, offset=0, visible_only=True, mapper: Callable = None): # TODO Use the tenant_id param and change container alias max_count = self.get_page_size_or(max_count) result = self.container.query_items( query=""" - SELECT * FROM c WHERE c.{partition_key_attribute}=@partition_key_value AND {visibility_condition} + SELECT * FROM c WHERE c.{partition_key_attribute}=@partition_key_value + {owner_condition} {visibility_condition} {order_clause} OFFSET @offset LIMIT @max_count """.format(partition_key_attribute=self.partition_key_attribute, - visibility_condition=self.create_sql_condition_for_visibility(visible_only)), + visibility_condition=self.create_sql_condition_for_visibility(visible_only), + 
owner_condition=self.create_sql_condition_for_owner_id(owner_id), + order_clause=self.create_sql_order_clause()), parameters=[ {"name": "@partition_key_value", "value": partition_key_value}, {"name": "@offset", "value": offset}, {"name": "@max_count", "value": max_count}, + {"name": "@owner_id", "value": owner_id}, ], partition_key=partition_key_value, max_item_count=max_count) @@ -130,6 +160,7 @@ def partial_update(self, id: str, changes: dict, partition_key_value: str, return self.update(id, item_data, mapper=mapper) def update(self, id: str, item_data: dict, mapper: Callable = None): + self.on_update(item_data) function_mapper = self.get_mapper_or_dict(mapper) return function_mapper(self.container.replace_item(id, body=item_data)) @@ -141,19 +172,6 @@ def delete(self, id: str, partition_key_value: str, mapper: Callable = None): def delete_permanently(self, id: str, partition_key_value: str) -> None: self.container.delete_item(id, partition_key_value) - def check_visibility(self, item, throw_not_found_if_deleted): - if throw_not_found_if_deleted and item.get('deleted') is not None: - raise exceptions.CosmosResourceNotFoundError(message='Deleted item', - status_code=404) - - return item - - def create_sql_condition_for_visibility(self, visible_only: bool, container_name='c') -> str: - if visible_only: - # We are considering that `deleted == null` is not a choice - return 'NOT IS_DEFINED(%s.deleted)' % container_name - return 'true' - def get_mapper_or_dict(self, alternative_mapper: Callable) -> Callable: return alternative_mapper or self.mapper or dict @@ -162,22 +180,40 @@ def get_page_size_or(self, custom_page_size: int) -> int: # or any other repository for the settings return custom_page_size or 100 + def on_create(self, new_item_data: dict): + if new_item_data.get('id') is None: + new_item_data['id'] = str(uuid.uuid4()) + + def on_update(self, update_item_data: dict): + pass + + def create_sql_order_clause(self): + if len(self.order_fields) > 0: + return "ORDER BY c.{}".format(", c.".join(self.order_fields)) + else: + return "" + class CosmosDBDao(CRUDDao): def __init__(self, repository: CosmosDBRepository): self.repository = repository + @property + def partition_key_value(self): + return current_user_tenant_id() + def get_all(self) -> list: tenant_id: str = self.partition_key_value - return self.repository.find_all(partition_key_value=tenant_id) + owner_id = current_user_id() + return self.repository.find_all(partition_key_value=tenant_id, owner_id=owner_id) def get(self, id): tenant_id: str = self.partition_key_value return self.repository.find(id, partition_key_value=tenant_id) def create(self, data: dict): - data['id'] = str(uuid.uuid4()) - data['tenant_id'] = self.partition_key_value + data[self.repository.partition_key_attribute] = self.partition_key_value + data['owner_id'] = current_user_id() return self.repository.create(data) def update(self, id, data: dict): @@ -189,9 +225,22 @@ def delete(self, id): tenant_id: str = current_user_tenant_id() self.repository.delete(id, partition_key_value=tenant_id) - @property - def partition_key_value(self): - return current_user_tenant_id() + +class CustomError(HTTPException): + def __init__(self, status_code: int, description: str = None): + self.code = status_code + self.description = description + + +def current_datetime(): + return datetime.utcnow() + + +def datetime_str(value: datetime): + if value is not None: + return value.isoformat() + else: + return None def init_app(app: Flask) -> None: diff --git 
a/requirements/azure_cosmos.txt b/requirements/azure_cosmos.txt index 53ab3e98..ed253c84 100644 --- a/requirements/azure_cosmos.txt +++ b/requirements/azure_cosmos.txt @@ -11,4 +11,7 @@ idna==2.8 six==1.13.0 urllib3==1.25.7 virtualenv==16.7.9 -virtualenv-clone==0.5.3 \ No newline at end of file +virtualenv-clone==0.5.3 + +# Dataclasses +dataclasses==0.6 \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index ed9034e1..7dfd83ef 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ from commons.data_access_layer.cosmos_db import CosmosDBRepository from time_tracker_api import create_app +from time_tracker_api.time_entries.time_entries_model import TimeEntryCosmosDBRepository fake = Faker() Faker.seed() @@ -96,7 +97,12 @@ def tenant_id() -> str: @pytest.fixture(scope="session") -def another_tenant_id() -> str: +def another_tenant_id(tenant_id) -> str: + return tenant_id[:-5] + 'fffff' + + +@pytest.fixture(scope="session") +def owner_id() -> str: return fake.uuid4() @@ -109,3 +115,8 @@ def sample_item(cosmos_db_repository: CosmosDBRepository, tenant_id: str) -> dic tenant_id=tenant_id) return cosmos_db_repository.create(sample_item_data) + + +@pytest.yield_fixture(scope="module") +def time_entry_repository(cosmos_db_repository: CosmosDBRepository) -> TimeEntryCosmosDBRepository: + return TimeEntryCosmosDBRepository() diff --git a/tests/time_tracker_api/smoke_test.py b/tests/time_tracker_api/smoke_test.py index aa1a75d1..0d425473 100644 --- a/tests/time_tracker_api/smoke_test.py +++ b/tests/time_tracker_api/smoke_test.py @@ -4,7 +4,11 @@ from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture -unexpected_errors_to_be_handled = [CosmosHttpResponseError, CosmosResourceNotFoundError, CosmosResourceExistsError] +from commons.data_access_layer.cosmos_db import CustomError + +unexpected_errors_to_be_handled = [CustomError(HTTPStatus.BAD_REQUEST, "Anything"), + CosmosHttpResponseError, CosmosResourceNotFoundError, + CosmosResourceExistsError, AttributeError] def test_app_exists(app): diff --git a/tests/time_tracker_api/time_entries/time_entries_model_test.py b/tests/time_tracker_api/time_entries/time_entries_model_test.py new file mode 100644 index 00000000..fd094041 --- /dev/null +++ b/tests/time_tracker_api/time_entries/time_entries_model_test.py @@ -0,0 +1,80 @@ +from datetime import datetime, timedelta + +import pytest +from faker import Faker + +from commons.data_access_layer.cosmos_db import current_datetime, datetime_str +from time_tracker_api.time_entries.time_entries_model import TimeEntryCosmosDBRepository, TimeEntryCosmosDBModel + +fake = Faker() + +now = current_datetime() +yesterday = current_datetime() - timedelta(days=1) +two_days_ago = current_datetime() - timedelta(days=2) + + +def create_time_entry(start_date: datetime, + end_date: datetime, + owner_id: str, + tenant_id: str, + time_entry_repository: TimeEntryCosmosDBRepository) -> TimeEntryCosmosDBModel: + data = { + "project_id": fake.uuid4(), + "activity_id": fake.uuid4(), + "description": fake.paragraph(nb_sentences=2), + "start_date": datetime_str(start_date), + "end_date": datetime_str(end_date), + "owner_id": owner_id, + "tenant_id": tenant_id + } + + created_item = time_entry_repository.create(data, mapper=TimeEntryCosmosDBModel) + return created_item + + +@pytest.mark.parametrize( + 'start_date,end_date', [(two_days_ago, yesterday), (now, None)] +) +def test_find_interception_with_date_range_should_find(start_date: datetime, + end_date: datetime, + 
owner_id: str, + tenant_id: str, + time_entry_repository: TimeEntryCosmosDBRepository): + existing_item = create_time_entry(start_date, end_date, owner_id, tenant_id, time_entry_repository) + + try: + result = time_entry_repository.find_interception_with_date_range(datetime_str(yesterday), datetime_str(now), + owner_id=owner_id, + partition_key_value=tenant_id) + + assert result is not None + assert len(result) > 0 + assert any([existing_item.id == item.id for item in result]) + finally: + time_entry_repository.delete_permanently(existing_item.id, partition_key_value=existing_item.tenant_id) + +def test_find_interception_should_ignore_id_of_existing_item(owner_id: str, + tenant_id: str, + time_entry_repository: TimeEntryCosmosDBRepository): + start_date = datetime_str(yesterday) + end_date = datetime_str(now) + existing_item = create_time_entry(yesterday, now, owner_id, tenant_id, time_entry_repository) + try: + + colliding_result = time_entry_repository.find_interception_with_date_range(start_date, end_date, + owner_id=owner_id, + partition_key_value=tenant_id) + + non_colliding_result = time_entry_repository.find_interception_with_date_range(start_date, end_date, + owner_id=owner_id, + partition_key_value=tenant_id, + ignore_id=existing_item.id) + + assert colliding_result is not None + assert any([existing_item.id == item.id for item in colliding_result]) + + assert non_colliding_result is not None + assert not any([existing_item.id == item.id for item in non_colliding_result]) + finally: + time_entry_repository.delete_permanently(existing_item.id, partition_key_value=existing_item.tenant_id) + diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index e45e4510..0d209988 100644 --- a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -1,26 +1,66 @@ +from datetime import timedelta +from unittest.mock import ANY + from faker import Faker from flask import json from flask.testing import FlaskClient from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture +from commons.data_access_layer.cosmos_db import current_datetime +from time_tracker_api.security import current_user_tenant_id + fake = Faker() +yesterday = current_datetime() - timedelta(days=1) valid_time_entry_input = { "project_id": fake.uuid4(), "activity_id": fake.uuid4(), "description": fake.paragraph(nb_sentences=2), - "start_date": fake.iso8601(end_datetime=None), - "end_date": fake.iso8601(end_datetime=None), + "start_date": str(yesterday.isoformat()), "owner_id": fake.uuid4(), "tenant_id": fake.uuid4() } + fake_time_entry = ({ "id": fake.random_int(1, 9999), "running": True, }).update(valid_time_entry_input) +def test_create_time_entry_with_invalid_date_range_should_raise_bad_request_error(client: FlaskClient, + mocker: MockFixture): + from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao + repository_container_create_item_mock = mocker.patch.object(time_entries_dao.repository.container, + 'create_item', + return_value=fake_time_entry) + + invalid_time_entry_input = valid_time_entry_input.copy() + invalid_time_entry_input.update({ + "end_date": str(yesterday.isoformat()) + }) + response = client.post("/time-entries", json=invalid_time_entry_input, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_container_create_item_mock.assert_not_called() + + +def 
test_create_time_entry_with_end_date_in_future_should_raise_bad_request_error(client: FlaskClient, + mocker: MockFixture): + from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao + repository_container_create_item_mock = mocker.patch.object(time_entries_dao.repository.container, + 'create_item', + return_value=fake_time_entry) + invalid_time_entry_input = valid_time_entry_input.copy() + invalid_time_entry_input.update({ + "end_date": str(fake.future_datetime().isoformat()) + }) + response = client.post("/time-entries", json=invalid_time_entry_input, follow_redirects=True) + + assert HTTPStatus.BAD_REQUEST == response.status_code + repository_container_create_item_mock.assert_not_called() + + def test_create_time_entry_should_succeed_with_valid_request(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao repository_create_mock = mocker.patch.object(time_entries_dao.repository, @@ -30,7 +70,7 @@ def test_create_time_entry_should_succeed_with_valid_request(client: FlaskClient response = client.post("/time-entries", json=valid_time_entry_input, follow_redirects=True) assert HTTPStatus.CREATED == response.status_code - repository_create_mock.assert_called_once_with(valid_time_entry_input) + repository_create_mock.assert_called_once() def test_create_time_entry_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): @@ -64,16 +104,16 @@ def test_list_all_time_entries(client: FlaskClient, mocker: MockFixture): def test_get_time_entry_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao - valid_id = fake.random_int(1, 9999) repository_find_mock = mocker.patch.object(time_entries_dao.repository, 'find', return_value=fake_time_entry) + valid_id = fake.random_int(1, 9999) response = client.get("/time-entries/%s" % valid_id, follow_redirects=True) assert HTTPStatus.OK == response.status_code fake_time_entry == json.loads(response.data) - repository_find_mock.assert_called_once_with(str(valid_id)) + repository_find_mock.assert_called_once_with(str(valid_id), partition_key_value=current_user_tenant_id()) def test_get_time_entry_should_response_with_unprocessable_entity_for_invalid_id_format(client: FlaskClient, @@ -90,13 +130,13 @@ def test_get_time_entry_should_response_with_unprocessable_entity_for_invalid_id response = client.get("/time-entries/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_find_mock.assert_called_once_with(str(invalid_id)) + repository_find_mock.assert_called_once_with(str(invalid_id), partition_key_value=current_user_tenant_id()) def test_update_time_entry_should_succeed_with_valid_data(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', return_value=fake_time_entry) valid_id = fake.random_int(1, 9999) @@ -106,7 +146,9 @@ def test_update_time_entry_should_succeed_with_valid_data(client: FlaskClient, m assert HTTPStatus.OK == response.status_code fake_time_entry == json.loads(response.data) - repository_update_mock.assert_called_once_with(str(valid_id), valid_time_entry_input) + repository_update_mock.assert_called_once_with(str(valid_id), + changes=valid_time_entry_input, + partition_key_value=current_user_tenant_id()) def 
test_update_time_entry_should_reject_bad_request(client: FlaskClient, mocker: MockFixture): @@ -132,7 +174,7 @@ def test_update_time_entry_should_return_not_found_with_invalid_id(client: Flask from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao from werkzeug.exceptions import NotFound repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', side_effect=NotFound) invalid_id = fake.random_int(1, 9999) @@ -141,13 +183,15 @@ def test_update_time_entry_should_return_not_found_with_invalid_id(client: Flask follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_update_mock.assert_called_once_with(str(invalid_id), valid_time_entry_input) + repository_update_mock.assert_called_once_with(str(invalid_id), + changes=valid_time_entry_input, + partition_key_value=current_user_tenant_id()) def test_delete_time_entry_should_succeed_with_valid_id(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao repository_remove_mock = mocker.patch.object(time_entries_dao.repository, - 'remove', + 'delete', return_value=None) valid_id = fake.random_int(1, 9999) @@ -155,7 +199,8 @@ def test_delete_time_entry_should_succeed_with_valid_id(client: FlaskClient, moc assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data - repository_remove_mock.assert_called_once_with(str(valid_id)) + repository_remove_mock.assert_called_once_with(str(valid_id), + partition_key_value=current_user_tenant_id()) def test_delete_time_entry_should_return_not_found_with_invalid_id(client: FlaskClient, @@ -163,14 +208,15 @@ def test_delete_time_entry_should_return_not_found_with_invalid_id(client: Flask from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao from werkzeug.exceptions import NotFound repository_remove_mock = mocker.patch.object(time_entries_dao.repository, - 'remove', + 'delete', side_effect=NotFound) invalid_id = fake.random_int(1, 9999) response = client.delete("/time-entries/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.NOT_FOUND == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_delete_time_entry_should_return_unprocessable_entity_for_invalid_id_format(client: FlaskClient, @@ -178,73 +224,74 @@ def test_delete_time_entry_should_return_unprocessable_entity_for_invalid_id_for from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao from werkzeug.exceptions import UnprocessableEntity repository_remove_mock = mocker.patch.object(time_entries_dao.repository, - 'remove', + 'delete', side_effect=UnprocessableEntity) invalid_id = fake.word() response = client.delete("/time-entries/%s" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_remove_mock.assert_called_once_with(str(invalid_id)) + repository_remove_mock.assert_called_once_with(str(invalid_id), + partition_key_value=current_user_tenant_id()) def test_stop_time_entry_with_valid_id(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', return_value=fake_time_entry) valid_id = fake.random_int(1, 9999) response = 
client.post("/time-entries/%s/stop" % valid_id, follow_redirects=True) assert HTTPStatus.OK == response.status_code - repository_update_mock.assert_called_once_with(str(valid_id), { - "end_date": mocker.ANY - }) + repository_update_mock.assert_called_once_with(str(valid_id), + changes={"end_date": mocker.ANY}, + partition_key_value=current_user_tenant_id()) -def test_stop_time_entry_with_invalid_id(client: FlaskClient, mocker: MockFixture): +def test_stop_time_entry_with_id_with_invalid_format(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao from werkzeug.exceptions import UnprocessableEntity repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', side_effect=UnprocessableEntity) invalid_id = fake.word() response = client.post("/time-entries/%s/stop" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_update_mock.assert_called_once_with(invalid_id, { - "end_date": mocker.ANY - }) + repository_update_mock.assert_called_once_with(invalid_id, + changes={"end_date": ANY}, + partition_key_value=current_user_tenant_id()) def test_restart_time_entry_with_valid_id(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', return_value=fake_time_entry) valid_id = fake.random_int(1, 9999) response = client.post("/time-entries/%s/restart" % valid_id, follow_redirects=True) assert HTTPStatus.OK == response.status_code - repository_update_mock.assert_called_once_with(str(valid_id), { - "end_date": None - }) + repository_update_mock.assert_called_once_with(str(valid_id), + changes={"end_date": None}, + partition_key_value=current_user_tenant_id()) -def test_restart_time_entry_with_invalid_id(client: FlaskClient, mocker: MockFixture): +def test_restart_time_entry_with_id_with_invalid_format(client: FlaskClient, mocker: MockFixture): from time_tracker_api.time_entries.time_entries_namespace import time_entries_dao from werkzeug.exceptions import UnprocessableEntity repository_update_mock = mocker.patch.object(time_entries_dao.repository, - 'update', + 'partial_update', side_effect=UnprocessableEntity) invalid_id = fake.word() response = client.post("/time-entries/%s/restart" % invalid_id, follow_redirects=True) assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code - repository_update_mock.assert_called_once_with(invalid_id, { - "end_date": None - }) + repository_update_mock.assert_called_once_with(invalid_id, + changes={"end_date": None}, + partition_key_value=current_user_tenant_id()) diff --git a/time_tracker_api/api.py b/time_tracker_api/api.py index 339d54c1..b8b026ce 100644 --- a/time_tracker_api/api.py +++ b/time_tracker_api/api.py @@ -4,6 +4,7 @@ from flask_restplus import Api, fields from flask_restplus._http import HTTPStatus +from commons.data_access_layer.cosmos_db import CustomError from time_tracker_api import __version__ faker = Faker() @@ -52,12 +53,12 @@ @api.errorhandler(CosmosResourceExistsError) def handle_cosmos_resource_exists_error(error): - return {'message': 'This item already exists'}, HTTPStatus.CONFLICT + return {'message': 'It already exists'}, HTTPStatus.CONFLICT @api.errorhandler(CosmosResourceNotFoundError) def handle_cosmos_resource_not_found_error(error): - return {'message': 'This item was not found'}, 
HTTPStatus.NOT_FOUND + return {'message': 'It was not found'}, HTTPStatus.NOT_FOUND @api.errorhandler(CosmosHttpResponseError) @@ -65,6 +66,16 @@ def handle_cosmos_http_response_error(error): return {'message': 'Invalid request. Please verify your data.'}, HTTPStatus.BAD_REQUEST +@api.errorhandler(AttributeError) +def handle_attribute_error(error): + return {'message': "There are missing attributes"}, HTTPStatus.UNPROCESSABLE_ENTITY + + +@api.errorhandler(CustomError) +def handle_custom_error(error): + return {'message': error.description}, error.code + + @api.errorhandler def default_error_handler(error): app.logger.error(error) diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 9cdb9024..daa780fe 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -1,53 +1,20 @@ +import abc +from dataclasses import dataclass, field +from typing import List, Callable + from azure.cosmos import PartitionKey -from sqlalchemy_utils import ScalarListType +from flask_restplus._http import HTTPStatus +from commons.data_access_layer.cosmos_db import CosmosDBDao, CosmosDBRepository, CustomError, CosmosDBModel, \ + current_datetime, datetime_str from commons.data_access_layer.database import CRUDDao +from time_tracker_api.security import current_user_tenant_id class TimeEntriesDao(CRUDDao): - pass - - -def create_dao() -> TimeEntriesDao: - from commons.data_access_layer.sql import db - from commons.data_access_layer.database import COMMENTS_MAX_LENGTH - from sqlalchemy_utils import UUIDType - import uuid - from commons.data_access_layer.sql import SQLCRUDDao - - class TimeEntrySQLModel(db.Model): - __tablename__ = 'time_entry' - id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4) - description = db.Column(db.String(COMMENTS_MAX_LENGTH)) - start_date = db.Column(db.DateTime, server_default=db.func.now()) - end_date = db.Column(db.DateTime) - project_id = db.Column(UUIDType(binary=False), - db.ForeignKey('project.id'), - nullable=False) - activity_id = db.Column(UUIDType(binary=False), - db.ForeignKey('activity.id'), - nullable=False) - technologies = db.Column(ScalarListType()) - uri = db.Column(db.String(500)) - owner_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - deleted = db.Column(UUIDType(binary=False), default=uuid.uuid4) - tenant_id = db.Column(UUIDType(binary=False), default=uuid.uuid4) - - @property - def running(self): - return self.end_date is None - - def __repr__(self): - return '