From 80c256ae554614ff1b13ed606b1e4598da2eed9d Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Sat, 13 Nov 2021 14:57:16 -0500 Subject: [PATCH 01/33] test: TT-384 revert to origin get_all (#345) --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index ddb46411..158c8053 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all_v1( + def get_all( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all_v1( ) return activities - def get_all(self, conditions: dict = None) -> list: + def get_all_test(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From 80f4ed136b81c14f4265384bdd888bff2b3c6206 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Mon, 15 Nov 2021 09:10:10 -0500 Subject: [PATCH 02/33] test: TT-384 get all activities from blob storage (#348) --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..ddb46411 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From 3a99add39a3130c540d86b02c5a69dbda8536e8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Mon, 15 Nov 2021 09:53:25 -0500 Subject: [PATCH 03/33] feat: TT-357 Create V2 Activities Azure DAO (#334) * feat: TT-357 Change Json Implementation for SQL * fix: TT-357 Resolution of comments * fix: TT-357 Update requirements * Refactor: TT-357 correction of FlakeV8 * fix: TT-357 change of an environment variable to a constant * refactor: TT-357 Refactor update and create activity Co-authored-by: Daniela Garcia --- V2/.flake8 | 2 +- V2/Makefile | 3 +- V2/create_activity/function.json | 22 +++ V2/delete_activity/function.json | 22 +++ V2/docker-compose.yml | 10 ++ V2/get_activities/function.json | 22 +++ V2/requirements.txt | 6 +- V2/tests/api/api_fixtures.py | 41 ----- .../azure/activity_azure_endpoints_test.py | 129 +++++++++------ V2/tests/conftest.py | 2 +- V2/tests/fixtures.py | 35 ++++ .../daos/activities_json_dao_test.py | 152 ------------------ .../daos/activities_sql_dao_test.py | 138 ++++++++++++++++ .../unit/services/activity_service_test.py | 8 +- .../use_cases/activities_use_case_test.py | 30 ++-- V2/time_tracker/_infrastructure/__init__.py | 3 + V2/time_tracker/_infrastructure/_config.py | 20 +++ V2/time_tracker/_infrastructure/_db.py | 20 +++ .../_activities/_create_activity.py | 12 +- .../_activities/_delete_activity.py | 49 +++--- .../_activities/_get_activities.py | 59 +++---- .../_activities/_update_activity.py | 70 ++++---- .../activities/_domain/_entities/_activity.py | 7 +- .../_persistence_contracts/_activities_dao.py | 8 +- .../activities/_domain/_services/_activity.py | 12 +- .../_use_cases/_create_activity_use_case.py | 4 +- .../_use_cases/_delete_activity_use_case.py | 2 +- .../_get_activity_by_id_use_case.py | 2 +- .../_use_cases/_update_activity_use_case.py | 4 +- .../activities/_infrastructure/__init__.py | 2 +- .../_data_persistence/__init__.py | 2 +- .../_data_persistence/_activities_json_dao.py | 105 ------------ .../_data_persistence/_activities_sql_dao.py | 67 ++++++++ .../_data_persistence/activities_data.json | 65 -------- V2/time_tracker/activities/interface.py | 2 +- V2/update_activity/function.json | 22 +++ 36 files changed, 617 insertions(+), 542 deletions(-) create mode 100644 V2/create_activity/function.json create mode 100644 V2/delete_activity/function.json create mode 100644 V2/docker-compose.yml create mode 100644 V2/get_activities/function.json delete mode 100644 V2/tests/api/api_fixtures.py create mode 100644 V2/tests/fixtures.py delete mode 100644 V2/tests/integration/daos/activities_json_dao_test.py create mode 100644 V2/tests/integration/daos/activities_sql_dao_test.py create mode 100644 V2/time_tracker/_infrastructure/__init__.py create mode 100644 V2/time_tracker/_infrastructure/_config.py create mode 100644 V2/time_tracker/_infrastructure/_db.py delete mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py create mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py delete mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json create mode 100644 V2/update_activity/function.json diff --git a/V2/.flake8 b/V2/.flake8 index cb282cae..ecba83ba 100644 --- a/V2/.flake8 +++ b/V2/.flake8 @@ -1,4 +1,4 @@ [flake8] -exclude = .git,__pycache__,./node_modules, +exclude = .git,__pycache__,./node_modules,.venv max-complexity = 10 max_line_length = 120 \ No newline at end of file diff --git a/V2/Makefile b/V2/Makefile index 9a0956ba..45080238 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -4,4 +4,5 @@ install: pip install --upgrade pip pip install -r requirements.txt @echo "Completed! " - +start-local: + docker compose up \ No newline at end of file diff --git a/V2/create_activity/function.json b/V2/create_activity/function.json new file mode 100644 index 00000000..ed3454a9 --- /dev/null +++ b/V2/create_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/", + "authLevel": "anonymous", + "methods": [ + "POST" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "create_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/delete_activity/function.json b/V2/delete_activity/function.json new file mode 100644 index 00000000..d51170fd --- /dev/null +++ b/V2/delete_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id}", + "authLevel": "anonymous", + "methods": [ + "DELETE" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "delete_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/docker-compose.yml b/V2/docker-compose.yml new file mode 100644 index 00000000..a89f5250 --- /dev/null +++ b/V2/docker-compose.yml @@ -0,0 +1,10 @@ +version: '3.9' +services: + database: + image: postgres:14 + ports: + - "5433:5432" + environment: + - POSTGRES_USER=${DB_USER} + - POSTGRES_PASSWORD=${DB_PASS} + - POSTGRES_DB=${DB_NAME} \ No newline at end of file diff --git a/V2/get_activities/function.json b/V2/get_activities/function.json new file mode 100644 index 00000000..ee1efe53 --- /dev/null +++ b/V2/get_activities/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id:?}", + "authLevel": "anonymous", + "methods": [ + "GET" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "get_activities", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/requirements.txt b/V2/requirements.txt index c651bb35..8be0a2a8 100644 --- a/V2/requirements.txt +++ b/V2/requirements.txt @@ -10,4 +10,8 @@ flake8==4.0.1 pytest-mock # To create sample content in tests and API documentation -Faker==4.0.2 \ No newline at end of file +Faker==4.0.2 + +#SQL ALCHEMY +SQLAlchemy==1.4.24 +psycopg2==2.9.1 \ No newline at end of file diff --git a/V2/tests/api/api_fixtures.py b/V2/tests/api/api_fixtures.py deleted file mode 100644 index 21b58021..00000000 --- a/V2/tests/api/api_fixtures.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -import pytest -import shutil - - -@pytest.fixture -def create_temp_activities(tmpdir_factory): - temporary_directory = tmpdir_factory.mktemp("tmp") - json_file = temporary_directory.join("activities.json") - activities = [ - { - 'id': 'c61a4a49-3364-49a3-a7f7-0c5f2d15072b', - 'name': 'Development', - 'description': 'Development', - 'deleted': 'b4327ba6-9f96-49ee-a9ac-3c1edf525172', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': '94ec92e2-a500-4700-a9f6-e41eb7b5507c', - 'name': 'Management', - 'description': 'Description of management', - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': 'd45c770a-b1a0-4bd8-a713-22c01a23e41b', - 'name': 'Operations', - 'description': 'Operation activities performed.', - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - ] - - with open(json_file, 'w') as outfile: - json.dump(activities, outfile) - - yield activities, json_file - shutil.rmtree(temporary_directory) diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index e3bf4ffe..9b2618a8 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,108 +1,135 @@ -from time_tracker.activities._application import _activities as activities +import pytest +import json from faker import Faker import azure.functions as func -import json +import time_tracker.activities._application._activities as azure_activities +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB +from time_tracker.activities import _domain + +ACTIVITY_URL = '/api/activities/' -ACTIVITY_URL = "/api/activities/" + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> dict: + def _new_activity(activity: _domain.Activity, database: DB): + dao = infrastructure.ActivitiesSQLDao(database) + new_activity = dao.create(activity) + return new_activity.__dict__ + return _new_activity def test__activity_azure_endpoint__returns_all_activities( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._get_activities.JSON_PATH = tmp_directory - req = func.HttpRequest(method="GET", body=None, url=ACTIVITY_URL) - - response = activities.get_activities(req) + fake_database = create_fake_database + existent_activities = [activity_factory(), activity_factory()] + inserted_activities = [ + insert_activity(existent_activities[0], fake_database), + insert_activity(existent_activities[1], fake_database) + ] + + azure_activities._get_activities.DATABASE = fake_database + req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) + response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 - assert activities_json_data == json.dumps(activities_json) + assert activities_json_data == json.dumps(inserted_activities) def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._get_activities.JSON_PATH = tmp_directory + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._get_activities.DATABASE = fake_database req = func.HttpRequest( - method="GET", + method='GET', body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.get_activities(req) + response = azure_activities._get_activities.get_activities(req) activitiy_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 - assert activitiy_json_data == json.dumps(activities_json[0]) + assert activitiy_json_data == json.dumps(inserted_activity) def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._delete_activity.JSON_PATH = tmp_directory + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._delete_activity.DATABASE = fake_database req = func.HttpRequest( - method="DELETE", + method='DELETE', body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.delete_activity(req) + response = azure_activities._delete_activity.delete_activity(req) activity_json_data = json.loads(response.get_body().decode("utf-8")) assert response.status_code == 200 - assert activity_json_data["status"] == "inactive" + assert activity_json_data['status'] == 0 + assert activity_json_data['deleted'] is True def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._update_activity.JSON_PATH = tmp_directory - activity_data = {"description": Faker().sentence()} + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._update_activity.DATABASE = fake_database + activity_body = {"description": Faker().sentence()} req = func.HttpRequest( - method="PUT", - body=json.dumps(activity_data).encode("utf-8"), + method='PUT', + body=json.dumps(activity_body).encode("utf-8"), url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.update_activity(req) + response = azure_activities._update_activity.update_activity(req) activitiy_json_data = response.get_body().decode("utf-8") - new_activity = {**activities_json[0], **activity_data} + inserted_activity.update(activity_body) assert response.status_code == 200 - assert activitiy_json_data == json.dumps(new_activity) + assert activitiy_json_data == json.dumps(inserted_activity) def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_temp_activities, -): - activities_json, tmp_directory = create_temp_activities - activities._create_activity._JSON_PATH = tmp_directory - + create_fake_database, + ): + azure_activities._create_activity.DATABASE = create_fake_database activity_body = { - "id": None, - "name": Faker().user_name(), - "description": Faker().sentence(), - "deleted": Faker().uuid4(), - "status": "active", - "tenant_id": Faker().uuid4(), + 'id': None, + 'name': Faker().user_name(), + 'description': Faker().sentence(), + 'deleted': False, + 'status': 1 } body = json.dumps(activity_body).encode("utf-8") req = func.HttpRequest( - method="POST", - body=body, - url=ACTIVITY_URL, + method='POST', + body=body, + url=ACTIVITY_URL, ) - response = activities.create_activity(req) - activitiy_json_data = response.get_body() + response = azure_activities._create_activity.create_activity(req) + activitiy_json_data = json.loads(response.get_body()) + activity_body['id'] = activitiy_json_data['id'] + assert response.status_code == 201 - assert activitiy_json_data == body + assert activitiy_json_data == activity_body diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index 2741ce95..d1c4928f 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,2 +1,2 @@ # flake8: noqa -from tests.api.api_fixtures import create_temp_activities +from fixtures import _activity_factory, _create_fake_dao, _create_fake_database \ No newline at end of file diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py new file mode 100644 index 00000000..d9539035 --- /dev/null +++ b/V2/tests/fixtures.py @@ -0,0 +1,35 @@ +import pytest + +import time_tracker.activities._domain as domain +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB +from faker import Faker + + +@pytest.fixture(name='activity_factory') +def _activity_factory() -> domain.Activity: + def _make_activity( + name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 + ): + activity = domain.Activity( + id=None, + name=name, + description=description, + deleted=deleted, + status=status + ) + return activity + return _make_activity + + +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + return dao + + +@pytest.fixture(name='create_fake_database') +def _create_fake_database() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + return db_fake diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py deleted file mode 100644 index 8eff9609..00000000 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ /dev/null @@ -1,152 +0,0 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import Activity -from faker import Faker -import json -import pytest -import typing - - -fake_activities = [ - { - "id": Faker().uuid4(), - "name": Faker().user_name(), - "description": Faker().sentence(), - "deleted": Faker().uuid4(), - "status": "active", - "tenant_id": Faker().uuid4(), - } -] - - -@pytest.fixture(name="create_fake_activities") -def _create_fake_activities(mocker) -> typing.List[Activity]: - def _creator(activities): - read_data = json.dumps(activities) - mocker.patch("builtins.open", mocker.mock_open(read_data=read_data)) - return [Activity(**activity) for activity in activities] - - return _creator - - -def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matches_its_id( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities(fake_activities) - activity_dto = activities.pop() - - result = activities_json_dao.get_by_id(activity_dto.id) - - assert result == activity_dto - - -def test__get_by_id__returns_none__when_no_activity_matches_its_id( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - - result = activities_json_dao.get_by_id(Faker().uuid4()) - - assert result is None - - -def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - number_of_activities = 3 - activities = create_fake_activities(fake_activities * number_of_activities) - - result = activities_json_dao.get_all() - - assert result == activities - - -def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities([]) - - result = activities_json_dao.get_all() - - assert result == activities - - -def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities( - [ - { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - ] - ) - - activity_dto = activities.pop() - result = activities_json_dao.delete(activity_dto.id) - - assert result.status == "inactive" - - -def test_delete__returns_none__when_no_activity_matching_its_id_is_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - - result = activities_json_dao.delete(Faker().uuid4()) - - assert result is None - - -def test_update__returns_an_activity_dto__when_found_one_activity_to_update( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities(fake_activities) - activity_dto = activities.pop() - activity_data = {"description": Faker().sentence()} - - result = activities_json_dao.update(activity_dto.id, activity_data) - new_activity = {**activity_dto.__dict__, **activity_data} - - assert result == Activity(**new_activity) - - -def test_update__returns_none__when_doesnt_found_one_activity_to_update( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - activity_data = {"description": Faker().sentence()} - - result = activities_json_dao.update("", activity_data) - - assert result is None - - -def test_create_activity__returns_an_activity_dto__when_create_an_activity_that_matches_attributes( - create_fake_activities, -): - create_fake_activities([]) - - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activity_data = { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - result = activities_json_dao.create_activity(activity_data) - assert result == Activity(**activity_data) diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_sql_dao_test.py new file mode 100644 index 00000000..25f62500 --- /dev/null +++ b/V2/tests/integration/daos/activities_sql_dao_test.py @@ -0,0 +1,138 @@ +import pytest +import typing +from faker import Faker + +import time_tracker.activities._domain as domain +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> domain.Activity: + def _new_activity(activity: domain.Activity, dao: domain.ActivitiesDao): + new_activity = dao.create(activity) + return new_activity + return _new_activity + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + query = dao.activity.delete() + dao.db.get_session().execute(query) + + +def test__create_activity__returns_a_activity_dto__when_saves_correctly_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + inserted_activity = dao.create(existent_activity) + + assert isinstance(inserted_activity, domain.Activity) + assert inserted_activity == existent_activity + + +def test_update__returns_an_update_activity__when_an_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + expected_description = Faker().sentence() + updated_activity = dao.update(inserted_activity.id, None, expected_description, None, None) + + assert isinstance(updated_activity, domain.Activity) + assert updated_activity.id == inserted_activity.id + assert updated_activity.description == expected_description + + +def test_update__returns_none__when_no_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + results = dao.update(existent_activity.id, Faker().name(), None, None, None) + + assert results is None + + +def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activities = [activity_factory(), activity_factory()] + inserted_activities = [ + insert_activity(existent_activities[0], dao), + insert_activity(existent_activities[1], dao) + ] + + activities = dao.get_all() + + assert isinstance(activities, typing.List) + assert activities == inserted_activities + + +def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matches_its_id_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + activity = dao.get_by_id(inserted_activity.id) + + assert isinstance(activity, domain.Activity) + assert activity.id == inserted_activity.id + assert activity == inserted_activity + + +def test__get_by_id__returns_none__when_no_activity_matches_its_id_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + activity = dao.get_by_id(existent_activity.id) + + assert activity is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities_with_sql_database( + create_fake_dao +): + activities = create_fake_dao.get_all() + + assert isinstance(activities, typing.List) + assert activities == [] + + +def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + activity = dao.delete(inserted_activity.id) + + assert isinstance(activity, domain.Activity) + assert activity.id == inserted_activity.id + assert activity.status == 0 + assert activity.deleted is True + + +def test_delete__returns_none__when_no_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + results = dao.delete(existent_activity.id) + + assert results is None diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index befdb1fb..e8816d42 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -53,7 +53,7 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( activity_service = ActivityService(activity_dao) updated_activity = activity_service.update( - Faker().uuid4(), Faker().pydict() + Faker().uuid4(), Faker().name(), Faker().sentence(), Faker().pyint(), Faker().pybool() ) assert activity_dao.update.called @@ -63,11 +63,11 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( def test__create_activity__uses_the_activity_dao__to_create_an_activity(mocker): expected_activity = mocker.Mock() activity_dao = mocker.Mock( - create_activity=mocker.Mock(return_value=expected_activity) + create=mocker.Mock(return_value=expected_activity) ) activity_service = ActivityService(activity_dao) - actual_activity = activity_service.create_activity(Faker().pydict()) + actual_activity = activity_service.create(Faker().pydict()) - assert activity_dao.create_activity.called + assert activity_dao.create.called assert expected_activity == actual_activity diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index 334c7489..ca711019 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -1,6 +1,7 @@ -from time_tracker.activities._domain import _use_cases -from pytest_mock import MockFixture from faker import Faker +from pytest_mock import MockFixture + +from time_tracker.activities._domain import _use_cases fake = Faker() @@ -36,17 +37,17 @@ def test__get_activity_by_id_function__uses_the_activity_service__to_retrieve_ac def test__create_activity_function__uses_the_activities_service__to_create_activity( - mocker: MockFixture, -): + mocker: MockFixture, activity_factory + ): expected_activity = mocker.Mock() activity_service = mocker.Mock( - create_activity=mocker.Mock(return_value=expected_activity) + create=mocker.Mock(return_value=expected_activity) ) activity_use_case = _use_cases.CreateActivityUseCase(activity_service) - actual_activity = activity_use_case.create_activity(fake.pydict()) + actual_activity = activity_use_case.create_activity(activity_factory()) - assert activity_service.create_activity.called + assert activity_service.create.called assert expected_activity == actual_activity @@ -54,7 +55,9 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit mocker: MockFixture, ): expected_activity = mocker.Mock() - activity_service = mocker.Mock(delete=mocker.Mock(return_value=expected_activity)) + activity_service = mocker.Mock( + delete=mocker.Mock(return_value=expected_activity) + ) activity_use_case = _use_cases.DeleteActivityUseCase(activity_service) deleted_activity = activity_use_case.delete_activity(fake.uuid4()) @@ -64,13 +67,18 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit def test__update_activity_function__uses_the_activities_service__to_update_an_activity( - mocker: MockFixture, + mocker: MockFixture, activity_factory ): expected_activity = mocker.Mock() - activity_service = mocker.Mock(update=mocker.Mock(return_value=expected_activity)) + activity_service = mocker.Mock( + update=mocker.Mock(return_value=expected_activity) + ) + new_activity = activity_factory() activity_use_case = _use_cases.UpdateActivityUseCase(activity_service) - updated_activity = activity_use_case.update_activity(fake.uuid4(), fake.pydict()) + updated_activity = activity_use_case.update_activity( + fake.uuid4(), new_activity.name, new_activity.description, new_activity.status, new_activity.deleted + ) assert activity_service.update.called assert expected_activity == updated_activity diff --git a/V2/time_tracker/_infrastructure/__init__.py b/V2/time_tracker/_infrastructure/__init__.py new file mode 100644 index 00000000..ab651958 --- /dev/null +++ b/V2/time_tracker/_infrastructure/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa +from ._db import DB +from ._config import Config diff --git a/V2/time_tracker/_infrastructure/_config.py b/V2/time_tracker/_infrastructure/_config.py new file mode 100644 index 00000000..7f8c8fa7 --- /dev/null +++ b/V2/time_tracker/_infrastructure/_config.py @@ -0,0 +1,20 @@ +import typing +import os + +CONNECTION_STRING = 'postgresql://root:root@localhost:5433/timetracker' + + +class Config(typing.NamedTuple): + DB_CONNECTION_STRING: str + DB_USER: str + DB_PASS: str + DB_NAME: str + + +def load_config(): + return Config( + CONNECTION_STRING if os.environ.get("DB_CONNECTION_STRING") is None else os.environ.get("DB_CONNECTION_STRING"), + os.environ.get("DB_USER"), + os.environ.get("DB_PASS"), + os.environ.get("DB_NAME") + ) diff --git a/V2/time_tracker/_infrastructure/_db.py b/V2/time_tracker/_infrastructure/_db.py new file mode 100644 index 00000000..8fe5cef1 --- /dev/null +++ b/V2/time_tracker/_infrastructure/_db.py @@ -0,0 +1,20 @@ +import sqlalchemy + +from . import _config + + +class DB(): + config = _config.load_config() + connection = None + engine = None + conn_string = config.DB_CONNECTION_STRING + metadata = sqlalchemy.MetaData() + + def __init__(self, conn_string: str = conn_string): + self.engine = sqlalchemy.create_engine(conn_string) + + def get_session(self): + if self.connection is None: + self.metadata.create_all(self.engine) + self.connection = self.engine.connect() + return self.connection diff --git a/V2/time_tracker/activities/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py index be53815a..94f3701d 100644 --- a/V2/time_tracker/activities/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -6,14 +6,13 @@ from ... import _domain from ... import _infrastructure +from time_tracker._infrastructure import DB -_JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +DATABASE = DB() def create_activity(req: func.HttpRequest) -> func.HttpResponse: - activity_dao = _infrastructure.ActivitiesJsonDao(_JSON_PATH) + activity_dao = _infrastructure.ActivitiesSQLDao(DATABASE) activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) @@ -30,11 +29,10 @@ def create_activity(req: func.HttpRequest) -> func.HttpResponse: name=activity_data['name'], description=activity_data['description'], status=activity_data['status'], - deleted=activity_data['deleted'], - tenant_id=activity_data['tenant_id'] + deleted=activity_data['deleted'] ) - created_activity = use_case.create_activity(activity_to_create.__dict__) + created_activity = use_case.create_activity(activity_to_create) if not create_activity: return func.HttpResponse( body={'error': 'activity could not be created'}, diff --git a/V2/time_tracker/activities/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py index 80d55446..14ada8ab 100644 --- a/V2/time_tracker/activities/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -1,36 +1,41 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, _use_cases - -import azure.functions as func import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def delete_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( 'Python HTTP trigger function processed a request to delete an activity.' ) - activity_id = req.route_params.get('id') - response = _delete(activity_id) - status_code = 200 if response != b'Not found' else 404 - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _delete(activity_id: str) -> str: - activity_use_case = _use_cases.DeleteActivityUseCase( - _create_activity_service(JSON_PATH) + try: + activity_id = int(req.route_params.get('id')) + response = _delete(activity_id) + status_code = 200 if response != b'Not found' else 404 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _delete(activity_id: int) -> str: + activity_use_case = _domain._use_cases.DeleteActivityUseCase( + _create_activity_service(DATABASE) ) activity = activity_use_case.delete_activity(activity_id) return json.dumps(activity.__dict__) if activity else b'Not found' -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py index 9f52069d..d92503dd 100644 --- a/V2/time_tracker/activities/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -1,13 +1,13 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, _use_cases - -import azure.functions as func import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def get_activities(req: func.HttpRequest) -> func.HttpResponse: @@ -17,21 +17,26 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: activity_id = req.route_params.get('id') status_code = 200 - if activity_id: - response = _get_by_id(activity_id) - if response == b'Not Found': - status_code = 404 - else: - response = _get_all() - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _get_by_id(activity_id: str) -> str: - activity_use_case = _use_cases.GetActivityUseCase( - _create_activity_service(JSON_PATH) + try: + if activity_id: + response = _get_by_id(int(activity_id)) + if response == b'Not Found': + status_code = 404 + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _get_by_id(activity_id: int) -> str: + activity_use_case = _domain._use_cases.GetActivityUseCase( + _create_activity_service(DATABASE) ) activity = activity_use_case.get_activity_by_id(activity_id) @@ -39,8 +44,8 @@ def _get_by_id(activity_id: str) -> str: def _get_all() -> str: - activities_use_case = _use_cases.GetActivitiesUseCase( - _create_activity_service(JSON_PATH) + activities_use_case = _domain._use_cases.GetActivitiesUseCase( + _create_activity_service(DATABASE) ) return json.dumps( [ @@ -50,6 +55,6 @@ def _get_all() -> str: ) -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py index 1709f77a..0933fd72 100644 --- a/V2/time_tracker/activities/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -1,44 +1,54 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, Activity, _use_cases - -import azure.functions as func import dataclasses import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def update_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( 'Python HTTP trigger function processed a request to update an activity.' ) - activity_id = req.route_params.get('id') - activity_data = req.get_json() if req.get_body() else {} - activity_keys = [field.name for field in dataclasses.fields(Activity)] - - if all(key in activity_keys for key in activity_data.keys()): - response = _update(activity_id, activity_data) - status_code = 200 - else: - response = b'Incorrect activity body' - status_code = 400 - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _update(activity_id: str, activity_data: dict) -> str: - activity_use_case = _use_cases.UpdateActivityUseCase( - _create_activity_service(JSON_PATH) + try: + activity_id = int(req.route_params.get('id')) + activity_data = req.get_json() if req.get_body() else {} + activity_keys = [field.name for field in dataclasses.fields(_domain.Activity)] + + if all(key in activity_keys for key in activity_data.keys()): + response = _update(activity_id, activity_data) + status_code = 200 + else: + response = b'Incorrect activity body' + status_code = 400 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _update(activity_id: int, activity_data: dict) -> str: + activity_use_case = _domain._use_cases.UpdateActivityUseCase( + _create_activity_service(DATABASE) ) - activity = activity_use_case.update_activity(activity_id, activity_data) + activity = activity_use_case.update_activity( + activity_id, activity_data.get("name"), + activity_data.get("description"), + activity_data.get("status"), + activity_data.get("deleted") + ) return json.dumps(activity.__dict__) if activity else b'Not Found' -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_domain/_entities/_activity.py b/V2/time_tracker/activities/_domain/_entities/_activity.py index 86f56ee9..cf574054 100644 --- a/V2/time_tracker/activities/_domain/_entities/_activity.py +++ b/V2/time_tracker/activities/_domain/_entities/_activity.py @@ -3,9 +3,8 @@ @dataclass(frozen=True) class Activity: - id: str + id: int name: str description: str - deleted: str - status: str - tenant_id: str + deleted: bool + status: int diff --git a/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py index 80b8c711..e079ed6a 100644 --- a/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py @@ -5,7 +5,7 @@ class ActivitiesDao(abc.ABC): @abc.abstractmethod - def get_by_id(self, id: str) -> Activity: + def get_by_id(self, id: int) -> Activity: pass @abc.abstractmethod @@ -13,13 +13,13 @@ def get_all(self) -> typing.List[Activity]: pass @abc.abstractmethod - def delete(self, id: str) -> Activity: + def delete(self, id: int) -> Activity: pass @abc.abstractmethod - def update(self, id: str, new_activity: dict) -> Activity: + def update(self, id: int, name: str, description: str, status: int, deleted: bool) -> Activity: pass @abc.abstractmethod - def create_activity(self, activity_data: dict) -> Activity: + def create(self, activity_data: Activity) -> Activity: pass diff --git a/V2/time_tracker/activities/_domain/_services/_activity.py b/V2/time_tracker/activities/_domain/_services/_activity.py index a564577a..a2c45e54 100644 --- a/V2/time_tracker/activities/_domain/_services/_activity.py +++ b/V2/time_tracker/activities/_domain/_services/_activity.py @@ -6,17 +6,17 @@ class ActivityService: def __init__(self, activities_dao: ActivitiesDao): self.activities_dao = activities_dao - def get_by_id(self, activity_id: str) -> Activity: + def get_by_id(self, activity_id: int) -> Activity: return self.activities_dao.get_by_id(activity_id) def get_all(self) -> typing.List[Activity]: return self.activities_dao.get_all() - def delete(self, activity_id: str) -> Activity: + def delete(self, activity_id: int) -> Activity: return self.activities_dao.delete(activity_id) - def update(self, activity_id: str, new_activity: dict) -> Activity: - return self.activities_dao.update(activity_id, new_activity) + def update(self, activity_id: int, name: str, description: str, status: int, deleted: bool) -> Activity: + return self.activities_dao.update(activity_id, name, description, status, deleted) - def create_activity(self, activity_data: dict) -> Activity: - return self.activities_dao.create_activity(activity_data) + def create(self, activity_data: Activity) -> Activity: + return self.activities_dao.create(activity_data) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py index 241718db..26d0f475 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py @@ -5,5 +5,5 @@ class CreateActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def create_activity(self, activity_data: dict) -> Activity: - return self.activity_service.create_activity(activity_data) + def create_activity(self, activity_data: Activity) -> Activity: + return self.activity_service.create(activity_data) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py index 5af54ee8..67fcf31c 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py @@ -5,5 +5,5 @@ class DeleteActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def delete_activity(self, id: str) -> Activity: + def delete_activity(self, id: int) -> Activity: return self.activity_service.delete(id) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py index 04ca442e..45dbbad0 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py @@ -5,5 +5,5 @@ class GetActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def get_activity_by_id(self, id: str) -> Activity: + def get_activity_by_id(self, id: int) -> Activity: return self.activity_service.get_by_id(id) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py index a890d85f..c270f465 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py @@ -6,6 +6,6 @@ def __init__(self, activity_service: ActivityService): self.activity_service = activity_service def update_activity( - self, activity_id: str, new_activity: dict + self, activity_id: int, name: str, description: str, status: int, deleted: bool ) -> Activity: - return self.activity_service.update(activity_id, new_activity) + return self.activity_service.update(activity_id, name, description, status, deleted) diff --git a/V2/time_tracker/activities/_infrastructure/__init__.py b/V2/time_tracker/activities/_infrastructure/__init__.py index 1734e5b8..b3896baf 100644 --- a/V2/time_tracker/activities/_infrastructure/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._data_persistence import ActivitiesJsonDao +from ._data_persistence import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py index d2a77fc4..1e7220c5 100644 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._activities_json_dao import ActivitiesJsonDao +from ._activities_sql_dao import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py deleted file mode 100644 index 60859a15..00000000 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py +++ /dev/null @@ -1,105 +0,0 @@ -from time_tracker.activities._domain import ActivitiesDao, Activity -import dataclasses -import json -import typing - - -class ActivitiesJsonDao(ActivitiesDao): - def __init__(self, json_data_file_path: str): - self.json_data_file_path = json_data_file_path - self.activity_keys = [ - field.name for field in dataclasses.fields(Activity) - ] - - def get_by_id(self, activity_id: str) -> Activity: - activity = { - activity.get('id'): activity - for activity in self.__get_activities_from_file() - }.get(activity_id) - - return self.__create_activity_dto(activity) if activity else None - - def get_all(self) -> typing.List[Activity]: - return [ - self.__create_activity_dto(activity) - for activity in self.__get_activities_from_file() - ] - - def delete(self, activity_id: str) -> Activity: - activity = self.get_by_id(activity_id) - if activity: - activity_deleted = {**activity.__dict__, 'status': 'inactive'} - activities_updated = list( - map( - lambda activity: activity - if activity.get('id') != activity_id - else activity_deleted, - self.__get_activities_from_file(), - ) - ) - - try: - file = open(self.json_data_file_path, 'w') - json.dump(activities_updated, file) - file.close() - - return self.__create_activity_dto(activity_deleted) - - except FileNotFoundError: - return None - - else: - return None - - def update(self, activity_id: str, new_activity: dict) -> Activity: - activity = self.get_by_id(activity_id) - if not activity: - return None - - new_activity = {**activity.__dict__, **new_activity} - - activities_updated = list( - map( - lambda activity: activity - if activity.get('id') != activity_id - else new_activity, - self.__get_activities_from_file(), - ) - ) - - try: - file = open(self.json_data_file_path, 'w') - json.dump(activities_updated, file) - file.close() - - return self.__create_activity_dto(new_activity) - - except FileNotFoundError: - return None - - def create_activity(self, activity_data: dict) -> Activity: - activities = self.__get_activities_from_file() - activities.append(activity_data) - - try: - with open(self.json_data_file_path, 'w') as outfile: - json.dump(activities, outfile) - - return self.__create_activity_dto(activity_data) - except FileNotFoundError: - print("Can not create activity") - - def __get_activities_from_file(self) -> typing.List[dict]: - try: - file = open(self.json_data_file_path) - activities = json.load(file) - file.close() - - return activities - - except FileNotFoundError: - return [] - - def __create_activity_dto(self, activity: dict) -> Activity: - activity = {key: activity.get(key) for key in self.activity_keys} - return Activity(**activity) diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py new file mode 100644 index 00000000..e69dd1a4 --- /dev/null +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py @@ -0,0 +1,67 @@ +import dataclasses +import typing + +import sqlalchemy +import sqlalchemy.sql as sql + +import time_tracker.activities._domain as domain +from time_tracker._infrastructure import _db + + +class ActivitiesSQLDao(domain.ActivitiesDao): + + def __init__(self, database: _db.DB): + self.activity_keys = [ + field.name for field in dataclasses.fields(domain.Activity) + ] + self.db = database + self.activity = sqlalchemy.Table( + 'activity', + self.db.metadata, + sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True, autoincrement=True), + sqlalchemy.Column('name', sqlalchemy.String), + sqlalchemy.Column('description', sqlalchemy.String), + sqlalchemy.Column('deleted', sqlalchemy.Boolean), + sqlalchemy.Column('status', sqlalchemy.SmallInteger), + extend_existing=True, + ) + + def get_by_id(self, activity_id: int) -> domain.Activity: + query = sql.select(self.activity).where(self.activity.c.id == activity_id) + activity = self.db.get_session().execute(query).one_or_none() + return self.__create_activity_dto(dict(activity)) if activity else None + + def get_all(self) -> typing.List[domain.Activity]: + query = sql.select(self.activity) + result = self.db.get_session().execute(query) + return [ + self.__create_activity_dto(dict(activity)) + for activity in result + ] + + def create(self, activity_data: domain.Activity) -> domain.Activity: + new_activity = activity_data.__dict__ + new_activity.pop('id', None) + new_activity.update({"status": 1, "deleted": False}) + + query = self.activity.insert().values(new_activity).return_defaults() + activity = self.db.get_session().execute(query) + new_activity.update({"id": activity.inserted_primary_key[0]}) + return self.__create_activity_dto(new_activity) + + def delete(self, activity_id: int) -> domain.Activity: + query = self.activity.update().where(self.activity.c.id == activity_id).values({"status": 0, "deleted": True}) + self.db.get_session().execute(query) + return self.get_by_id(activity_id) + + def update(self, activity_id: int, name: str, description: str, status: int, deleted: bool) -> domain.Activity: + new_activity = {"name": name, "description": description, "status": status, "deleted": deleted} + activity_validated = {key: value for (key, value) in new_activity.items() if value is not None} + + query = self.activity.update().where(self.activity.c.id == activity_id).values(activity_validated) + self.db.get_session().execute(query) + return self.get_by_id(activity_id) + + def __create_activity_dto(self, activity: dict) -> domain.Activity: + activity = {key: activity.get(key)for key in self.activity_keys} + return domain.Activity(**activity) diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json b/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json deleted file mode 100644 index 961251db..00000000 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json +++ /dev/null @@ -1,65 +0,0 @@ -[ - { - "name": "Development", - "description": "Development", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "c61a4a49-3364-49a3-a7f7-0c5f2d15072b", - "_rid": "QUwFAPuumiRhAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRhAAAAAAAAAA==/", - "_etag": "\"4e006cc9-0000-0500-0000-607dcc0d0000\"", - "_attachments": "attachments/", - "_last_event_ctx": { - "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "delete", - "description": null, - "container_id": "activity", - "session_id": null - }, - "deleted": "b4327ba6-9f96-49ee-a9ac-3c1edf525172", - "status": null, - "_ts": 1618856973 - }, - { - "name": "Management", - "description": null, - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "94ec92e2-a500-4700-a9f6-e41eb7b5507c", - "_last_event_ctx": { - "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "delete", - "description": null, - "container_id": "activity", - "session_id": null - }, - "_rid": "QUwFAPuumiRfAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRfAAAAAAAAAA==/", - "_etag": "\"4e0069c9-0000-0500-0000-607dcc0d0000\"", - "_attachments": "attachments/", - "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", - "status": null, - "_ts": 1618856973 - }, - { - "name": "Operations", - "description": "Operation activities performed.", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "d45c770a-b1a0-4bd8-a713-22c01a23e41b", - "_rid": "QUwFAPuumiRjAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRjAAAAAAAAAA==/", - "_etag": "\"09009a4d-0000-0500-0000-614b66fb0000\"", - "_attachments": "attachments/", - "_last_event_ctx": { - "user_id": "82ed0f65-051c-4898-890f-870805900e21", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "update", - "description": null, - "container_id": "activity", - "session_id": null - }, - "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", - "status": "active", - "_ts": 1632331515 - } -] diff --git a/V2/time_tracker/activities/interface.py b/V2/time_tracker/activities/interface.py index 877b631e..24c888ad 100644 --- a/V2/time_tracker/activities/interface.py +++ b/V2/time_tracker/activities/interface.py @@ -2,4 +2,4 @@ from ._application import get_activities from ._application import delete_activity from ._application import update_activity -from ._application import create_activity \ No newline at end of file +from ._application import create_activity diff --git a/V2/update_activity/function.json b/V2/update_activity/function.json new file mode 100644 index 00000000..97c9fb49 --- /dev/null +++ b/V2/update_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id}", + "authLevel": "anonymous", + "methods": [ + "PUT" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "update_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file From b869c09f890b6867a923b5a11331b1902870126f Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Mon, 15 Nov 2021 10:57:53 -0500 Subject: [PATCH 04/33] fix: TT-384 Revert get all activities from blob storage (#348) (#349) This reverts commit 80f4ed136b81c14f4265384bdd888bff2b3c6206. --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index ddb46411..158c8053 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all_v1( + def get_all( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all_v1( ) return activities - def get_all(self, conditions: dict = None) -> list: + def get_all_test(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From df3fe5caf7e4dc2e6b8f35590848e17673fc5a38 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 15 Nov 2021 16:15:18 +0000 Subject: [PATCH 05/33] 0.44.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ time_tracker_api/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index de620596..b6b74353 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.44.0 (2021-11-15) +### Feature +* TT-357 Create V2 Activities Azure DAO ([#334](https://github.com/ioet/time-tracker-backend/issues/334)) ([`3a99add`](https://github.com/ioet/time-tracker-backend/commit/3a99add39a3130c540d86b02c5a69dbda8536e8e)) + +### Fix +* TT-384 Revert get all activities from blob storage (#348) ([#349](https://github.com/ioet/time-tracker-backend/issues/349)) ([`b869c09`](https://github.com/ioet/time-tracker-backend/commit/b869c09f890b6867a923b5a11331b1902870126f)) + ## v0.43.1 (2021-11-12) ### Fix * TT-393 userid convert to list ([#339](https://github.com/ioet/time-tracker-backend/issues/339)) ([`6e2108e`](https://github.com/ioet/time-tracker-backend/commit/6e2108ee03dcfd48fa9676a69591248a2467f27c)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index d5f90b8c..a262ca73 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.43.1' +__version__ = '0.44.0' From 1448fc2bc8dce7d8f50c758a910182d7fe9c011a Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 17 Nov 2021 16:00:01 -0500 Subject: [PATCH 06/33] ci: TT-411 inject secrets environment and test_db_connection (#351) --- .github/workflows/python-package.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 36bff27a..2f64bc87 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,6 +29,14 @@ jobs: run: | pip install -r V2/requirements.txt + - name: Inject Secrets + env: + ENVIRONMENT: ${{ secrets.environment }} + TEST_DB_CONNECTION: ${{ secrets.test_db_connection }} + run: | + echo $ENVIRONMENT + echo $TEST_DB_CONNECTION + - name: Lint with flake8 run: | cd V2 From 32ee36f39e81866c2f0767cf243c61afde6841c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20Cobe=C3=B1a=20Cede=C3=B1o?= Date: Thu, 18 Nov 2021 10:05:00 -0500 Subject: [PATCH 07/33] feat: TT-399 Config use makefile to executing tests (#350) * feat: TT-399 Config use makefile to executing tests * feat: TT-399 quit comment on line for pip upgrade * fix: TT-399 inject environment variable for tests Co-authored-by: Alexander --- V2/Makefile | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/V2/Makefile b/V2/Makefile index 45080238..135e96d0 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -1,8 +1,33 @@ +.PHONY: help +help: + @echo "---------------HELP-----------------" + @echo "To install the dependencies type make install" + @echo "To test the project type make test" + @echo "To run the local database type make start-local" + @echo "To run all comands type make ci" + @echo "------------------------------------" + +.PHONY: install install: - @echo "Installing Time Tracker" + @echo "=========================================Installing dependencies Time Tracker=========================================" npm install pip install --upgrade pip pip install -r requirements.txt @echo "Completed! " + +.PHONY: test +test: export ENVIRONMENT = test +test: export TEST_DB_CONNECTION = sqlite:///:memory: +test: + @echo "=========================================Lint with flake8=========================================" + flake8 . --show-source --statistics + @echo "Completed flake8!" + @echo "=========================================Test with pytest=========================================" + python -m pytest -v + @echo "Completed test!" + start-local: - docker compose up \ No newline at end of file + docker compose up + +.PHONY: ci +ci: install test \ No newline at end of file From 10cc4269e4e60c6eff77bf1cf02cdf0d31dac86f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20Cobe=C3=B1a=20Cede=C3=B1o?= Date: Thu, 18 Nov 2021 17:49:50 -0500 Subject: [PATCH 08/33] docs: TT-399 Readme update how to use makefile (#354) * docs: TT-399 Readme update how to use makefile * docs: TT-399 Readme update reqs to use makefile * docs: TT-399 Text correction --- V2/README.md | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/V2/README.md b/V2/README.md index e84c0268..f414079d 100644 --- a/V2/README.md +++ b/V2/README.md @@ -1,3 +1,23 @@ -# Azure Functions +# time-tracker-api V2 Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. + +## Requirements to use makefile + +- Python version 3.6 or 3.7. + +- Use an environment to install requirements (pyenv). + +## How to use makefile + +Execute the next command to show makefile help: + +```shell +make help +``` + +- To install the dependencies type the command ```make install``` + +- To test the project type the command ```make test``` + +- To run the local database type the command ```make start-local``` From 5f107f33cb640f7fa8e498db2157efb2d11f401d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Fri, 19 Nov 2021 09:37:54 -0500 Subject: [PATCH 09/33] feat: TT-401-Post-v2-time-entries (#344) * feat: TT-401 Implemented service, end-point, dao, test- time entries * feat: TT-401 validated request create time entry * fix: TT-401 implemented faker url * refactor: TT-401 changed the variable name * fix: implementation of the database connection * refactor: TT-401 fixtures changed * fix: TT-401 solution of comments, change of config * ci: TT-401 fix inject secrets * refactor: TT-401 rename of functions and imports * fix: TT-401 changed test db storage --- .github/workflows/python-package.yml | 11 +--- V2/create_activity/function.json | 22 ------- V2/delete_activity/function.json | 22 ------- V2/get_activities/function.json | 22 ------- V2/serverless.yml | 10 +++ .../azure/activity_azure_endpoints_test.py | 49 +++++--------- .../azure/time_entry_azure_endpoints_test.py | 28 ++++++++ V2/tests/conftest.py | 3 +- V2/tests/fixtures.py | 64 +++++++++++++++---- .../daos/activities_sql_dao_test.py | 21 +++--- .../integration/daos/time_entries_dao_test.py | 48 ++++++++++++++ .../unit/services/time_entry_service_test.py | 14 ++++ .../use_cases/time_entries_use_case_test.py | 18 ++++++ V2/time_tracker/_infrastructure/_config.py | 15 ++--- V2/time_tracker/_infrastructure/_db.py | 2 +- .../time_entries/_application/__init__.py | 2 + .../_application/_time_entries/__init__.py | 2 + .../_time_entries/_create_time_entry.py | 63 ++++++++++++++++++ .../time_entries/_domain/__init__.py | 7 ++ .../_domain/_entities/__init__.py | 2 + .../_domain/_entities/_time_entry.py | 17 +++++ .../_persistence_contracts/__init__.py | 2 + .../_time_entries_dao.py | 9 +++ .../_domain/_services/__init__.py | 2 + .../_domain/_services/_time_entry.py | 10 +++ .../_domain/_use_cases/__init__.py | 2 + .../_use_cases/_create_time_entry_use_case.py | 10 +++ .../time_entries/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_time_entries_dao.py | 49 ++++++++++++++ V2/time_tracker/time_entries/interface.py | 2 + V2/update_activity/function.json | 22 ------- 32 files changed, 388 insertions(+), 166 deletions(-) delete mode 100644 V2/create_activity/function.json delete mode 100644 V2/delete_activity/function.json delete mode 100644 V2/get_activities/function.json create mode 100644 V2/tests/api/azure/time_entry_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/time_entries_dao_test.py create mode 100644 V2/tests/unit/services/time_entry_service_test.py create mode 100644 V2/tests/unit/use_cases/time_entries_use_case_test.py create mode 100644 V2/time_tracker/time_entries/_application/__init__.py create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/__init__.py create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_entities/_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py create mode 100644 V2/time_tracker/time_entries/_domain/_services/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_services/_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/__init__.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py create mode 100644 V2/time_tracker/time_entries/interface.py delete mode 100644 V2/update_activity/function.json diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2f64bc87..1c700563 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -14,6 +14,9 @@ jobs: runs-on: ubuntu-latest strategy: max-parallel: 5 + env: + ENVIRONMENT: ${{ secrets.ENVIRONMENT }} + TEST_DB_CONNECTION: ${{ secrets.TEST_DB_CONNECTION }} steps: - uses: actions/checkout@v2 - name: Set up Python 3.10.0 @@ -29,14 +32,6 @@ jobs: run: | pip install -r V2/requirements.txt - - name: Inject Secrets - env: - ENVIRONMENT: ${{ secrets.environment }} - TEST_DB_CONNECTION: ${{ secrets.test_db_connection }} - run: | - echo $ENVIRONMENT - echo $TEST_DB_CONNECTION - - name: Lint with flake8 run: | cd V2 diff --git a/V2/create_activity/function.json b/V2/create_activity/function.json deleted file mode 100644 index ed3454a9..00000000 --- a/V2/create_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/", - "authLevel": "anonymous", - "methods": [ - "POST" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "create_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/delete_activity/function.json b/V2/delete_activity/function.json deleted file mode 100644 index d51170fd..00000000 --- a/V2/delete_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "DELETE" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "delete_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/get_activities/function.json b/V2/get_activities/function.json deleted file mode 100644 index ee1efe53..00000000 --- a/V2/get_activities/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id:?}", - "authLevel": "anonymous", - "methods": [ - "GET" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "get_activities", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/serverless.yml b/V2/serverless.yml index 0eb3f42f..c6c5e34b 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -76,3 +76,13 @@ functions: - POST route: activities/ authLevel: anonymous + + create_time_entry: + handler: time_tracker/time_entries/interface.create_time_entry + events: + - http: true + x-azure-settings: + methods: + - POST + route: time-entries/ + authLevel: anonymous diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 9b2618a8..994c74c7 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,37 +1,24 @@ -import pytest import json from faker import Faker import azure.functions as func import time_tracker.activities._application._activities as azure_activities -import time_tracker.activities._infrastructure as infrastructure -from time_tracker._infrastructure import DB -from time_tracker.activities import _domain ACTIVITY_URL = '/api/activities/' -@pytest.fixture(name='insert_activity') -def _insert_activity() -> dict: - def _new_activity(activity: _domain.Activity, database: DB): - dao = infrastructure.ActivitiesSQLDao(database) - new_activity = dao.create(activity) - return new_activity.__dict__ - return _new_activity - - def test__activity_azure_endpoint__returns_all_activities( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], fake_database), - insert_activity(existent_activities[1], fake_database) + insert_activity(existent_activities[0], test_db).__dict__, + insert_activity(existent_activities[1], test_db).__dict__ ] - azure_activities._get_activities.DATABASE = fake_database + azure_activities._get_activities.DATABASE = test_db + req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") @@ -41,13 +28,12 @@ def test__activity_azure_endpoint__returns_all_activities( def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._get_activities.DATABASE = fake_database + azure_activities._get_activities.DATABASE = test_db req = func.HttpRequest( method='GET', body=None, @@ -63,13 +49,12 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._delete_activity.DATABASE = fake_database + azure_activities._delete_activity.DATABASE = test_db req = func.HttpRequest( method='DELETE', body=None, @@ -86,13 +71,12 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._update_activity.DATABASE = fake_database + azure_activities._update_activity.DATABASE = test_db activity_body = {"description": Faker().sentence()} req = func.HttpRequest( method='PUT', @@ -109,10 +93,7 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act assert activitiy_json_data == json.dumps(inserted_activity) -def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_fake_database, - ): - azure_activities._create_activity.DATABASE = create_fake_database +def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes(): activity_body = { 'id': None, 'name': Faker().user_name(), diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py new file mode 100644 index 00000000..f801dad9 --- /dev/null +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -0,0 +1,28 @@ +import json + +import azure.functions as func + +import time_tracker.time_entries._application._time_entries as azure_time_entries + +TIME_ENTRY_URL = "/api/time-entries/" + + +def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_all_attributes( + test_db, time_entry_factory, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entry_body = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]").__dict__ + + body = json.dumps(time_entry_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=TIME_ENTRY_URL, + ) + + response = azure_time_entries._create_time_entry.create_time_entry(req) + time_entry_json_data = json.loads(response.get_body()) + time_entry_body['id'] = time_entry_json_data['id'] + + assert response.status_code == 201 + assert time_entry_json_data == time_entry_body diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index d1c4928f..cf6e362f 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,2 +1,3 @@ # flake8: noqa -from fixtures import _activity_factory, _create_fake_dao, _create_fake_database \ No newline at end of file +from fixtures import _activity_factory, _test_db, _insert_activity +from fixtures import _time_entry_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index d9539035..51ee5e5d 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -1,17 +1,18 @@ import pytest +from faker import Faker -import time_tracker.activities._domain as domain -import time_tracker.activities._infrastructure as infrastructure +import time_tracker.activities._domain as activities_domain +import time_tracker.activities._infrastructure as activities_infrastructure +import time_tracker.time_entries._domain as time_entries_domain from time_tracker._infrastructure import DB -from faker import Faker @pytest.fixture(name='activity_factory') -def _activity_factory() -> domain.Activity: +def _activity_factory() -> activities_domain.Activity: def _make_activity( name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 ): - activity = domain.Activity( + activity = activities_domain.Activity( id=None, name=name, description=description, @@ -22,14 +23,49 @@ def _make_activity( return _make_activity -@pytest.fixture(name='create_fake_dao') -def _create_fake_dao() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - dao = infrastructure.ActivitiesSQLDao(db_fake) - return dao +@pytest.fixture(name='test_db') +def _test_db() -> DB: + db_fake = DB() + db_fake.get_session().execute("pragma foreign_keys=ON") + return db_fake -@pytest.fixture(name='create_fake_database') -def _create_fake_database() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - return db_fake +@pytest.fixture(name='time_entry_factory') +def _time_entry_factory() -> time_entries_domain.TimeEntry: + def _make_time_entry( + id=Faker().random_int(), + start_date=str(Faker().date_time()), + owner_id=Faker().random_int(), + description=Faker().sentence(), + activity_id=Faker().random_int(), + uri=Faker().domain_name(), + technologies=["jira", "git"], + end_date=str(Faker().date_time()), + deleted=False, + timezone_offset="300", + project_id=Faker().random_int(), + ): + time_entry = time_entries_domain.TimeEntry( + id=id, + start_date=start_date, + owner_id=owner_id, + description=description, + activity_id=activity_id, + uri=uri, + technologies=technologies, + end_date=end_date, + deleted=deleted, + timezone_offset=timezone_offset, + project_id=project_id, + ) + return time_entry + return _make_time_entry + + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> dict: + def _new_activity(activity: activities_domain.Activity, database: DB): + dao = activities_infrastructure.ActivitiesSQLDao(database) + new_activity = dao.create(activity) + return new_activity + return _new_activity diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_sql_dao_test.py index 25f62500..0f0170af 100644 --- a/V2/tests/integration/daos/activities_sql_dao_test.py +++ b/V2/tests/integration/daos/activities_sql_dao_test.py @@ -7,12 +7,11 @@ from time_tracker._infrastructure import DB -@pytest.fixture(name='insert_activity') -def _insert_activity() -> domain.Activity: - def _new_activity(activity: domain.Activity, dao: domain.ActivitiesDao): - new_activity = dao.create(activity) - return new_activity - return _new_activity +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + return dao @pytest.fixture(name='clean_database', autouse=True) @@ -41,7 +40,7 @@ def test_update__returns_an_update_activity__when_an_activity_matching_its_id_is ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) expected_description = Faker().sentence() updated_activity = dao.update(inserted_activity.id, None, expected_description, None, None) @@ -68,8 +67,8 @@ def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_acti dao = create_fake_dao existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], dao), - insert_activity(existent_activities[1], dao) + insert_activity(existent_activities[0], dao.db), + insert_activity(existent_activities[1], dao.db) ] activities = dao.get_all() @@ -83,7 +82,7 @@ def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matche ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.get_by_id(inserted_activity.id) @@ -117,7 +116,7 @@ def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matc ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.delete(inserted_activity.id) diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py new file mode 100644 index 00000000..403f80c6 --- /dev/null +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -0,0 +1,48 @@ +import pytest + + +import time_tracker.time_entries._domain as domain +import time_tracker.time_entries._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.TimeEntriesDao: + def _create_fake_dao(db_fake: DB) -> domain.TimeEntriesDao: + dao = infrastructure.TimeEntriesSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.TimeEntriesSQLDao(db_fake) + query = dao.time_entry.delete() + dao.db.get_session().execute(query) + + +def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_database( + test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + + inserted_time_entry = dao.create(time_entry_to_insert) + + assert isinstance(inserted_time_entry, domain.TimeEntry) + assert inserted_time_entry == time_entry_to_insert + + +def test__time_entry__returns_None__when_not_saves_correctly( + time_entry_factory, create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + time_entry_to_insert = time_entry_factory(activity_id=1203, technologies="[jira,sql]") + + inserted_time_entry = dao.create(time_entry_to_insert) + + assert inserted_time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py new file mode 100644 index 00000000..bd5ce085 --- /dev/null +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -0,0 +1,14 @@ +from time_tracker.time_entries._domain import TimeEntryService + + +def test__create_time_entries__uses_the_time_entry_dao__to_create_an_time_entry(mocker, time_entry_factory): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.create(time_entry_factory()) + + assert time_entry_dao.create.called + assert expected_time_entry == actual_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py new file mode 100644 index 00000000..d2a31eb7 --- /dev/null +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -0,0 +1,18 @@ +from pytest_mock import MockFixture + +from time_tracker.time_entries._domain import _use_cases + + +def test__create_time_entry_function__uses_the_time_entries_service__to_create_time_entry( + mocker: MockFixture, time_entry_factory +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_use_case = _use_cases.CreateTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.create_time_entry(time_entry_factory()) + + assert time_entry_service.create.called + assert expected_time_entry == actual_time_entry diff --git a/V2/time_tracker/_infrastructure/_config.py b/V2/time_tracker/_infrastructure/_config.py index 7f8c8fa7..cf4f19bf 100644 --- a/V2/time_tracker/_infrastructure/_config.py +++ b/V2/time_tracker/_infrastructure/_config.py @@ -1,20 +1,17 @@ import typing import os -CONNECTION_STRING = 'postgresql://root:root@localhost:5433/timetracker' - class Config(typing.NamedTuple): DB_CONNECTION_STRING: str - DB_USER: str - DB_PASS: str - DB_NAME: str def load_config(): + if os.environ.get("ENVIRONMENT") == "development": + connection: str = os.environ.get("DB_CONNECTION") + else: + connection: str = os.environ.get("TEST_DB_CONNECTION") + return Config( - CONNECTION_STRING if os.environ.get("DB_CONNECTION_STRING") is None else os.environ.get("DB_CONNECTION_STRING"), - os.environ.get("DB_USER"), - os.environ.get("DB_PASS"), - os.environ.get("DB_NAME") + connection ) diff --git a/V2/time_tracker/_infrastructure/_db.py b/V2/time_tracker/_infrastructure/_db.py index 8fe5cef1..6f3a9f9a 100644 --- a/V2/time_tracker/_infrastructure/_db.py +++ b/V2/time_tracker/_infrastructure/_db.py @@ -14,7 +14,7 @@ def __init__(self, conn_string: str = conn_string): self.engine = sqlalchemy.create_engine(conn_string) def get_session(self): + self.metadata.create_all(self.engine) if self.connection is None: - self.metadata.create_all(self.engine) self.connection = self.engine.connect() return self.connection diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py new file mode 100644 index 00000000..6e4ba9c3 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries import create_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py new file mode 100644 index 00000000..b46cddce --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_time_entry import create_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py new file mode 100644 index 00000000..a06c212c --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py @@ -0,0 +1,63 @@ +import dataclasses +import json +import typing + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def create_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.CreateTimeEntryUseCase(time_entry_service) + + time_entry_data = req.get_json() + + validation_errors = _validate_time_entry(time_entry_data) + if validation_errors: + return func.HttpResponse( + body=json.dumps(validation_errors), status_code=400, mimetype="application/json" + ) + + time_entry_to_create = _domain.TimeEntry( + id=None, + start_date=time_entry_data["start_date"], + owner_id=time_entry_data["owner_id"], + description=time_entry_data["description"], + activity_id=time_entry_data["activity_id"], + uri=time_entry_data["uri"], + technologies=time_entry_data["technologies"], + end_date=time_entry_data["end_date"], + deleted=False, + timezone_offset=time_entry_data["timezone_offset"], + project_id=time_entry_data["project_id"] + ) + + created_time_entry = use_case.create_time_entry(time_entry_to_create) + + if not created_time_entry: + return func.HttpResponse( + body=json.dumps({'error': 'time_entry could not be created'}), + status_code=500, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(created_time_entry.__dict__), + status_code=201, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> typing.List[str]: + time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry)] + time_entry_fields.pop(8) + missing_keys = [field for field in time_entry_fields if field not in time_entry_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py new file mode 100644 index 00000000..a8b2081c --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._entities import TimeEntry +from ._persistence_contracts import TimeEntriesDao +from ._services import TimeEntryService +from ._use_cases import ( + CreateTimeEntryUseCase, +) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/time_entries/_domain/_entities/__init__.py new file mode 100644 index 00000000..88b4a739 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py new file mode 100644 index 00000000..aa73a879 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from typing import List + + +@dataclass(frozen=True) +class TimeEntry: + id: int + start_date: str + owner_id: int + description: str + activity_id: int + uri: str + technologies: List[str] + end_date: str + deleted: bool + timezone_offset: str + project_id: int diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..e10700ce --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_dao import TimeEntriesDao \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py new file mode 100644 index 00000000..5d04c861 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -0,0 +1,9 @@ +import abc + +from time_tracker.time_entries._domain import TimeEntry + + +class TimeEntriesDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/__init__.py b/V2/time_tracker/time_entries/_domain/_services/__init__.py new file mode 100644 index 00000000..e5e6ba1b --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntryService \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py new file mode 100644 index 00000000..d7aaf3ba --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntriesDao + + +class TimeEntryService: + + def __init__(self, time_entry_dao: TimeEntriesDao): + self.time_entry_dao = time_entry_dao + + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_dao.create(time_entry_data) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..41aca738 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_time_entry_use_case import CreateTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py new file mode 100644 index 00000000..f2258468 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class CreateTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def create_time_entry(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_service.create(time_entry_data) diff --git a/V2/time_tracker/time_entries/_infrastructure/__init__.py b/V2/time_tracker/time_entries/_infrastructure/__init__.py new file mode 100644 index 00000000..1c7a7d6d --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..b999febe --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_dao import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py new file mode 100644 index 00000000..d233f3e9 --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -0,0 +1,49 @@ +import dataclasses + +import sqlalchemy + +import time_tracker.time_entries._domain as domain +from time_tracker._infrastructure import _db + + +class TimeEntriesSQLDao(domain.TimeEntriesDao): + + def __init__(self, database: _db.DB): + self.time_entry_key = [field.name for field in dataclasses.fields(domain.TimeEntry)] + self.db = database + self.time_entry = sqlalchemy.Table( + 'time_entry', + self.db.metadata, + sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True, autoincrement=True), + sqlalchemy.Column('start_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('owner_id', sqlalchemy.Integer), + sqlalchemy.Column('description', sqlalchemy.String), + sqlalchemy.Column('activity_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('activity.id')), + sqlalchemy.Column('uri', sqlalchemy.String), + sqlalchemy.Column( + 'technologies', + sqlalchemy.ARRAY(sqlalchemy.String).with_variant(sqlalchemy.String, "sqlite") + ), + sqlalchemy.Column('end_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('deleted', sqlalchemy.Boolean), + sqlalchemy.Column('timezone_offset', sqlalchemy.String), + sqlalchemy.Column('project_id', sqlalchemy.Integer), + extend_existing=True, + ) + + def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: + try: + new_time_entry = time_entry_data.__dict__ + new_time_entry.pop('id', None) + + query = self.time_entry.insert().values(new_time_entry).return_defaults() + time_entry = self.db.get_session().execute(query) + new_time_entry.update({"id": time_entry.inserted_primary_key[0]}) + return self.__create_time_entry_dto(new_time_entry) + + except sqlalchemy.exc.SQLAlchemyError: + return None + + def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: + time_entry = {key: time_entry.get(key) for key in self.time_entry_key} + return domain.TimeEntry(**time_entry) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py new file mode 100644 index 00000000..d0182780 --- /dev/null +++ b/V2/time_tracker/time_entries/interface.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._application import create_time_entry \ No newline at end of file diff --git a/V2/update_activity/function.json b/V2/update_activity/function.json deleted file mode 100644 index 97c9fb49..00000000 --- a/V2/update_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "PUT" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "update_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file From 60a0dc7015f98b24a3429b1ceabf31e722741649 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Mon, 22 Nov 2021 10:21:34 -0500 Subject: [PATCH 10/33] feat: TT-403 delete v2 time entries (#346) * feat: TT-401 Implemented service, end-point, dao, test- time entries * feat: TT-401 validated request create time entry * fix: TT-401 implemented faker url * feat: TT-403 created end-point to DELETE of time_entries * fix: TT-403 validation of id as integer * fix: TT-403 remove method POST * feat: TT-403 rebase with master * feat: TT-403 tests added * refactor: TT-403 correct flake8 lint syntax * fix: TT-403 comments solved * fix: TT-403 correction of rebase * refactor: TT-403 renamed of delete test Co-authored-by: mandres2015 --- V2/serverless.yml | 10 ++++ .../azure/time_entry_azure_endpoints_test.py | 50 +++++++++++++++++++ V2/tests/fixtures.py | 6 ++- .../integration/daos/time_entries_dao_test.py | 25 +++++++++- .../unit/services/time_entry_service_test.py | 17 +++++++ .../use_cases/time_entries_use_case_test.py | 14 ++++++ .../time_entries/_application/__init__.py | 2 +- .../_application/_time_entries/__init__.py | 3 +- .../_time_entries/_delete_time_entry.py | 36 +++++++++++++ .../time_entries/_domain/__init__.py | 1 + .../_time_entries_dao.py | 4 ++ .../_domain/_services/_time_entry.py | 3 ++ .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_delete_time_entry_use_case.py | 10 ++++ .../_data_persistence/_time_entries_dao.py | 11 ++++ V2/time_tracker/time_entries/interface.py | 3 +- 16 files changed, 191 insertions(+), 5 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index c6c5e34b..fc5942d9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -86,3 +86,13 @@ functions: - POST route: time-entries/ authLevel: anonymous + + delete_time_entry: + handler: time_tracker/time_entries/interface.delete_time_entry + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: time-entries/{id} + authLevel: anonymous diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index f801dad9..8422c4b5 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,12 +1,26 @@ +import pytest import json import azure.functions as func import time_tracker.time_entries._application._time_entries as azure_time_entries +from time_tracker._infrastructure import DB +from time_tracker.time_entries import _domain as domain_time_entries +from time_tracker.time_entries import _infrastructure as infrastructure_time_entries + TIME_ENTRY_URL = "/api/time-entries/" +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry() -> domain_time_entries.TimeEntry: + def _new_time_entry(time_entry: domain_time_entries.TimeEntry, database: DB): + dao = infrastructure_time_entries.TimeEntriesSQLDao(database) + new_time_entry = dao.create(time_entry) + return new_time_entry + return _new_time_entry + + def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_all_attributes( test_db, time_entry_factory, activity_factory, insert_activity ): @@ -26,3 +40,39 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ assert response.status_code == 201 assert time_entry_json_data == time_entry_body + + +def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_deleted__when_its_id_is_found( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, +): + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + inserted_time_entry = insert_time_entry(time_entry_body, test_db) + + req = func.HttpRequest( + method='DELETE', + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entry.id}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert time_entry_json_data['deleted'] is True + + +def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid Format ID' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 51ee5e5d..8568bdb6 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -10,7 +10,10 @@ @pytest.fixture(name='activity_factory') def _activity_factory() -> activities_domain.Activity: def _make_activity( - name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, ): activity = activities_domain.Activity( id=None, @@ -20,6 +23,7 @@ def _make_activity( status=status ) return activity + return _make_activity diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index 403f80c6..901bce34 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -1,5 +1,5 @@ import pytest - +from faker import Faker import time_tracker.time_entries._domain as domain import time_tracker.time_entries._infrastructure as infrastructure @@ -46,3 +46,26 @@ def test__time_entry__returns_None__when_not_saves_correctly( inserted_time_entry = dao.create(time_entry_to_insert) assert inserted_time_entry is None + + +def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_matching_its_id_is_found( + create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entry = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + inserted_time_entry = dao.create(existent_time_entry) + + result = dao.delete(inserted_time_entry.id) + + assert result.deleted is True + + +def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( + create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + + result = dao.delete(Faker().pyint()) + + assert result is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index bd5ce085..e83b6afb 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -1,3 +1,5 @@ +from faker import Faker + from time_tracker.time_entries._domain import TimeEntryService @@ -12,3 +14,18 @@ def test__create_time_entries__uses_the_time_entry_dao__to_create_an_time_entry( assert time_entry_dao.create.called assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry__uses_the_time_entry_dao__to_delete_time_entry_selected( + mocker, +): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + deleted_time_entry = time_entry_service.delete(Faker().pyint()) + + assert time_entry_dao.delete.called + assert expected_time_entry == deleted_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index d2a31eb7..e0994df4 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -1,4 +1,5 @@ from pytest_mock import MockFixture +from faker import Faker from time_tracker.time_entries._domain import _use_cases @@ -16,3 +17,16 @@ def test__create_time_entry_function__uses_the_time_entries_service__to_create_t assert time_entry_service.create.called assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry_function__uses_the_time_entry_service__to_delete_time_entry_selected( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(delete=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.DeleteTimeEntryUseCase(time_entry_service) + deleted_time_entry = time_entry_use_case.delete_time_entry(Faker().pyint()) + + assert time_entry_service.delete.called + assert expected_time_entry == deleted_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 6e4ba9c3..2810c87d 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries import create_time_entry \ No newline at end of file +from ._time_entries import create_time_entry, delete_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index b46cddce..4cb4d4b0 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,2 +1,3 @@ # flake8: noqa -from ._create_time_entry import create_time_entry \ No newline at end of file +from ._create_time_entry import create_time_entry +from ._delete_time_entry import delete_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py new file mode 100644 index 00000000..bbf76eab --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py @@ -0,0 +1,36 @@ +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def delete_time_entry(req: func.HttpRequest) -> func.HttpResponse: + time_entry_dao = _infrastructure.TimeEntriesSQLDao(DB()) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.DeleteTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + deleted_time_entry = use_case.delete_time_entry(time_entry_id) + if not deleted_time_entry: + return func.HttpResponse( + body="Not found", + status_code=404, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(deleted_time_entry.__dict__, default=str), + status_code=200, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index a8b2081c..ad927811 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -4,4 +4,5 @@ from ._services import TimeEntryService from ._use_cases import ( CreateTimeEntryUseCase, + DeleteTimeEntryUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index 5d04c861..e7d94608 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -7,3 +7,7 @@ class TimeEntriesDao(abc.ABC): @abc.abstractmethod def create(self, time_entry_data: TimeEntry) -> TimeEntry: pass + + @abc.abstractmethod + def delete(self, id: int) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index d7aaf3ba..9d47d5e0 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -8,3 +8,6 @@ def __init__(self, time_entry_dao: TimeEntriesDao): def create(self, time_entry_data: TimeEntry) -> TimeEntry: return self.time_entry_dao.create(time_entry_data) + + def delete(self, id: int) -> TimeEntry: + return self.time_entry_dao.delete(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 41aca738..17b2442a 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,2 +1,3 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase +from ._delete_time_entry_use_case import DeleteTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py new file mode 100644 index 00000000..a195c303 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class DeleteTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def delete_time_entry(self, id: int) -> TimeEntry: + return self.time_entry_service.delete(id) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py index d233f3e9..6037af9f 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -47,3 +47,14 @@ def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: time_entry = {key: time_entry.get(key) for key in self.time_entry_key} return domain.TimeEntry(**time_entry) + + def delete(self, time_entry_id: int) -> domain.TimeEntry: + query = ( + self.time_entry.update() + .where(self.time_entry.c.id == time_entry_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_time_entry = sqlalchemy.sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query_deleted_time_entry).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index d0182780..773314bb 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,2 +1,3 @@ # flake8: noqa -from ._application import create_time_entry \ No newline at end of file +from ._application import create_time_entry +from ._application import delete_time_entry \ No newline at end of file From a6fcb3536a5fa6817a5dc28fab899557e5b9e4d8 Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Mon, 22 Nov 2021 14:17:50 -0500 Subject: [PATCH 11/33] ci: TT-411 adds pull request (ci) workflow for time-tracker v1 (#356) * ci: [TT-412] adds ci workflow for time-tracker v1 * ci: [TT-412] injects secrets * ci: [TT-412] gets azure vault * ci: [TT-412] logins to azure * ci: [TT-412] adds other attempt to use secrets * ci: [TT-412] adds other attempt to use secrets as env var * ci: [TT-412] injects secrets to run tests * ci: [TT-412] injects USERID to run tests * ci: [TT-412] injects AZURE-APP-CONFIGURATION-CONNECTION-STRING to run tests * ci: [TT-412] injects DATABASE-ACCOUNT-URI to run tests * ci: [TT-412] injects DATABASE-MASTER-KEYO to run tests * ci: [TT-412] injects DATABASE-NAME to run tests * ci: [TT-412] injects AZURE-STORAGE-ACCOUNT-KEY to run tests * ci: [TT-412] adds step to build the app * ci: [TT-412] updates on section values * ci: [TT-412] renames file --- ...me-tracker-v1-on-pull-request-workflow.yml | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 .github/workflows/time-tracker-v1-on-pull-request-workflow.yml diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml new file mode 100644 index 00000000..af4d872a --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -0,0 +1,60 @@ +name: Time Tacker V1 CI + +on: + pull_request: + branches: [master] + +jobs: + time-tracker-ci: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + run: | + pytest tests + + - name: Test the build of the app + run: | + docker build . From 6de0063b7bedacd79d5bd7dac9ebfe06e15f5f6c Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Tue, 23 Nov 2021 13:26:38 -0500 Subject: [PATCH 12/33] ci: [TT-412] adds ci workflow when push to master --- ...me-tracker-v1-on-pull-request-workflow.yml | 4 +- .../time-tracker-v1-on-push-workflow.yml | 67 +++++++++++++++++++ 2 files changed, 69 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/time-tracker-v1-on-push-workflow.yml diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index af4d872a..c35be604 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -1,11 +1,11 @@ -name: Time Tacker V1 CI +name: Time Tacker V1 CI - ON PR on: pull_request: branches: [master] jobs: - time-tracker-ci: + time-tracker-ci-v1-on-pr: runs-on: ubuntu-latest strategy: diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml new file mode 100644 index 00000000..ed673b64 --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -0,0 +1,67 @@ +name: Time Tacker V1 CI - ON PUSH + +on: + push: + # update to master + branches: [TT-412-onpush] + +jobs: + time-tracker-ci-v1-on-push: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + run: | + pytest tests + + - name: Build and push image + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + run: | + docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From d6c4c4d67e72db867f197af8c7f8147839d6c178 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Wed, 24 Nov 2021 09:25:55 -0500 Subject: [PATCH 13/33] feat: TT-418 CRUD customer v2 (#359) * feat: TT-418 create added * feat: TT-418 create tests added * fix: TT-418 problems solved --- V2/serverless.yml | 10 ++++ .../azure/customer_azure_endpoints_test.py | 49 ++++++++++++++++ V2/tests/conftest.py | 1 + V2/tests/fixtures.py | 23 +++++++- .../integration/daos/customers_dao_test.py | 35 ++++++++++++ .../unit/services/customer_service_test.py | 14 +++++ .../unit/use_cases/customers_use_case_test.py | 18 ++++++ .../customers/_application/__init__.py | 2 + .../_application/_customers/__init__.py | 2 + .../_customers/_create_customer.py | 57 +++++++++++++++++++ V2/time_tracker/customers/_domain/__init__.py | 7 +++ .../customers/_domain/_entities/__init__.py | 2 + .../customers/_domain/_entities/_customer.py | 11 ++++ .../_persistence_contracts/__init__.py | 2 + .../_persistence_contracts/_customers_dao.py | 9 +++ .../customers/_domain/_services/__init__.py | 2 + .../customers/_domain/_services/_customer.py | 10 ++++ .../customers/_domain/_use_cases/__init__.py | 2 + .../_use_cases/_create_customer_use_case.py | 10 ++++ .../customers/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_customer_dao.py | 41 +++++++++++++ V2/time_tracker/customers/interface.py | 2 + 23 files changed, 312 insertions(+), 1 deletion(-) create mode 100644 V2/tests/api/azure/customer_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/customers_dao_test.py create mode 100644 V2/tests/unit/services/customer_service_test.py create mode 100644 V2/tests/unit/use_cases/customers_use_case_test.py create mode 100644 V2/time_tracker/customers/_application/__init__.py create mode 100644 V2/time_tracker/customers/_application/_customers/__init__.py create mode 100644 V2/time_tracker/customers/_application/_customers/_create_customer.py create mode 100644 V2/time_tracker/customers/_domain/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_entities/_customer.py create mode 100644 V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py create mode 100644 V2/time_tracker/customers/_domain/_services/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_services/_customer.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py create mode 100644 V2/time_tracker/customers/_infrastructure/__init__.py create mode 100644 V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py create mode 100644 V2/time_tracker/customers/interface.py diff --git a/V2/serverless.yml b/V2/serverless.yml index fc5942d9..e5dea8e9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -96,3 +96,13 @@ functions: - DELETE route: time-entries/{id} authLevel: anonymous + + create_customer: + handler: time_tracker/customers/interface.create_customer + events: + - http: true + x-azure-settings: + methods: + - POST + route: customers/ + authLevel: anonymous diff --git a/V2/tests/api/azure/customer_azure_endpoints_test.py b/V2/tests/api/azure/customer_azure_endpoints_test.py new file mode 100644 index 00000000..47a619d5 --- /dev/null +++ b/V2/tests/api/azure/customer_azure_endpoints_test.py @@ -0,0 +1,49 @@ +import json +from faker import Faker + +import azure.functions as func + +import time_tracker.customers._application._customers as azure_customers + +CUSTOMER_URL = "/api/customers/" + + +def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + customer_body = customer_factory().__dict__ + + body = json.dumps(customer_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + customer_json_data = json.loads(response.get_body()) + customer_body['id'] = customer_json_data['id'] + + assert response.status_code == 201 + assert customer_json_data == customer_body + + +def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): + customer_to_insert = { + "id": None, + "name": Faker().user_name(), + "deleted": False, + "status": 1 + } + + body = json.dumps(customer_to_insert).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index cf6e362f..4ad03c51 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,3 +1,4 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity from fixtures import _time_entry_factory +from fixtures import _customer_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 8568bdb6..a02a74bb 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,8 +2,9 @@ from faker import Faker import time_tracker.activities._domain as activities_domain -import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain +import time_tracker.customers._domain as customers_domain +import time_tracker.activities._infrastructure as activities_infrastructure from time_tracker._infrastructure import DB @@ -73,3 +74,23 @@ def _new_activity(activity: activities_domain.Activity, database: DB): new_activity = dao.create(activity) return new_activity return _new_activity + + +@pytest.fixture(name='customer_factory') +def _customer_factory() -> customers_domain.Customer: + def _make_customer( + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, + ): + customer = customers_domain.Customer( + id=None, + name=name, + description=description, + deleted=deleted, + status=status + ) + return customer + + return _make_customer diff --git a/V2/tests/integration/daos/customers_dao_test.py b/V2/tests/integration/daos/customers_dao_test.py new file mode 100644 index 00000000..b85cd3e3 --- /dev/null +++ b/V2/tests/integration/daos/customers_dao_test.py @@ -0,0 +1,35 @@ +import pytest + +import time_tracker.customers._domain as domain +import time_tracker.customers._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.CustomersDao: + def _create_fake_dao(db_fake: DB) -> domain.CustomersDao: + dao = infrastructure.CustomersSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.CustomersSQLDao(db_fake) + query = dao.customer.delete() + dao.db.get_session().execute(query) + + +def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao +): + dao = create_fake_dao(test_db) + + customer_to_insert = customer_factory() + + inserted_customer = dao.create(customer_to_insert) + + assert isinstance(inserted_customer, domain.Customer) + assert inserted_customer == customer_to_insert diff --git a/V2/tests/unit/services/customer_service_test.py b/V2/tests/unit/services/customer_service_test.py new file mode 100644 index 00000000..bb25070f --- /dev/null +++ b/V2/tests/unit/services/customer_service_test.py @@ -0,0 +1,14 @@ +from time_tracker.customers._domain import CustomerService + + +def test__create_customer__uses_the_customer_dao__to_create_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + new_customer = customer_service.create(customer_factory()) + + assert customer_dao.create.called + assert expected_customer == new_customer diff --git a/V2/tests/unit/use_cases/customers_use_case_test.py b/V2/tests/unit/use_cases/customers_use_case_test.py new file mode 100644 index 00000000..3b8566a9 --- /dev/null +++ b/V2/tests/unit/use_cases/customers_use_case_test.py @@ -0,0 +1,18 @@ +from pytest_mock import MockFixture + +from time_tracker.customers._domain import _use_cases + + +def test__create_customer_function__uses_the_customer_service__to_create_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.CreateCustomerUseCase(customer_service) + new_customer = customer_use_case.create_customer(customer_factory()) + + assert customer_service.create.called + assert expected_customer == new_customer diff --git a/V2/time_tracker/customers/_application/__init__.py b/V2/time_tracker/customers/_application/__init__.py new file mode 100644 index 00000000..db2c2c15 --- /dev/null +++ b/V2/time_tracker/customers/_application/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customers import create_customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_application/_customers/__init__.py b/V2/time_tracker/customers/_application/_customers/__init__.py new file mode 100644 index 00000000..bf1f8460 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_customer import create_customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_application/_customers/_create_customer.py b/V2/time_tracker/customers/_application/_customers/_create_customer.py new file mode 100644 index 00000000..919c34cb --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_create_customer.py @@ -0,0 +1,57 @@ +import dataclasses +import json +import typing + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def create_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.CreateCustomerUseCase(customer_service) + customer_data = req.get_json() + + customer_is_valid = _validate_customer(customer_data) + if not customer_is_valid: + raise ValueError + + customer_to_create = _domain.Customer( + id=None, + deleted=None, + status=None, + name=str(customer_data["name"]).strip(), + description=str(customer_data["description"]), + ) + created_customer = use_case.create_customer(customer_to_create) + + if created_customer: + body = json.dumps(created_customer.__dict__) + status_code = 201 + else: + body = b'This customer already exists' + status_code = 409 + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=400, + mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> bool: + if [field.name for field in dataclasses.fields(_domain.Customer) + if (field.name not in customer_data) and (field.type != typing.Optional[field.type])]: + return False + return True diff --git a/V2/time_tracker/customers/_domain/__init__.py b/V2/time_tracker/customers/_domain/__init__.py new file mode 100644 index 00000000..8392b8e9 --- /dev/null +++ b/V2/time_tracker/customers/_domain/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._entities import Customer +from ._persistence_contracts import CustomersDao +from ._services import CustomerService +from ._use_cases import ( + CreateCustomerUseCase, +) \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_entities/__init__.py b/V2/time_tracker/customers/_domain/_entities/__init__.py new file mode 100644 index 00000000..2a23e12c --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import Customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_entities/_customer.py b/V2/time_tracker/customers/_domain/_entities/_customer.py new file mode 100644 index 00000000..fedc0835 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/_customer.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +import typing + + +@dataclass(frozen=True) +class Customer: + id: typing.Optional[int] + name: str + description: str + deleted: typing.Optional[bool] + status: typing.Optional[int] diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..8b1b02fd --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customers_dao import CustomersDao \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py new file mode 100644 index 00000000..35a7a7e9 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py @@ -0,0 +1,9 @@ +import abc + +from time_tracker.customers._domain import Customer + + +class CustomersDao(abc.ABC): + @abc.abstractmethod + def create(self, data: Customer) -> Customer: + pass diff --git a/V2/time_tracker/customers/_domain/_services/__init__.py b/V2/time_tracker/customers/_domain/_services/__init__.py new file mode 100644 index 00000000..84ed66cf --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import CustomerService \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_services/_customer.py b/V2/time_tracker/customers/_domain/_services/_customer.py new file mode 100644 index 00000000..88633a08 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/_customer.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomersDao + + +class CustomerService: + + def __init__(self, customer_dao: CustomersDao): + self.customer_dao = customer_dao + + def create(self, data: Customer) -> Customer: + return self.customer_dao.create(data) diff --git a/V2/time_tracker/customers/_domain/_use_cases/__init__.py b/V2/time_tracker/customers/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..accd4281 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_customer_use_case import CreateCustomerUseCase \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py new file mode 100644 index 00000000..8aeefa2b --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class CreateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def create_customer(self, data: Customer) -> Customer: + return self.customer_service.create(data) diff --git a/V2/time_tracker/customers/_infrastructure/__init__.py b/V2/time_tracker/customers/_infrastructure/__init__.py new file mode 100644 index 00000000..220e8f60 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..c3c24a98 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer_dao import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py new file mode 100644 index 00000000..2b1f4c0d --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py @@ -0,0 +1,41 @@ +import dataclasses + +import sqlalchemy as sq + +import time_tracker.customers._domain as domain +from time_tracker._infrastructure import _db + + +class CustomersSQLDao(domain.CustomersDao): + + def __init__(self, database: _db.DB): + self.customer_key = [field.name for field in dataclasses.fields(domain.Customer)] + self.db = database + self.customer = sq.Table( + 'customer', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String, unique=True, nullable=False), + sq.Column('description', sq.String), + sq.Column('deleted', sq.Boolean), + sq.Column('status', sq.Integer), + extend_existing=True, + ) + + def create(self, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop('id', None) + new_customer['deleted'] = False + new_customer['status'] = 1 + + query = self.customer.insert().values(new_customer).return_defaults() + customer = self.db.get_session().execute(query) + new_customer.update({"id": customer.inserted_primary_key[0]}) + return self.__create_customer_dto(new_customer) + except sq.exc.IntegrityError: + return None + + def __create_customer_dto(self, customer: dict) -> domain.Customer: + customer = {key: customer.get(key) for key in self.customer_key} + return domain.Customer(**customer) diff --git a/V2/time_tracker/customers/interface.py b/V2/time_tracker/customers/interface.py new file mode 100644 index 00000000..e36b8172 --- /dev/null +++ b/V2/time_tracker/customers/interface.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._application import create_customer From d898e1b0ed42390133a7b5dded3bcf281fc2f1ba Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Wed, 24 Nov 2021 12:06:52 -0500 Subject: [PATCH 14/33] ci: [TT-412] adds ci workflow when push to master (#358) --- .github/workflows/time-tracker-v1-on-push-workflow.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index ed673b64..152998b4 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -2,8 +2,7 @@ name: Time Tacker V1 CI - ON PUSH on: push: - # update to master - branches: [TT-412-onpush] + branches: [master] jobs: time-tracker-ci-v1-on-push: @@ -56,12 +55,13 @@ jobs: run: | pytest tests - - name: Build and push image + - name: Login to docker registry uses: azure/docker-login@v1 with: login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} username: ${{ secrets.REGISTRY_USERNAME }} password: ${{ secrets.REGISTRY_PASSWORD }} + - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From b81319fe12bff57816dac1d0354000bfc6674c1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:47:03 -0500 Subject: [PATCH 15/33] fix: TT-401 change in activity database instance and refactor test (#355) * fix: TT-401 change in activity database instance and refactor test * fix: TT-401 resolved comments --- V2/Makefile | 16 ++++++++++------ .../api/azure/activity_azure_endpoints_test.py | 5 ----- ...es_sql_dao_test.py => activities_dao_test.py} | 4 ++-- .../_application/_activities/_create_activity.py | 5 ++--- .../_application/_activities/_delete_activity.py | 5 ++--- .../_application/_activities/_get_activities.py | 15 +++++++-------- .../_application/_activities/_update_activity.py | 5 ++--- .../_data_persistence/__init__.py | 2 +- ..._activities_sql_dao.py => _activities_dao.py} | 0 .../_time_entries/_create_time_entry.py | 4 ++-- .../_domain/_entities/_time_entry.py | 6 +++--- 11 files changed, 31 insertions(+), 36 deletions(-) rename V2/tests/integration/daos/{activities_sql_dao_test.py => activities_dao_test.py} (98%) rename V2/time_tracker/activities/_infrastructure/_data_persistence/{_activities_sql_dao.py => _activities_dao.py} (100%) diff --git a/V2/Makefile b/V2/Makefile index 135e96d0..cf02904b 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -1,10 +1,11 @@ .PHONY: help help: @echo "---------------HELP-----------------" - @echo "To install the dependencies type make install" - @echo "To test the project type make test" - @echo "To run the local database type make start-local" - @echo "To run all comands type make ci" + @echo "- make install --> Install the dependencies" + @echo "- make test --> Run all tests" + @echo "- make test specific_test= --> Run specific test" + @echo "- make start-local --> Run local database" + @echo "- make ci --> Install the dependencies and run all tests" @echo "------------------------------------" .PHONY: install @@ -17,13 +18,16 @@ install: .PHONY: test test: export ENVIRONMENT = test -test: export TEST_DB_CONNECTION = sqlite:///:memory: test: @echo "=========================================Lint with flake8=========================================" flake8 . --show-source --statistics @echo "Completed flake8!" @echo "=========================================Test with pytest=========================================" - python -m pytest -v + @if [ "$(specific_test)" ]; then \ + python -m pytest -vv -s -k $(specific_test);\ + else \ + python -m pytest -v;\ + fi @echo "Completed test!" start-local: diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 994c74c7..7c0de311 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -17,8 +17,6 @@ def test__activity_azure_endpoint__returns_all_activities( insert_activity(existent_activities[1], test_db).__dict__ ] - azure_activities._get_activities.DATABASE = test_db - req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") @@ -33,7 +31,6 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._get_activities.DATABASE = test_db req = func.HttpRequest( method='GET', body=None, @@ -54,7 +51,6 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._delete_activity.DATABASE = test_db req = func.HttpRequest( method='DELETE', body=None, @@ -76,7 +72,6 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._update_activity.DATABASE = test_db activity_body = {"description": Faker().sentence()} req = func.HttpRequest( method='PUT', diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_dao_test.py similarity index 98% rename from V2/tests/integration/daos/activities_sql_dao_test.py rename to V2/tests/integration/daos/activities_dao_test.py index 0f0170af..637a7799 100644 --- a/V2/tests/integration/daos/activities_sql_dao_test.py +++ b/V2/tests/integration/daos/activities_dao_test.py @@ -9,7 +9,7 @@ @pytest.fixture(name='create_fake_dao') def _create_fake_dao() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') + db_fake = DB() dao = infrastructure.ActivitiesSQLDao(db_fake) return dao @@ -17,7 +17,7 @@ def _create_fake_dao() -> domain.ActivitiesDao: @pytest.fixture(name='clean_database', autouse=True) def _clean_database(): yield - db_fake = DB('sqlite:///:memory:') + db_fake = DB() dao = infrastructure.ActivitiesSQLDao(db_fake) query = dao.activity.delete() dao.db.get_session().execute(query) diff --git a/V2/time_tracker/activities/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py index 94f3701d..8d5b912d 100644 --- a/V2/time_tracker/activities/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -8,11 +8,10 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def create_activity(req: func.HttpRequest) -> func.HttpResponse: - activity_dao = _infrastructure.ActivitiesSQLDao(DATABASE) + database = DB() + activity_dao = _infrastructure.ActivitiesSQLDao(database) activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) diff --git a/V2/time_tracker/activities/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py index 14ada8ab..746b1073 100644 --- a/V2/time_tracker/activities/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -7,8 +7,6 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def delete_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -29,8 +27,9 @@ def delete_activity(req: func.HttpRequest) -> func.HttpResponse: def _delete(activity_id: int) -> str: + database = DB() activity_use_case = _domain._use_cases.DeleteActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.delete_activity(activity_id) return json.dumps(activity.__dict__) if activity else b'Not found' diff --git a/V2/time_tracker/activities/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py index d92503dd..dd6053b0 100644 --- a/V2/time_tracker/activities/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -7,10 +7,9 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def get_activities(req: func.HttpRequest) -> func.HttpResponse: + database = DB() logging.info( 'Python HTTP trigger function processed a request to get an activity.' ) @@ -19,11 +18,11 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: try: if activity_id: - response = _get_by_id(int(activity_id)) + response = _get_by_id(int(activity_id), database) if response == b'Not Found': status_code = 404 else: - response = _get_all() + response = _get_all(database) return func.HttpResponse( body=response, status_code=status_code, mimetype="application/json" @@ -34,18 +33,18 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: ) -def _get_by_id(activity_id: int) -> str: +def _get_by_id(activity_id: int, database: DB) -> str: activity_use_case = _domain._use_cases.GetActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.get_activity_by_id(activity_id) return json.dumps(activity.__dict__) if activity else b'Not Found' -def _get_all() -> str: +def _get_all(database: DB) -> str: activities_use_case = _domain._use_cases.GetActivitiesUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) return json.dumps( [ diff --git a/V2/time_tracker/activities/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py index 0933fd72..4717042c 100644 --- a/V2/time_tracker/activities/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -8,8 +8,6 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def update_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -37,8 +35,9 @@ def update_activity(req: func.HttpRequest) -> func.HttpResponse: def _update(activity_id: int, activity_data: dict) -> str: + database = DB() activity_use_case = _domain._use_cases.UpdateActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.update_activity( activity_id, activity_data.get("name"), diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py index 1e7220c5..35c209db 100644 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._activities_sql_dao import ActivitiesSQLDao +from ._activities_dao import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py similarity index 100% rename from V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py rename to V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py index a06c212c..95149c55 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py @@ -54,8 +54,8 @@ def create_time_entry(req: func.HttpRequest) -> func.HttpResponse: def _validate_time_entry(time_entry_data: dict) -> typing.List[str]: - time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry)] - time_entry_fields.pop(8) + time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry) + if field.type != typing.Optional[field.type]] missing_keys = [field for field in time_entry_fields if field not in time_entry_data] return [ f'The {missing_key} key is missing in the input data' diff --git a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py index aa73a879..08df5f8f 100644 --- a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py @@ -1,10 +1,10 @@ from dataclasses import dataclass -from typing import List +from typing import List, Optional @dataclass(frozen=True) class TimeEntry: - id: int + id: Optional[int] start_date: str owner_id: int description: str @@ -12,6 +12,6 @@ class TimeEntry: uri: str technologies: List[str] end_date: str - deleted: bool + deleted: Optional[bool] timezone_offset: str project_id: int From 6dd85055b666888c7a22ffa1635b2e53903e7942 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 24 Nov 2021 17:00:25 -0500 Subject: [PATCH 16/33] docs: TT-419 update readme V2 (#357) --- V2/README.md | 103 ++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 94 insertions(+), 9 deletions(-) diff --git a/V2/README.md b/V2/README.md index f414079d..64726b30 100644 --- a/V2/README.md +++ b/V2/README.md @@ -1,23 +1,108 @@ -# time-tracker-api V2 +# **Time-tracker-api V2 Architecture** +Architecture +The application follows a DDD approach with a hexagonal clean architecture. BIG WORDS!, what does it mean? it means the following: -Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. +We have a directory for each domain entitiy (i.e. time entries, technologies, activities, etc) +Inside each entity directory we have other 3 directories (application, domain and infrastructure) +I'll leave this drawing to understand how these three folders work and what logic should be included in these directories -## Requirements to use makefile +![ddd.png](https://raw.githubusercontent.com/eguezgustavo/time_tracker_app_skeleton/master/ddd.png) +More information [Here](https://github.com/eguezgustavo/time_tracker_app_skeleton) -- Python version 3.6 or 3.7. +## **Stack Technologies** + - [Serverless](https://serverless.com/framework/docs/providers/azure/guide/intro/) + - Python + - Pytest + - Docker Compose + +Recommended link [tdd_dojo](https://github.com/eguezgustavo/tdd_dojo) -- Use an environment to install requirements (pyenv). +## **Setup environment** -## How to use makefile +### **Requeriments** + +- Install python 3.6 or 3.7 (recommendation to install python [pyenv](https://github.com/pyenv/pyenv)) +- Install node (recommendation to install node [nvm](https://www.digitalocean.com/community/tutorials/how-to-install-node-js-on-ubuntu-20-04-es#:~:text=de%20Node.js.-,Opci%C3%B3n%203%3A%20Instalar%20Node%20usando%20el%20administrador%20de%20versiones%20de%20Node,-Otra%20forma%20de)) + +### **Add variables** +In the root directory /time-tracker-backend create a file .env with these values + +``` +export MS_AUTHORITY=XXX +export MS_CLIENT_ID=XXX +export MS_SCOPE=XXX +export MS_SECRET=XXX +export MS_ENDPOINT=XXX +export DATABASE_ACCOUNT_URI=XXX +export DATABASE_MASTER_KEY=XXX +export DATABASE_NAME=XXX +export FLASK_APP=XXX +export FLASK_ENV=XXX +export AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXX +export USERID=XXX +export FLASK_DEBUG=True +export PYTHONPATH=XXX +export DB_CONNECTION=XXX +export ENVIRONMENT=XXX +``` + +In the directory /V2 create a file .env with these values +``` +DB_USER=XXX +DB_PASS=XXX +DB_NAME=XXX +``` + +### **Install dependencies** +In the Directory /V2 +``` +make install +``` + +## **Start Project** +In the directory /V2 +``` +npm run offline +docker compose up or make start-local +``` + + +## **Makefile to run a locally CI** Execute the next command to show makefile help: ```shell -make help +$ make help ``` - To install the dependencies type the command ```make install``` - - To test the project type the command ```make test``` - - To run the local database type the command ```make start-local``` + +## **How to contribute to the project** +Clone the repository and from the master branch create a new branch for each new task. +### **Branch names format** +For example if your task in Jira is **TT-48 implement semantic versioning** your branch name is: +``` + TT-48-implement-semantic-versioning +``` +### **Commit messages format** + + + Below there are some common examples you can use for your commit messages [semantic version](https://semver.org/) : + + - **feat**: A new feature. + - **fix**: A bug fix. + - **perf**: A code change that improves performance. + - **build**: Changes that affect the build system or external dependencies (example scopes: npm, ts configuration). + - **ci**: Changes to our CI or CD configuration files and scripts (example scopes: Azure devops, github actions). + - **docs**: Documentation only changes. + - **refactor**: A code change that neither fixes a bug nor adds a feature. + It is important to mention that this key is not related to css styles. + - **test**: Adding missing tests or correcting existing tests. + + ### Example + fix: TT-48 implement semantic versioning + + Prefix to use in the space fix: + `(fix: |feat: |perf: |build: |ci: |docs: |refactor: |style: |test: )` \ No newline at end of file From 48f641170a968c7f12bc60f7882b0f4eda6cede2 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Thu, 25 Nov 2021 12:38:09 -0500 Subject: [PATCH 17/33] feat: TT-402 put v2 time entries (#347) * feat: TT-402 add put time entries * refactor: TT-402 rebase post time entry * test: TT-402 add integration test of UPDATE * refactor: TT-402 delete time_entires_sql_dao_test * refactor: TT-404 revert changes _db.py * refactor: TT-402 Andres's resolve comments * fix: TT-402 refactor azure update endpoint * fix: TT-402 change name test --- V2/serverless.yml | 12 +++ .../azure/time_entry_azure_endpoints_test.py | 77 ++++++++++++++++++- V2/tests/fixtures.py | 2 +- .../integration/daos/time_entries_dao_test.py | 35 ++++++++- .../unit/services/time_entry_service_test.py | 17 ++++ .../use_cases/time_entries_use_case_test.py | 13 ++++ .../time_entries/_application/__init__.py | 4 +- .../_application/_time_entries/__init__.py | 3 +- .../_time_entries/_update_time_entry.py | 46 +++++++++++ .../time_entries/_domain/__init__.py | 3 +- .../_domain/_entities/__init__.py | 2 +- .../_persistence_contracts/__init__.py | 2 +- .../_time_entries_dao.py | 4 + .../_domain/_services/__init__.py | 2 +- .../_domain/_services/_time_entry.py | 3 + .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_update_time_entry_use_case.py | 11 +++ .../_data_persistence/_time_entries_dao.py | 23 +++++- V2/time_tracker/time_entries/interface.py | 3 +- 19 files changed, 247 insertions(+), 16 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index e5dea8e9..bac6bcc9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -97,6 +97,17 @@ functions: route: time-entries/{id} authLevel: anonymous + + update_time_entry: + handler: time_tracker/time_entries/interface.update_time_entry + events: + - http: true + x-azure-settings: + methods: + - PUT + route: time-entries/{id} + authLevel: anonymous + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -106,3 +117,4 @@ functions: - POST route: customers/ authLevel: anonymous + \ No newline at end of file diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index 8422c4b5..f57db585 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,5 +1,6 @@ import pytest import json +from faker import Faker import azure.functions as func @@ -25,7 +26,7 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ test_db, time_entry_factory, activity_factory, insert_activity ): inserted_activity = insert_activity(activity_factory(), test_db) - time_entry_body = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]").__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity.id).__dict__ body = json.dumps(time_entry_body).encode("utf-8") req = func.HttpRequest( @@ -46,7 +47,7 @@ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_de test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, ): inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"]) inserted_time_entry = insert_time_entry(time_entry_body, test_db) req = func.HttpRequest( @@ -76,3 +77,75 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.status_code == 400 assert response.get_body() == b'Invalid Format ID' + + +def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an_time_entry_to_update( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(existent_time_entries, test_db).__dict__ + + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method='PUT', + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entries["id"]}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_time_entries.update(time_entry_body) + + assert response.status_code == 200 + assert activitiy_json_data == json.dumps(inserted_time_entries) + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_format_id(): + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().sentence()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid Format ID' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_404__when_not_found_an_time_entry_to_update(): + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_body(): + + time_entry_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Incorrect time entry body' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index a02a74bb..91b0a801 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -44,7 +44,7 @@ def _make_time_entry( description=Faker().sentence(), activity_id=Faker().random_int(), uri=Faker().domain_name(), - technologies=["jira", "git"], + technologies=str(Faker().pylist()), end_date=str(Faker().date_time()), deleted=False, timezone_offset="300", diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index 901bce34..fbe5a7ed 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -29,7 +29,7 @@ def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_da dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id) inserted_time_entry = dao.create(time_entry_to_insert) @@ -41,7 +41,7 @@ def test__time_entry__returns_None__when_not_saves_correctly( time_entry_factory, create_fake_dao, test_db ): dao = create_fake_dao(test_db) - time_entry_to_insert = time_entry_factory(activity_id=1203, technologies="[jira,sql]") + time_entry_to_insert = time_entry_factory(activity_id=1203) inserted_time_entry = dao.create(time_entry_to_insert) @@ -53,7 +53,7 @@ def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_mat ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entry = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + existent_time_entry = time_entry_factory(activity_id=inserted_activity.id) inserted_time_entry = dao.create(existent_time_entry) result = dao.delete(inserted_time_entry.id) @@ -69,3 +69,32 @@ def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( result = dao.delete(Faker().pyint()) assert result is None + + +def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(existent_time_entries).__dict__ + time_entry_id = inserted_time_entries["id"] + inserted_time_entries.update({"description": "description updated"}) + + time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entries) + + assert time_entry.id == time_entry_id + assert time_entry.description == inserted_time_entries.get("description") + + +def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(existent_time_entries).__dict__ + + time_entry = dao.update(0, inserted_time_entries) + + assert time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index e83b6afb..0952f8a9 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -29,3 +29,20 @@ def test__delete_time_entry__uses_the_time_entry_dao__to_delete_time_entry_selec assert time_entry_dao.delete.called assert expected_time_entry == deleted_time_entry + + +def test__update_time_entry__uses_the_time_entry_dao__to_update_one_time_entry( + mocker, +): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + updated_time_entry = time_entry_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert time_entry_dao.update.called + assert expected_time_entry == updated_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index e0994df4..1a679f37 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -30,3 +30,16 @@ def test__delete_time_entry_function__uses_the_time_entry_service__to_delete_tim assert time_entry_service.delete.called assert expected_time_entry == deleted_time_entry + + +def test__update_time_entries_function__uses_the_time_entry_service__to_update_an_time_entry( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(update=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.UpdateTimeEntryUseCase(time_entry_service) + updated_time_entry = time_entry_use_case.update_time_entry(Faker().uuid4(), Faker().pydict()) + + assert time_entry_service.update.called + assert expected_time_entry == updated_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 2810c87d..0ca4e272 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -1,2 +1,4 @@ # flake8: noqa -from ._time_entries import create_time_entry, delete_time_entry \ No newline at end of file +from ._time_entries import create_time_entry +from ._time_entries import delete_time_entry +from ._time_entries import update_time_entry diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 4cb4d4b0..0f6cf2db 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,3 +1,4 @@ # flake8: noqa from ._create_time_entry import create_time_entry -from ._delete_time_entry import delete_time_entry \ No newline at end of file +from ._delete_time_entry import delete_time_entry +from ._update_time_entry import update_time_entry diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py new file mode 100644 index 00000000..63366869 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py @@ -0,0 +1,46 @@ +import dataclasses +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def update_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.UpdateTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + time_entry_data = req.get_json() + + if not _validate_time_entry(time_entry_data): + status_code = 400 + response = b"Incorrect time entry body" + else: + updated_time_entry = use_case.update_time_entry(time_entry_id, time_entry_data) + status_code, response = [ + 404, b"Not found" + ] if not updated_time_entry else [200, json.dumps(updated_time_entry.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> bool: + time_entry_keys = [field.name for field in dataclasses.fields(_domain.TimeEntry)] + return all(key in time_entry_keys for key in time_entry_data.keys()) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index ad927811..de58675c 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -4,5 +4,6 @@ from ._services import TimeEntryService from ._use_cases import ( CreateTimeEntryUseCase, - DeleteTimeEntryUseCase + DeleteTimeEntryUseCase, + UpdateTimeEntryUseCase, ) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/time_entries/_domain/_entities/__init__.py index 88b4a739..3245a461 100644 --- a/V2/time_tracker/time_entries/_domain/_entities/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_entities/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entry import TimeEntry \ No newline at end of file +from ._time_entry import TimeEntry diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py index e10700ce..3f17d5ee 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries_dao import TimeEntriesDao \ No newline at end of file +from ._time_entries_dao import TimeEntriesDao diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index e7d94608..8c1dc9d9 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -11,3 +11,7 @@ def create(self, time_entry_data: TimeEntry) -> TimeEntry: @abc.abstractmethod def delete(self, id: int) -> TimeEntry: pass + + @abc.abstractmethod + def update(self, id: int, new_time_entry: dict) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/__init__.py b/V2/time_tracker/time_entries/_domain/_services/__init__.py index e5e6ba1b..1a06f65b 100644 --- a/V2/time_tracker/time_entries/_domain/_services/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_services/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entry import TimeEntryService \ No newline at end of file +from ._time_entry import TimeEntryService diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 9d47d5e0..5c32c1e3 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -11,3 +11,6 @@ def create(self, time_entry_data: TimeEntry) -> TimeEntry: def delete(self, id: int) -> TimeEntry: return self.time_entry_dao.delete(id) + + def update(self, time_entry_id: int, new_time_entry: dict) -> TimeEntry: + return self.time_entry_dao.update(time_entry_id, new_time_entry) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 17b2442a..4f0ac92e 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,3 +1,4 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase +from ._update_time_entry_use_case import UpdateTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py new file mode 100644 index 00000000..0e2cdf70 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class UpdateTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def update_time_entry( + self, time_entry_id: int, new_time_entry: dict + ) -> TimeEntry: + return self.time_entry_service.update(time_entry_id, new_time_entry) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py index 6037af9f..9c0740fa 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -44,9 +44,19 @@ def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: except sqlalchemy.exc.SQLAlchemyError: return None - def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: - time_entry = {key: time_entry.get(key) for key in self.time_entry_key} - return domain.TimeEntry(**time_entry) + def update(self, time_entry_id: int, time_entry_data: dict) -> domain.TimeEntry: + try: + query = self.time_entry.update().where(self.time_entry.c.id == time_entry_id).values(time_entry_data) + self.db.get_session().execute(query) + query_updated_time_entry = ( + sqlalchemy.sql.select(self.time_entry) + .where(self.time_entry.c.id == time_entry_id) + ) + time_entry = self.db.get_session().execute(query_updated_time_entry).one_or_none() + + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + except sqlalchemy.exc.SQLAlchemyError: + return None def delete(self, time_entry_id: int) -> domain.TimeEntry: query = ( @@ -58,3 +68,10 @@ def delete(self, time_entry_id: int) -> domain.TimeEntry: query_deleted_time_entry = sqlalchemy.sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) time_entry = self.db.get_session().execute(query_deleted_time_entry).one_or_none() return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: + time_entry.update({ + "start_date": str(time_entry.get("start_date")), + "end_date": str(time_entry.get("end_date"))}) + time_entry = {key: time_entry.get(key) for key in self.time_entry_key} + return domain.TimeEntry(**time_entry) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 773314bb..7e1be4ef 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,3 +1,4 @@ # flake8: noqa from ._application import create_time_entry -from ._application import delete_time_entry \ No newline at end of file +from ._application import delete_time_entry +from ._application import update_time_entry From 10ec2bb9e2b5f67358c00b549a376b7f610041de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Thu, 25 Nov 2021 17:57:44 -0500 Subject: [PATCH 18/33] feat: TT-417-crud-v2-projects (#360) * feat: TT-417 created CRUD project * test: TT-417 add test with customer id * refactor: TT-417 created enums and use * test: TT-417 add missing tests and resolve comments * refactor: TT-417 add HTTPStatus from http * refactor: TT-417 test name correction Co-authored-by: Jipson Murillo --- V2/serverless.yml | 42 ++- .../api/azure/project_azure_endpoints_test.py | 251 ++++++++++++++++++ V2/tests/conftest.py | 3 +- V2/tests/fixtures.py | 39 ++- .../integration/daos/projects_dao_test.py | 149 +++++++++++ .../unit/services/project_service_test.py | 74 ++++++ .../unit/use_cases/projects_use_case_test.py | 80 ++++++ .../projects/_application/__init__.py | 5 + .../_application/_projects/__init__.py | 5 + .../_application/_projects/_create_project.py | 57 ++++ .../_application/_projects/_delete_project.py | 35 +++ .../_application/_projects/_get_projects.py | 56 ++++ .../_application/_projects/_update_project.py | 53 ++++ V2/time_tracker/projects/_domain/__init__.py | 11 + .../projects/_domain/_entities/__init__.py | 2 + .../projects/_domain/_entities/_project.py | 14 + .../_persistence_contracts/__init__.py | 2 + .../_persistence_contracts/_projects_dao.py | 25 ++ .../projects/_domain/_services/__init__.py | 2 + .../projects/_domain/_services/_project.py | 24 ++ .../projects/_domain/_use_cases/__init__.py | 6 + .../_use_cases/_create_project_use_case.py | 10 + .../_use_cases/_delete_project_use_case.py | 10 + .../_use_cases/_get_project_by_id_use_case.py | 9 + .../_use_cases/_get_projects_use_case.py | 11 + .../_use_cases/_update_project_use_case.py | 9 + .../projects/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_projects_dao.py | 77 ++++++ V2/time_tracker/projects/interface.py | 5 + 30 files changed, 1066 insertions(+), 4 deletions(-) create mode 100644 V2/tests/api/azure/project_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/projects_dao_test.py create mode 100644 V2/tests/unit/services/project_service_test.py create mode 100644 V2/tests/unit/use_cases/projects_use_case_test.py create mode 100644 V2/time_tracker/projects/_application/__init__.py create mode 100644 V2/time_tracker/projects/_application/_projects/__init__.py create mode 100644 V2/time_tracker/projects/_application/_projects/_create_project.py create mode 100644 V2/time_tracker/projects/_application/_projects/_delete_project.py create mode 100644 V2/time_tracker/projects/_application/_projects/_get_projects.py create mode 100644 V2/time_tracker/projects/_application/_projects/_update_project.py create mode 100644 V2/time_tracker/projects/_domain/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_entities/_project.py create mode 100644 V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py create mode 100644 V2/time_tracker/projects/_domain/_services/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_services/_project.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py create mode 100644 V2/time_tracker/projects/_infrastructure/__init__.py create mode 100644 V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py create mode 100644 V2/time_tracker/projects/interface.py diff --git a/V2/serverless.yml b/V2/serverless.yml index bac6bcc9..e02e8fb6 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -97,7 +97,6 @@ functions: route: time-entries/{id} authLevel: anonymous - update_time_entry: handler: time_tracker/time_entries/interface.update_time_entry events: @@ -117,4 +116,43 @@ functions: - POST route: customers/ authLevel: anonymous - \ No newline at end of file + + get_projects: + handler: time_tracker/projects/interface.get_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/{id:?} + authLevel: anonymous + + delete_project: + handler: time_tracker/projects/interface.delete_project + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: projects/{id} + authLevel: anonymous + + update_project: + handler: time_tracker/projects/interface.update_project + events: + - http: true + x-azure-settings: + methods: + - PUT + route: projects/{id} + authLevel: anonymous + + create_project: + handler: time_tracker/projects/interface.create_project + events: + - http: true + x-azure-settings: + methods: + - POST + route: projects/ + authLevel: anonymous diff --git a/V2/tests/api/azure/project_azure_endpoints_test.py b/V2/tests/api/azure/project_azure_endpoints_test.py new file mode 100644 index 00000000..232462b7 --- /dev/null +++ b/V2/tests/api/azure/project_azure_endpoints_test.py @@ -0,0 +1,251 @@ +import json +from http import HTTPStatus + +import pytest +from faker import Faker +import azure.functions as func + +from time_tracker.projects._application import _projects as azure_projects +from time_tracker.projects import _domain as domain +from time_tracker.projects import _infrastructure as infrastructure + +PROJECT_URL = '/api/projects/' + + +@pytest.fixture(name='insert_project') +def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(customer_id=inserted_customer.id) + dao = infrastructure.ProjectsSQLDao(test_db) + inserted_project = dao.create(project_to_insert) + return inserted_project + return _new_project + + +def test__project_azure_endpoint__returns_all_projects( + insert_project +): + inserted_projects = [ + insert_project().__dict__, + insert_project().__dict__ + ] + + req = func.HttpRequest(method='GET', body=None, url=PROJECT_URL) + response = azure_projects._get_projects.get_projects(req) + projects_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == json.dumps(inserted_projects) + + +def test__project_azure_endpoint__returns_a_project__when_project_matches_its_id( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._get_projects.get_projects(req) + activitiy_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="GET", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._get_projects.get_projects(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__returns_a_project_with_inactive_status__when_a_project_matching_its_id_is_found( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._delete_project.delete_project(req) + project_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert project_json_data['status'] == 0 + assert project_json_data['deleted'] is True + + +def test__delete_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__delete_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_delete( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_project_azure_endpoint__returns_a_project__when_found_a_project_to_update( + insert_project +): + inserted_project = insert_project().__dict__ + + project_body = {"description": Faker().sentence()} + req = func.HttpRequest( + method='PUT', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._update_project.update_project(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_project.update(project_body) + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__update_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_update( + project_factory +): + project_body = project_factory().__dict__ + + req = func.HttpRequest( + method="PUT", + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": project_body["id"]}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_receive_an_incorrect_body( +): + project_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Incorrect body" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="PUT", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__creates_a_project__when_project_has_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted = insert_customer(customer_factory(), test_db) + project_body = project_factory(inserted.id).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + project_json_data = json.loads(response.get_body()) + project_body['id'] = project_json_data['id'] + + assert response.status_code == HTTPStatus.CREATED + assert project_json_data == project_body + + +def test__project_azure_endpoint__returns_a_status_code_400__when_project_does_not_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted_customer = insert_customer(customer_factory(), test_db) + project_body = project_factory(customer_id=inserted_customer.id).__dict__ + project_body.pop('name') + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == json.dumps(['The name key is missing in the input data']).encode() + + +def test__project_azure_endpoint__returns_a_status_code_500__when_project_receive_incorrect_type_data( + project_factory, insert_customer, customer_factory, test_db +): + insert_customer(customer_factory(), test_db) + project_body = project_factory(technologies=Faker().pylist(2, True, str)).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR + assert response.get_body() == b"could not be created" diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index 4ad03c51..ff67203c 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,4 +1,5 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity from fixtures import _time_entry_factory -from fixtures import _customer_factory +from fixtures import _customer_factory, _insert_customer +from fixtures import _project_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 91b0a801..2eae7b16 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,9 +2,11 @@ from faker import Faker import time_tracker.activities._domain as activities_domain +import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain import time_tracker.customers._domain as customers_domain -import time_tracker.activities._infrastructure as activities_infrastructure +import time_tracker.customers._infrastructure as customers_infrastructure +import time_tracker.projects._domain as projects_domain from time_tracker._infrastructure import DB @@ -94,3 +96,38 @@ def _make_customer( return customer return _make_customer + + +@pytest.fixture(name='project_factory') +def _project_factory() -> projects_domain.Project: + def _make_project( + id=Faker().pyint(), + name=Faker().name(), + description=Faker().sentence(), + project_type_id=Faker().pyint(), + customer_id=Faker().pyint(), + status=Faker().pyint(), + deleted=False, + technologies=str(Faker().pylist()) + ): + project = projects_domain.Project( + id=id, + name=name, + description=description, + project_type_id=project_type_id, + customer_id=customer_id, + status=status, + deleted=deleted, + technologies=technologies + ) + return project + return _make_project + + +@pytest.fixture(name='insert_customer') +def _insert_customer() -> customers_domain.Customer: + def _new_customer(customer: customers_domain.Customer, database: DB): + dao = customers_infrastructure.CustomersSQLDao(database) + new_customer = dao.create(customer) + return new_customer + return _new_customer diff --git a/V2/tests/integration/daos/projects_dao_test.py b/V2/tests/integration/daos/projects_dao_test.py new file mode 100644 index 00000000..64837e37 --- /dev/null +++ b/V2/tests/integration/daos/projects_dao_test.py @@ -0,0 +1,149 @@ +import pytest +import typing +from faker import Faker + +from time_tracker.projects import _domain as domain +from time_tracker.projects import _infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='insert_project') +def _insert_project(customer_factory, test_db, insert_customer, create_fake_dao, project_factory) -> domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(customer_id=inserted_customer.id) + inserted_project = create_fake_dao.create(project_to_insert) + return inserted_project + + return _new_project + + +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ProjectsDao: + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + return dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + query = dao.project.delete() + dao.db.get_session().execute(query) + + +def test__create_project__returns_a_project_dto__when_saves_correctly_with_sql_database( + create_fake_dao, project_factory, insert_customer, customer_factory +): + dao = create_fake_dao + inserted_customer = insert_customer(customer_factory(), dao.db) + project_to_insert = project_factory(customer_id=inserted_customer.id) + + inserted_project = dao.create(project_to_insert) + + assert isinstance(inserted_project, domain.Project) + assert inserted_project == project_to_insert + + +def test_update__returns_an_update_project__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + expected_description = Faker().sentence() + updated_project = dao.update(inserted_project.id, {"description": expected_description}) + + assert isinstance(updated_project, domain.Project) + assert updated_project.id == inserted_project.id + assert updated_project.description == expected_description + + +def test_update__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.update(project_to_insert.id, {"description": Faker().sentence()}) + + assert results is None + + +def test__get_all__returns_a_list_of_project_dto_objects__when_one_or_more_projects_are_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_projects = [ + insert_project(), + insert_project() + ] + + projects = dao.get_all() + assert isinstance(projects, typing.List) + assert projects == inserted_projects + + +def test_get_by_id__returns_an_project_dto__when_found_one_project_that_matches_its_id_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.get_by_id(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project == inserted_project + + +def test__get_by_id__returns_none__when_no_project_matches_its_id_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + project = dao.get_by_id(project_to_insert.id) + + assert project is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_projects_with_sql_database( + create_fake_dao +): + projects = create_fake_dao.get_all() + + assert isinstance(projects, typing.List) + assert projects == [] + + +def test_delete__returns_an_project_with_inactive_status__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.delete(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project.status == 0 + assert project.deleted is True + + +def test_delete__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.delete(project_to_insert.id) + + assert results is None diff --git a/V2/tests/unit/services/project_service_test.py b/V2/tests/unit/services/project_service_test.py new file mode 100644 index 00000000..9baf657e --- /dev/null +++ b/V2/tests/unit/services/project_service_test.py @@ -0,0 +1,74 @@ +from faker import Faker + +from time_tracker.projects._domain import ProjectService + + +def test__get_all__uses_the_project_dao__to_retrieve_projects(mocker): + expected_projects = mocker.Mock() + project_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + project_service = ProjectService(project_dao) + + actual_projects = project_service.get_all() + + assert project_dao.get_all.called + assert expected_projects == actual_projects + + +def test__get_by_id__uses_the_project_dao__to_retrieve_one_project(mocker): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + actual_project = project_service.get_by_id(Faker().pyint()) + + assert project_dao.get_by_id.called + assert expected_project == actual_project + + +def test__delete_project__uses_the_project_dao__to_change_project_status( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_project) + ) + + project_service = ProjectService(project_dao) + deleted_project = project_service.delete(Faker().pyint()) + + assert project_dao.delete.called + assert expected_project == deleted_project + + +def test__update_project__uses_the_project_dao__to_update_one_project( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + updated_project = project_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert project_dao.update.called + assert expected_project == updated_project + + +def test__create_project__uses_the_project_dao__to_create_an_project(mocker, project_factory): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + actual_project = project_service.create(project_factory()) + + assert project_dao.create.called + assert expected_project == actual_project diff --git a/V2/tests/unit/use_cases/projects_use_case_test.py b/V2/tests/unit/use_cases/projects_use_case_test.py new file mode 100644 index 00000000..22167418 --- /dev/null +++ b/V2/tests/unit/use_cases/projects_use_case_test.py @@ -0,0 +1,80 @@ +from pytest_mock import MockFixture +from faker import Faker + +from time_tracker.projects._domain import _use_cases + + +def test__create_project_function__uses_the_projects_service__to_create_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.CreateProjectUseCase(project_service) + actual_project = project_use_case.create_project(project_factory()) + + assert project_service.create.called + assert expected_project == actual_project + + +def test__delete_project_function__uses_the_project_service__to_delete_project_selected( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock(delete=mocker.Mock(return_value=expected_project)) + + project_use_case = _use_cases.DeleteProjectUseCase(project_service) + deleted_project = project_use_case.delete_project(Faker().pyint()) + + assert project_service.delete.called + assert expected_project == deleted_project + + +def test__get_list_projects_function__uses_the_project_service__to_retrieve_projects( + mocker: MockFixture, +): + expected_projects = mocker.Mock() + project_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + + projects_use_case = _use_cases.GetProjectsUseCase(project_service) + actual_projects = projects_use_case.get_projects() + + assert project_service.get_all.called + assert expected_projects == actual_projects + + +def test__get_project_by_id_function__uses_the_project_service__to_retrieve_project( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.GetProjectUseCase(project_service) + actual_project = project_use_case.get_project_by_id(Faker().pyint()) + + assert project_service.get_by_id.called + assert expected_project == actual_project + + +def test__update_project_function__uses_the_projects_service__to_update_an_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_to_update = project_factory() + + project_use_case = _use_cases.UpdateProjectUseCase(project_service) + updated_project = project_use_case.update_project( + Faker().pyint(), project_to_update.__dict__ + ) + + assert project_service.update.called + assert expected_project == updated_project diff --git a/V2/time_tracker/projects/_application/__init__.py b/V2/time_tracker/projects/_application/__init__.py new file mode 100644 index 00000000..6b48fb8a --- /dev/null +++ b/V2/time_tracker/projects/_application/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._projects import create_project +from ._projects import delete_project +from ._projects import get_projects +from ._projects import update_project \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/__init__.py b/V2/time_tracker/projects/_application/_projects/__init__.py new file mode 100644 index 00000000..9f87eef2 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._create_project import create_project +from ._delete_project import delete_project +from ._get_projects import get_projects +from ._update_project import update_project \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/_create_project.py b/V2/time_tracker/projects/_application/_projects/_create_project.py new file mode 100644 index 00000000..559ba864 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_create_project.py @@ -0,0 +1,57 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def create_project(req: func.HttpRequest) -> func.HttpResponse: + + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.CreateProjectUseCase(project_service) + + project_data = req.get_json() + + validation_errors = _validate_project(project_data) + if validation_errors: + status_code = HTTPStatus.BAD_REQUEST + response = json.dumps(validation_errors) + else: + project_to_create = _domain.Project( + id=None, + name=project_data["name"], + description=project_data["description"], + project_type_id=project_data["project_type_id"], + customer_id=project_data["customer_id"], + status=project_data["status"], + deleted=False, + technologies=project_data["technologies"] + ) + + created_project = use_case.create_project(project_to_create) + + status_code, response = [ + HTTPStatus.INTERNAL_SERVER_ERROR, b"could not be created" + ] if not created_project else [HTTPStatus.CREATED, json.dumps(created_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json" + ) + + +def _validate_project(project_data: dict) -> typing.List[str]: + project_fields = [field.name for field in dataclasses.fields(_domain.Project) + if field.type != typing.Optional[field.type]] + missing_keys = [field for field in project_fields if field not in project_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/projects/_application/_projects/_delete_project.py b/V2/time_tracker/projects/_application/_projects/_delete_project.py new file mode 100644 index 00000000..5274b79f --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_delete_project.py @@ -0,0 +1,35 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def delete_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.DeleteProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + deleted_project = use_case.delete_project(project_id) + + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not deleted_project else [HTTPStatus.OK, json.dumps(deleted_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) diff --git a/V2/time_tracker/projects/_application/_projects/_get_projects.py b/V2/time_tracker/projects/_application/_projects/_get_projects.py new file mode 100644 index 00000000..c15efa1c --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_projects.py @@ -0,0 +1,56 @@ +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + + project_id = req.route_params.get("id") + + try: + if project_id: + response = _get_by_id(int(project_id), project_service) + if not response: + return func.HttpResponse( + body=b"Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype="application/json" + ) + else: + response = _get_all(project_service) + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _get_by_id(project_id: int, project_service: _domain.ProjectService) -> str: + use_case = _domain._use_cases.GetProjectUseCase(project_service) + project = use_case.get_project_by_id(project_id) + + return project.__dict__ if project else None + + +def _get_all(project_service: _domain.ProjectService) -> typing.List: + use_case = _domain._use_cases.GetProjectsUseCase(project_service) + return [ + project.__dict__ + for project in use_case.get_projects() + ] diff --git a/V2/time_tracker/projects/_application/_projects/_update_project.py b/V2/time_tracker/projects/_application/_projects/_update_project.py new file mode 100644 index 00000000..b2cc1e57 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_update_project.py @@ -0,0 +1,53 @@ +import dataclasses +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def update_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.UpdateProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + project_data = req.get_json() + + if not _validate_project(project_data): + status_code = HTTPStatus.BAD_REQUEST + response = b"Incorrect body" + + else: + updated_project = use_case.update_project(project_id, project_data) + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not updated_project else [HTTPStatus.OK, json.dumps(updated_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + except Exception as error: + return func.HttpResponse( + body=str(error).encode(), + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + + +def _validate_project(project_data: dict) -> bool: + project_keys = [field.name for field in dataclasses.fields(_domain.Project)] + return all(key in project_keys for key in project_data.keys()) diff --git a/V2/time_tracker/projects/_domain/__init__.py b/V2/time_tracker/projects/_domain/__init__.py new file mode 100644 index 00000000..c90dbcaf --- /dev/null +++ b/V2/time_tracker/projects/_domain/__init__.py @@ -0,0 +1,11 @@ +# flake8: noqa +from ._entities import Project +from ._persistence_contracts import ProjectsDao +from ._services import ProjectService +from ._use_cases import ( + CreateProjectUseCase, + DeleteProjectUseCase, + GetProjectsUseCase, + GetProjectUseCase, + UpdateProjectUseCase +) \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/__init__.py b/V2/time_tracker/projects/_domain/_entities/__init__.py new file mode 100644 index 00000000..693c3a41 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import Project \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/_project.py b/V2/time_tracker/projects/_domain/_entities/_project.py new file mode 100644 index 00000000..0b2ffe1a --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/_project.py @@ -0,0 +1,14 @@ +from dataclasses import dataclass +from typing import List, Optional + + +@dataclass(frozen=True) +class Project: + id: Optional[int] + name: str + description: str + project_type_id: int + customer_id: int + status: int + deleted: Optional[bool] + technologies: List[str] diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..b17214a7 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsDao \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py new file mode 100644 index 00000000..f38c8ebd --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py @@ -0,0 +1,25 @@ +import abc + +from .. import Project + + +class ProjectsDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: Project) -> Project: + pass + + @abc.abstractmethod + def get_all(self) -> Project: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Project: + pass + + @abc.abstractmethod + def update(self, id: int, project_data: dict) -> Project: + pass + + @abc.abstractmethod + def delete(self, id: int) -> Project: + pass diff --git a/V2/time_tracker/projects/_domain/_services/__init__.py b/V2/time_tracker/projects/_domain/_services/__init__.py new file mode 100644 index 00000000..5eb9532b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import ProjectService \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_services/_project.py b/V2/time_tracker/projects/_domain/_services/_project.py new file mode 100644 index 00000000..0f99dafb --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/_project.py @@ -0,0 +1,24 @@ +import typing + +from .. import Project, ProjectsDao + + +class ProjectService: + + def __init__(self, project_dao: ProjectsDao): + self.project_dao = project_dao + + def create(self, project_data: Project) -> Project: + return self.project_dao.create(project_data) + + def get_all(self) -> typing.List[Project]: + return self.project_dao.get_all() + + def get_by_id(self, id: int) -> Project: + return self.project_dao.get_by_id(id) + + def update(self, id: int, project_data: dict) -> Project: + return self.project_dao.update(id, project_data) + + def delete(self, id: int) -> Project: + return self.project_dao.delete(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/__init__.py b/V2/time_tracker/projects/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..defb127d --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._create_project_use_case import CreateProjectUseCase +from ._delete_project_use_case import DeleteProjectUseCase +from ._get_project_by_id_use_case import GetProjectUseCase +from ._get_projects_use_case import GetProjectsUseCase +from ._update_project_use_case import UpdateProjectUseCase diff --git a/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py new file mode 100644 index 00000000..60b50687 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py @@ -0,0 +1,10 @@ +from .. import Project, ProjectService + + +class CreateProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def create_project(self, project_data: Project) -> Project: + return self.project_service.create(project_data) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py new file mode 100644 index 00000000..9dd91d4b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py @@ -0,0 +1,10 @@ +from .. import Project, ProjectService + + +class DeleteProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def delete_project(self, id: int) -> Project: + return self.project_service.delete(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py new file mode 100644 index 00000000..94573496 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py @@ -0,0 +1,9 @@ +from .. import ProjectService, Project + + +class GetProjectUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_project_by_id(self, id: int) -> Project: + return self.project_service.get_by_id(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py new file mode 100644 index 00000000..ccf0b3a4 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_projects(self) -> typing.List[Project]: + return self.project_service.get_all() diff --git a/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py new file mode 100644 index 00000000..628d7437 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py @@ -0,0 +1,9 @@ +from .. import ProjectService, Project + + +class UpdateProjectUseCase: + def __init__(self, projects_service: ProjectService): + self.projects_service = projects_service + + def update_project(self, id: int, project_data: dict) -> Project: + return self.projects_service.update(id, project_data) diff --git a/V2/time_tracker/projects/_infrastructure/__init__.py b/V2/time_tracker/projects/_infrastructure/__init__.py new file mode 100644 index 00000000..b940cba3 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..b73fcf44 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py new file mode 100644 index 00000000..2ec61186 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py @@ -0,0 +1,77 @@ +import typing +import dataclasses + +import sqlalchemy as sq + +from ... import _domain as domain +from time_tracker._infrastructure import _db + + +class ProjectsSQLDao(domain.ProjectsDao): + + def __init__(self, database: _db.DB): + self.project_key = [field.name for field in dataclasses.fields(domain.Project)] + self.db = database + self.project = sq.Table( + 'project', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String), + sq.Column('description', sq.String), + sq.Column('project_type_id', sq.Integer), + sq.Column('customer_id', sq.Integer, sq.ForeignKey('customer.id')), + sq.Column('status', sq.SmallInteger), + sq.Column('deleted', sq.BOOLEAN), + sq.Column( + 'technologies', + sq.ARRAY(sq.String).with_variant(sq.String, "sqlite") + ), + extend_existing=True, + ) + + def create(self, project_data: domain.Project) -> domain.Project: + try: + new_project = project_data.__dict__ + new_project.pop('id', None) + + query = self.project.insert().values(new_project).return_defaults() + project = self.db.get_session().execute(query) + new_project.update({"id": project.inserted_primary_key[0]}) + return self.__create_project_dto(new_project) + + except sq.exc.SQLAlchemyError: + return None + + def get_by_id(self, id: int) -> domain.Project: + query = sq.sql.select(self.project).where(self.project.c.id == id) + project = self.db.get_session().execute(query).one_or_none() + return self.__create_project_dto(dict(project)) if project else None + + def get_all(self) -> typing.List[domain.Project]: + query = sq.sql.select(self.project) + result = self.db.get_session().execute(query) + return [ + self.__create_project_dto(dict(project)) + for project in result + ] + + def delete(self, id: int) -> domain.Project: + query = ( + self.project.update() + .where(self.project.c.id == id) + .values({"deleted": True, "status": 0}) + ) + self.db.get_session().execute(query) + return self.get_by_id(id) + + def update(self, id: int, project_data: dict) -> domain.Project: + try: + query = self.project.update().where(self.project.c.id == id).values(project_data) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError as error: + raise Exception(error.orig) + + def __create_project_dto(self, project: dict) -> domain.Project: + project = {key: project.get(key) for key in self.project_key} + return domain.Project(**project) diff --git a/V2/time_tracker/projects/interface.py b/V2/time_tracker/projects/interface.py new file mode 100644 index 00000000..2fb3244b --- /dev/null +++ b/V2/time_tracker/projects/interface.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._application import create_project +from ._application import delete_project +from ._application import get_projects +from ._application import update_project \ No newline at end of file From c8a31341b120792f46442815fad2d463262302ab Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Thu, 25 Nov 2021 19:47:16 -0500 Subject: [PATCH 19/33] feat: TT-404 GET Time Entries (#341) --- V2/serverless.yml | 14 +++- .../azure/time_entry_azure_endpoints_test.py | 65 ++++++++++++++++++- .../integration/daos/time_entries_dao_test.py | 55 ++++++++++++++++ .../unit/services/time_entry_service_test.py | 26 ++++++++ .../use_cases/time_entries_use_case_test.py | 32 +++++++++ .../time_entries/_application/__init__.py | 1 + .../_application/_time_entries/__init__.py | 1 + .../_time_entries/_get_time_entries.py | 61 +++++++++++++++++ .../time_entries/_domain/__init__.py | 4 +- .../_time_entries_dao.py | 8 +++ .../_domain/_services/_time_entry.py | 9 ++- .../_domain/_use_cases/__init__.py | 2 + .../_get_time_entry_by_id_use_case.py | 9 +++ .../_use_cases/_get_time_entry_use_case.py | 11 ++++ .../_data_persistence/__init__.py | 2 +- ...ntries_dao.py => _time_entries_sql_dao.py} | 15 +++++ V2/time_tracker/time_entries/interface.py | 1 + 17 files changed, 308 insertions(+), 8 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py rename V2/time_tracker/time_entries/_infrastructure/_data_persistence/{_time_entries_dao.py => _time_entries_sql_dao.py} (85%) diff --git a/V2/serverless.yml b/V2/serverless.yml index e02e8fb6..ba8edb52 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -63,9 +63,9 @@ functions: - http: true x-azure-settings: methods: - - PUT + - PUT route: activities/{id} - authLevel: anonymous + authLevel: anonymous create_activity: handler: time_tracker/activities/interface.create_activity @@ -87,6 +87,16 @@ functions: route: time-entries/ authLevel: anonymous + get_time_entries: + handler: time_tracker/time_entries/interface.get_time_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/{id:?} + authLevel: anonymous + delete_time_entry: handler: time_tracker/time_entries/interface.delete_time_entry events: diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index f57db585..fcc8dea0 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,6 +1,7 @@ import pytest import json from faker import Faker +from http import HTTPStatus import azure.functions as func @@ -39,7 +40,7 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ time_entry_json_data = json.loads(response.get_body()) time_entry_body['id'] = time_entry_json_data['id'] - assert response.status_code == 201 + assert response.status_code == HTTPStatus.CREATED assert time_entry_json_data == time_entry_body @@ -60,7 +61,7 @@ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_de response = azure_time_entries._delete_time_entry.delete_time_entry(req) time_entry_json_data = json.loads(response.get_body().decode("utf-8")) - assert response.status_code == 200 + assert response.status_code == HTTPStatus.OK assert time_entry_json_data['deleted'] is True @@ -75,7 +76,65 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti response = azure_time_entries._delete_time_entry.delete_time_entry(req) - assert response.status_code == 400 + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__time_entry_azure_endpoint__returns_all_time_entries( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest(method="GET", body=None, url=TIME_ENTRY_URL) + + response = azure_time_entries.get_time_entries(req) + time_entries_json_data = response.get_body().decode("utf-8") + time_entry_list = json.loads(time_entries_json_data) + + assert response.status_code == HTTPStatus.OK + assert time_entry_list.pop() == inserted_time_entries + + +def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matches_its_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entries["id"]}, + ) + + response = azure_time_entries.get_time_entries(req) + time_entry_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data == json.dumps(inserted_time_entries) + + +def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries.get_time_entries(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST assert response.get_body() == b'Invalid Format ID' diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index fbe5a7ed..e78af556 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -1,4 +1,6 @@ import pytest +import typing + from faker import Faker import time_tracker.time_entries._domain as domain @@ -98,3 +100,56 @@ def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( time_entry = dao.update(0, inserted_time_entries) assert time_entry is None + + +def test__get_all__returns_a_list_of_time_entries_dto_objects__when_one_or_more_time_entries_are_found_in_sql_database( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = [dao.create(time_entries_to_insert)] + + time_entry = dao.get_all() + + assert isinstance(time_entry, typing.List) + assert time_entry == inserted_time_entries + + +def test__get_all__returns_an_empty_list__when_doesnt_found_any_time_entries_in_sql_database( + test_db, create_fake_dao, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + time_entry = dao.get_all() + assert time_entry == [] + + +def test__get_by_id__returns_a_time_entry_dto__when_found_one_time_entry_that_match_id_with_sql_database( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(time_entries_to_insert.id) + + assert isinstance(time_entry, domain.TimeEntry) + assert time_entry.id == inserted_time_entries.id + assert time_entry == inserted_time_entries + + +def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(Faker().pyint()) + + assert time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index 0952f8a9..1992324f 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -46,3 +46,29 @@ def test__update_time_entry__uses_the_time_entry_dao__to_update_one_time_entry( assert time_entry_dao.update.called assert expected_time_entry == updated_time_entry + + +def test__get_all__uses_the_time_entry_dao__to_retrieve_time_entries(mocker): + expected_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + time_activity_service = TimeEntryService(time_entry_dao) + + actual_activities = time_activity_service.get_all() + + assert time_entry_dao.get_all.called + assert expected_time_entries == actual_activities + + +def test__get_by_id__uses_the_time_entry_dao__to_retrieve_one_time_entry(mocker): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.get_by_id(Faker().uuid4()) + + assert time_entry_dao.get_by_id.called + assert expected_time_entry == actual_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index 1a679f37..05937789 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -3,6 +3,8 @@ from time_tracker.time_entries._domain import _use_cases +fake = Faker() + def test__create_time_entry_function__uses_the_time_entries_service__to_create_time_entry( mocker: MockFixture, time_entry_factory @@ -43,3 +45,33 @@ def test__update_time_entries_function__uses_the_time_entry_service__to_update_a assert time_entry_service.update.called assert expected_time_entry == updated_time_entry + + +def test__get_all_time_entries_function__using_the_use_case_get_time_entries__to_get_all_time_entries( + mocker: MockFixture, +): + expected_time_entries = mocker.Mock() + time_entry_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + + time_entries_use_case = _use_cases.GetTimeEntriesUseCase(time_entry_service) + actual_time_entries = time_entries_use_case.get_time_entries() + + assert time_entry_service.get_all.called + assert expected_time_entries == actual_time_entries + + +def test__get_time_entry_by_id_function__uses_the_time_entry_service__to_retrieve_time_entry( + mocker: MockFixture, +): + expected_time_entries = mocker.Mock() + time_entry_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entries) + ) + + time_entry_use_case = _use_cases.GetTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.get_time_entry_by_id(fake.uuid4()) + + assert time_entry_service.get_by_id.called + assert expected_time_entries == actual_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 0ca4e272..eb817c22 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -2,3 +2,4 @@ from ._time_entries import create_time_entry from ._time_entries import delete_time_entry from ._time_entries import update_time_entry +from ._time_entries import get_time_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 0f6cf2db..382fbbe4 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -2,3 +2,4 @@ from ._create_time_entry import create_time_entry from ._delete_time_entry import delete_time_entry from ._update_time_entry import update_time_entry +from ._get_time_entries import get_time_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py new file mode 100644 index 00000000..37574d32 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py @@ -0,0 +1,61 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from time_tracker.time_entries._infrastructure import TimeEntriesSQLDao +from time_tracker.time_entries._domain import TimeEntryService, _use_cases +from time_tracker._infrastructure import DB + + +NOT_FOUND = b'Not Found' +INVALID_FORMAT_ID = b'Invalid Format ID' + + +def get_time_entries(req: func.HttpRequest) -> func.HttpResponse: + + time_entry_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + if time_entry_id: + try: + response = _get_by_id(int(time_entry_id)) + if response == NOT_FOUND: + status_code = HTTPStatus.NOT_FOUND + except ValueError: + response = INVALID_FORMAT_ID + status_code = HTTPStatus.BAD_REQUEST + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _get_by_id(id: int) -> str: + database = DB() + time_entry_use_case = _use_cases.GetTimeEntryUseCase( + _create_time_entry_service(database) + ) + time_entry = time_entry_use_case.get_time_entry_by_id(id) + + return json.dumps(time_entry.__dict__) if time_entry else NOT_FOUND + + +def _get_all() -> str: + database = DB() + time_entries_use_case = _use_cases.GetTimeEntriesUseCase( + _create_time_entry_service(database) + ) + return json.dumps( + [ + time_entry.__dict__ + for time_entry in time_entries_use_case.get_time_entries() + ] + ) + + +def _create_time_entry_service(db: DB): + time_entry_sql = TimeEntriesSQLDao(db) + return TimeEntryService(time_entry_sql) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index de58675c..2034f8d3 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -6,4 +6,6 @@ CreateTimeEntryUseCase, DeleteTimeEntryUseCase, UpdateTimeEntryUseCase, -) \ No newline at end of file + GetTimeEntriesUseCase, + GetTimeEntryUseCase +) diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index 8c1dc9d9..ca4ceb98 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -1,4 +1,5 @@ import abc +import typing from time_tracker.time_entries._domain import TimeEntry @@ -15,3 +16,10 @@ def delete(self, id: int) -> TimeEntry: @abc.abstractmethod def update(self, id: int, new_time_entry: dict) -> TimeEntry: pass + + def get_by_id(self, id: int) -> TimeEntry: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[TimeEntry]: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 5c32c1e3..5b3f4115 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -1,8 +1,9 @@ +import typing + from time_tracker.time_entries._domain import TimeEntry, TimeEntriesDao class TimeEntryService: - def __init__(self, time_entry_dao: TimeEntriesDao): self.time_entry_dao = time_entry_dao @@ -14,3 +15,9 @@ def delete(self, id: int) -> TimeEntry: def update(self, time_entry_id: int, new_time_entry: dict) -> TimeEntry: return self.time_entry_dao.update(time_entry_id, new_time_entry) + + def get_by_id(self, id: int) -> TimeEntry: + return self.time_entry_dao.get_by_id(id) + + def get_all(self) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_all() diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 4f0ac92e..fdd1258d 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -2,3 +2,5 @@ from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase from ._update_time_entry_use_case import UpdateTimeEntryUseCase +from ._get_time_entry_use_case import GetTimeEntriesUseCase +from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py new file mode 100644 index 00000000..410233e1 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py @@ -0,0 +1,9 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entry_by_id(self, id: int) -> TimeEntry: + return self.time_entry_service.get_by_id(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py new file mode 100644 index 00000000..c7bd3f27 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py @@ -0,0 +1,11 @@ +import typing + +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntriesUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entries(self) -> typing.List[TimeEntry]: + return self.time_entry_service.get_all() diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py index b999febe..76b56455 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries_dao import TimeEntriesSQLDao +from ._time_entries_sql_dao import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py similarity index 85% rename from V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py rename to V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 9c0740fa..9e7016d4 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -1,6 +1,8 @@ import dataclasses +import typing import sqlalchemy +import sqlalchemy.sql as sql import time_tracker.time_entries._domain as domain from time_tracker._infrastructure import _db @@ -31,6 +33,19 @@ def __init__(self, database: _db.DB): extend_existing=True, ) + def get_by_id(self, time_entry_id: int) -> domain.TimeEntry: + query = sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def get_all(self) -> typing.List[domain.TimeEntry]: + query = sql.select(self.time_entry) + result = self.db.get_session().execute(query) + return [ + self.__create_time_entry_dto(dict(time_entry)) + for time_entry in result + ] + def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: try: new_time_entry = time_entry_data.__dict__ diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 7e1be4ef..8873b93d 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -2,3 +2,4 @@ from ._application import create_time_entry from ._application import delete_time_entry from ._application import update_time_entry +from ._application import get_time_entries From fd0bc986fcc074bd5f6d6e9b7b602951375f2aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Fri, 26 Nov 2021 10:34:06 -0500 Subject: [PATCH 20/33] feat: TT-429 created enums for response messages (#362) --- V2/time_tracker/utils/enums/__init__.py | 2 ++ V2/time_tracker/utils/enums/response_enums.py | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 V2/time_tracker/utils/enums/__init__.py create mode 100644 V2/time_tracker/utils/enums/response_enums.py diff --git a/V2/time_tracker/utils/enums/__init__.py b/V2/time_tracker/utils/enums/__init__.py new file mode 100644 index 00000000..317ca876 --- /dev/null +++ b/V2/time_tracker/utils/enums/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from .response_enums import ResponseEnums \ No newline at end of file diff --git a/V2/time_tracker/utils/enums/response_enums.py b/V2/time_tracker/utils/enums/response_enums.py new file mode 100644 index 00000000..6509ad4c --- /dev/null +++ b/V2/time_tracker/utils/enums/response_enums.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class ResponseEnums(Enum): + INVALID_ID = "Invalid Format ID" + NOT_FOUND = "Not found" + NOT_CREATED = "could not be created" + INCORRECT_BODY = "Incorrect body" + + MIME_TYPE = "application/json" From f9e1403aadd8abca2de270fcf8a952439b525ff7 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:55:58 -0500 Subject: [PATCH 21/33] feat:TT-407 list latest v2 time entries (#353) * reactor: TT-407 rebase with master - DELETE * fix: TT-407 sintax flake8 correted * fix: TT-407 problems solved * fix: TT-407 enums added in responses --- V2/serverless.yml | 37 +++++++++----- .../azure/time_entry_azure_endpoints_test.py | 40 +++++++++++++++ .../integration/daos/time_entries_dao_test.py | 28 ++++++++++- .../unit/services/time_entry_service_test.py | 15 ++++++ .../use_cases/time_entries_use_case_test.py | 13 +++++ .../time_entries/_application/__init__.py | 1 + .../_application/_time_entries/__init__.py | 1 + .../_time_entries/_get_latest_entries.py | 49 +++++++++++++++++++ .../time_entries/_domain/__init__.py | 3 +- .../_time_entries_dao.py | 5 ++ .../_domain/_services/_time_entry.py | 3 ++ .../_domain/_use_cases/__init__.py | 1 + .../_get_latest_entries_use_case.py | 11 +++++ .../_time_entries_sql_dao.py | 14 ++++++ V2/time_tracker/time_entries/interface.py | 1 + 15 files changed, 207 insertions(+), 15 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index ba8edb52..9b31ee0b 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -1,6 +1,6 @@ service: azure-time-tracker -frameworkVersion: "2" +frameworkVersion: '2' provider: name: azure @@ -23,18 +23,18 @@ plugins: package: patterns: - - "!env/**" - - "!.env/**" - - "!local.settings.json" - - "!.vscode/**" - - "!__pycache__/**" - - "!node_modules/**" - - "!.python_packages/**" - - "!.funcignore" - - "!package.json" - - "!package-lock.json" - - "!.gitignore" - - "!.git/**" + - '!env/**' + - '!.env/**' + - '!local.settings.json' + - '!.vscode/**' + - '!__pycache__/**' + - '!node_modules/**' + - '!.python_packages/**' + - '!.funcignore' + - '!package.json' + - '!package-lock.json' + - '!.gitignore' + - '!.git/**' functions: get_activities: @@ -117,6 +117,16 @@ functions: route: time-entries/{id} authLevel: anonymous + get_latest_time_entry: + handler: time_tracker/time_entries/interface.get_latest_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/latest/ + authLevel: anonymous + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -165,4 +175,5 @@ functions: methods: - POST route: projects/ + authLevel: anonymous diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index fcc8dea0..42e3d5ec 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -9,6 +9,7 @@ from time_tracker._infrastructure import DB from time_tracker.time_entries import _domain as domain_time_entries from time_tracker.time_entries import _infrastructure as infrastructure_time_entries +from time_tracker.utils.enums import ResponseEnums TIME_ENTRY_URL = "/api/time-entries/" @@ -208,3 +209,42 @@ def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.status_code == 400 assert response.get_body() == b'Incorrect time entry body' + + +def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, +): + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data == [inserted_time_entry] + + +def test__get_latest_entries_azure_endpoint__returns_not_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body().decode("utf-8") == ResponseEnums.NOT_FOUND.value diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index e78af556..e48241cc 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -90,7 +90,7 @@ def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) @@ -153,3 +153,29 @@ def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( time_entry = dao.get_by_id(Faker().pyint()) assert time_entry is None + + +def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory( + activity_id=inserted_activity.id, + technologies="[jira,sql]") + inserted_time_entry = dao.create(time_entry_to_insert) + + result = dao.get_latest_entries(int(inserted_time_entry.owner_id)) + + assert result == [inserted_time_entry.__dict__] + + +def test_get_latest_entries__returns_none__when_an_owner_id_is_not_found( + create_fake_dao, test_db, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + result = dao.get_latest_entries(Faker().pyint()) + + assert result is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index 1992324f..d1596471 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -72,3 +72,18 @@ def test__get_by_id__uses_the_time_entry_dao__to_retrieve_one_time_entry(mocker) assert time_entry_dao.get_by_id.called assert expected_time_entry == actual_time_entry + + +def test__get_latest_entries__uses_the_time_entry_dao__to_get_last_entries( + mocker, +): + expected_latest_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + latest_time_entries = time_entry_service.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert expected_latest_time_entries == latest_time_entries + assert time_entry_dao.get_latest_entries.called diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index 05937789..f96666cb 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -75,3 +75,16 @@ def test__get_time_entry_by_id_function__uses_the_time_entry_service__to_retriev assert time_entry_service.get_by_id.called assert expected_time_entries == actual_time_entry + + +def test__get_latest_entries_function__uses_the_time_entry_service__to_get_last_entries( + mocker: MockFixture, +): + expected_latest_time_entries = mocker.Mock() + time_entry_service = mocker.Mock(get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries)) + + time_entry_use_case = _use_cases.GetLastestTimeEntryUseCase(time_entry_service) + latest_time_entries = time_entry_use_case.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert time_entry_service.get_latest_entries.called + assert expected_latest_time_entries == latest_time_entries diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index eb817c22..4d003056 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -3,3 +3,4 @@ from ._time_entries import delete_time_entry from ._time_entries import update_time_entry from ._time_entries import get_time_entries +from ._time_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 382fbbe4..29631650 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -3,3 +3,4 @@ from ._delete_time_entry import delete_time_entry from ._update_time_entry import update_time_entry from ._get_time_entries import get_time_entries +from ._get_latest_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py new file mode 100644 index 00000000..b813fb4f --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py @@ -0,0 +1,49 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB +from time_tracker.utils.enums import ResponseEnums + + +def get_latest_entries(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.GetLastestTimeEntryUseCase(time_entry_service) + + try: + owner_id = req.params.get("owner_id") + limit = req.params.get("limit") + + if not owner_id: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + time_entries = use_case.get_latest_entries(int(owner_id), int(limit) if limit and int(limit) > 0 else None) + + if not time_entries or len(time_entries) == 0: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + return func.HttpResponse( + body=json.dumps(time_entries, default=str), + status_code=HTTPStatus.OK, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + except ValueError: + return func.HttpResponse( + body=ResponseEnums.INVALID_ID.value, + status_code=HTTPStatus.BAD_REQUEST, + mimetype=ResponseEnums.MIME_TYPE.value, + ) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index 2034f8d3..513877bf 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -7,5 +7,6 @@ DeleteTimeEntryUseCase, UpdateTimeEntryUseCase, GetTimeEntriesUseCase, - GetTimeEntryUseCase + GetTimeEntryUseCase, + GetLastestTimeEntryUseCase, ) diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index ca4ceb98..59b9d975 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -17,9 +17,14 @@ def delete(self, id: int) -> TimeEntry: def update(self, id: int, new_time_entry: dict) -> TimeEntry: pass + @abc.abstractmethod def get_by_id(self, id: int) -> TimeEntry: pass @abc.abstractmethod def get_all(self) -> typing.List[TimeEntry]: pass + + @abc.abstractmethod + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 5b3f4115..0c2b8b9b 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -21,3 +21,6 @@ def get_by_id(self, id: int) -> TimeEntry: def get_all(self) -> typing.List[TimeEntry]: return self.time_entry_dao.get_all() + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index fdd1258d..055cd850 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -4,3 +4,4 @@ from ._update_time_entry_use_case import UpdateTimeEntryUseCase from ._get_time_entry_use_case import GetTimeEntriesUseCase from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py new file mode 100644 index 00000000..c070f8ac --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService +import typing + + +class GetLastestTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_service.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 9e7016d4..5d368e26 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -90,3 +90,17 @@ def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: "end_date": str(time_entry.get("end_date"))}) time_entry = {key: time_entry.get(key) for key in self.time_entry_key} return domain.TimeEntry(**time_entry) + + def get_latest_entries(self, owner_id: int, limit: int = 20) -> typing.List[domain.TimeEntry]: + query = ( + self.time_entry.select() + .where(sqlalchemy.and_( + self.time_entry.c.owner_id == owner_id, + self.time_entry.c.deleted.is_(False) + )) + .order_by(self.time_entry.c.start_date.desc()) + .limit(limit) + ) + time_entries_data = self.db.get_session().execute(query) + list_time_entries = [dict(entry) for entry in time_entries_data] + return list_time_entries if len(list_time_entries) > 0 else None diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 8873b93d..87876204 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -3,3 +3,4 @@ from ._application import delete_time_entry from ._application import update_time_entry from ._application import get_time_entries +from ._application import get_latest_entries From 1db51d68824a429730fedd1cbf58ee334ee00fa0 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Mon, 29 Nov 2021 10:32:24 -0500 Subject: [PATCH 22/33] feat: TT-418 crud customer v2 (#361) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-418 create added * feat: TT-418 create tests added * feat: TT-418 create get_all, get_by_id and delete methods * feat: TT-418 solve conflicts after add tests * feat: TT-418 crud and tests added * fix: TT-418 renamed tests and problems solved * fix: TT-418 code smell resolved * fix: TT-418 serverless corrected and github files retored Co-authored-by: Gabriel Cobeña --- ...me-tracker-v1-on-pull-request-workflow.yml | 4 +- .../time-tracker-v1-on-push-workflow.yml | 4 +- V2/serverless.yml | 51 ++++++ .../azure/customer_azure_endpoints_test.py | 173 +++++++++++++++++- V2/tests/fixtures.py | 2 +- .../integration/daos/customers_dao_test.py | 106 ++++++++++- .../unit/services/customer_service_test.py | 56 ++++++ .../unit/use_cases/customers_use_case_test.py | 59 ++++++ .../customers/_application/__init__.py | 5 +- .../_application/_customers/__init__.py | 5 +- .../_customers/_create_customer.py | 19 +- .../_customers/_delete_customer.py | 39 ++++ .../_application/_customers/_get_customers.py | 55 ++++++ .../_customers/_update_customer.py | 53 ++++++ V2/time_tracker/customers/_domain/__init__.py | 4 + .../_persistence_contracts/_customers_dao.py | 17 ++ .../customers/_domain/_services/_customer.py | 14 ++ .../customers/_domain/_use_cases/__init__.py | 6 +- .../_use_cases/_delete_customer_use_case.py | 10 + .../_use_cases/_get_all_customer_use_case.py | 12 ++ .../_get_by_id_customer_use_case.py | 10 + .../_use_cases/_update_customer_use_case.py | 10 + .../_data_persistence/_customer_dao.py | 39 ++++ V2/time_tracker/customers/interface.py | 3 + 24 files changed, 731 insertions(+), 25 deletions(-) create mode 100644 V2/time_tracker/customers/_application/_customers/_delete_customer.py create mode 100644 V2/time_tracker/customers/_application/_customers/_get_customers.py create mode 100644 V2/time_tracker/customers/_application/_customers/_update_customer.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index c35be604..766f09bf 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -26,7 +26,6 @@ jobs: python -m pip install --upgrade pip pip install -r requirements/time_tracker_api/dev.txt pip install -r requirements/time_tracker_events/dev.txt - - name: Login to azure uses: Azure/login@v1 with: @@ -54,7 +53,6 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} run: | pytest tests - - name: Test the build of the app run: | - docker build . + docker build . \ No newline at end of file diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index 152998b4..095712b9 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -26,7 +26,6 @@ jobs: python -m pip install --upgrade pip pip install -r requirements/time_tracker_api/dev.txt pip install -r requirements/time_tracker_events/dev.txt - - name: Login to azure uses: Azure/login@v1 with: @@ -54,7 +53,6 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} run: | pytest tests - - name: Login to docker registry uses: azure/docker-login@v1 with: @@ -64,4 +62,4 @@ jobs: - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} \ No newline at end of file diff --git a/V2/serverless.yml b/V2/serverless.yml index 9b31ee0b..f7ae5fd7 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -36,7 +36,12 @@ package: - '!.gitignore' - '!.git/**' +#region start Functions + functions: + +#region Start Functions Activities + get_activities: handler: time_tracker/activities/interface.get_activities events: @@ -77,6 +82,10 @@ functions: route: activities/ authLevel: anonymous +#endregion End Functions Activities + +#region Start Functions Time-Entries + create_time_entry: handler: time_tracker/time_entries/interface.create_time_entry events: @@ -127,6 +136,10 @@ functions: route: time-entries/latest/ authLevel: anonymous +#endregion End Functions Time-Entries + +#region Start Functions Customers + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -137,6 +150,40 @@ functions: route: customers/ authLevel: anonymous + get_customers: + handler: time_tracker/customers/interface.get_customers + events: + - http: true + x-azure-settings: + methods: + - GET + route: customers/{id:?} + authLevel: anonymous + + update_customer: + handler: time_tracker/customers/interface.update_customer + events: + - http: true + x-azure-settings: + methods: + - PUT + route: customers/{id} + authLevel: anonymous + + delete_customer: + handler: time_tracker/customers/interface.delete_customer + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: customers/{id} + authLevel: anonymous + +#endregion End Functions Customers + +#region Start Functions Projects + get_projects: handler: time_tracker/projects/interface.get_projects events: @@ -177,3 +224,7 @@ functions: route: projects/ authLevel: anonymous + +#endregion End Functions Projects + +#endregion End Functions \ No newline at end of file diff --git a/V2/tests/api/azure/customer_azure_endpoints_test.py b/V2/tests/api/azure/customer_azure_endpoints_test.py index 47a619d5..f1f35d4f 100644 --- a/V2/tests/api/azure/customer_azure_endpoints_test.py +++ b/V2/tests/api/azure/customer_azure_endpoints_test.py @@ -1,3 +1,4 @@ +from http import HTTPStatus import json from faker import Faker @@ -8,7 +9,7 @@ CUSTOMER_URL = "/api/customers/" -def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( +def test__create_customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( customer_factory ): customer_body = customer_factory().__dict__ @@ -24,11 +25,11 @@ def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_nec customer_json_data = json.loads(response.get_body()) customer_body['id'] = customer_json_data['id'] - assert response.status_code == 201 + assert response.status_code == HTTPStatus.CREATED assert customer_json_data == customer_body -def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): +def test__create_customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): customer_to_insert = { "id": None, "name": Faker().user_name(), @@ -45,5 +46,169 @@ def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_n response = azure_customers._create_customer.create_customer(req) - assert response.status_code == 400 + assert response.status_code == HTTPStatus.BAD_REQUEST assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customer_azure_endpoint__returns_a_customer_with_true_deleted__when_its_id_is_found( + test_db, customer_factory, insert_customer +): + customer_preinsert = customer_factory() + inserted_customer = insert_customer(customer_preinsert, test_db).__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + customer_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data['deleted'] is True + + +def test__delete_customer_azure_endpoint__returns_not_found__when_its_id_is_not_found(): + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b'Not found' + + +def test__update_customer_azure_endpoint__returns_an_updated_customer__when_customer_has_all_necesary_attributes( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + customer_json_data = json.loads(response.get_body()) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == inserted_customer + + +def test__update_customer_azure_endpoint__returns_update_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + existent_customer = customer_factory().__dict__ + + body = json.dumps(existent_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.CONFLICT + assert response.get_body() == b'This customer does not exist or is duplicated' + + +def test__update_customer_azure_endpoint__returns_invalid_format__when_customer_doesnt_have_all_necesary_attributes( + customer_factory, insert_customer, test_db +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer.pop("name") + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customers_azure_endpoint__returns_a_status_code_400__when_customer_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=CUSTOMER_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__customers_azure_endpoint__returns_all_customers( + test_db, customer_factory, insert_customer +): + customer_to_insert = customer_factory() + + inserted_customer = insert_customer(customer_to_insert, test_db).__dict__ + + req = func.HttpRequest(method='GET', body=None, url=CUSTOMER_URL) + response = azure_customers._get_customers.get_customers(req) + customers_json_data = response.get_body().decode("utf-8") + customer_list = json.loads(customers_json_data) + + assert response.status_code == HTTPStatus.OK + assert customers_json_data <= json.dumps(inserted_customer) + assert customer_list.pop() == inserted_customer + + +def test__customer_azure_endpoint__returns_a_customer__when_customer_matches_its_id( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._get_customers.get_customers(req) + customer_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == json.dumps(inserted_customer) + + +def test__customer_azure_endpoint__returns_invalid_id__when_customer_not_matches_its_id(): + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": "Invalid ID"}, + ) + + response = azure_customers._get_customers.get_customers(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'The id has an invalid format' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 2eae7b16..e4e52fb3 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,9 +2,9 @@ from faker import Faker import time_tracker.activities._domain as activities_domain -import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain import time_tracker.customers._domain as customers_domain +import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.customers._infrastructure as customers_infrastructure import time_tracker.projects._domain as projects_domain from time_tracker._infrastructure import DB diff --git a/V2/tests/integration/daos/customers_dao_test.py b/V2/tests/integration/daos/customers_dao_test.py index b85cd3e3..496aaf47 100644 --- a/V2/tests/integration/daos/customers_dao_test.py +++ b/V2/tests/integration/daos/customers_dao_test.py @@ -1,4 +1,7 @@ +import typing + import pytest +from faker import Faker import time_tracker.customers._domain as domain import time_tracker.customers._infrastructure as infrastructure @@ -22,7 +25,7 @@ def _clean_database(): dao.db.get_session().execute(query) -def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( +def test__create_customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( test_db, customer_factory, create_fake_dao ): dao = create_fake_dao(test_db) @@ -33,3 +36,104 @@ def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_da assert isinstance(inserted_customer, domain.Customer) assert inserted_customer == customer_to_insert + + +def test__get_all__returns_a_list_of_customer_dto_objects__when_one_or_more_customers_are_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + customer_to_insert = customer_factory() + inserted_customer = [dao.create(customer_to_insert)] + + customers = dao.get_all() + + assert isinstance(customers, typing.List) + assert customers == inserted_customer + + +def test_get_by_id__returns_a_customer_dto__when_found_one_customer_that_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = dao.get_by_id(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer == inserted_customer + + +def test__get_by_id__returns_none__when_no_customer_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + customer = dao.get_by_id(existent_customer.id) + + assert customer is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_customers_with_sql_database( + test_db, create_fake_dao +): + customers = create_fake_dao(test_db).get_all() + + assert isinstance(customers, typing.List) + assert customers == [] + + +def test_delete__returns_a_customer_with_inactive_status__when_a_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = dao.delete(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer.status == 1 + assert customer.deleted is True + + +def test_delete__returns_none__when_no_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + results = dao.delete(existent_customer.id) + + assert results is None + + +def test__update_customer_dao__returns_an_updated_customer_dto__when_updates_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + updated_customer = dao.update(inserted_customer["id"], domain.Customer(**inserted_customer)) + + assert isinstance(updated_customer, domain.Customer) + assert updated_customer.description == inserted_customer["description"] + assert updated_customer.__dict__ == inserted_customer + + +def test__update_customer_dao__returns_none__when_an_incorrect_id_is_passed( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + updated_customer = dao.update(Faker().pyint(), existent_customer) + + assert updated_customer is None diff --git a/V2/tests/unit/services/customer_service_test.py b/V2/tests/unit/services/customer_service_test.py index bb25070f..776d18ee 100644 --- a/V2/tests/unit/services/customer_service_test.py +++ b/V2/tests/unit/services/customer_service_test.py @@ -1,3 +1,5 @@ +from faker import Faker + from time_tracker.customers._domain import CustomerService @@ -12,3 +14,57 @@ def test__create_customer__uses_the_customer_dao__to_create_a_customer(mocker, c assert customer_dao.create.called assert expected_customer == new_customer + + +def test__delete_customer__uses_the_customer_dao__to_delete_customer_selected( + mocker, +): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_customer) + ) + + customer_service = CustomerService(customer_dao) + deleted_customer = customer_service.delete(Faker().pyint()) + + assert customer_dao.delete.called + assert expected_customer == deleted_customer + + +def test__get_all__uses_the_customer_dao__to_retrieve_customers(mocker): + expected_customers = mocker.Mock() + customer_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + customer_service = CustomerService(customer_dao) + + actual_customers = customer_service.get_all() + + assert customer_dao.get_all.called + assert expected_customers == actual_customers + + +def test__get_by_id__uses_the_customer_dao__to_retrieve_one_customer(mocker): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + actual_customer = customer_service.get_by_id(Faker().pyint()) + + assert customer_dao.get_by_id.called + assert expected_customer == actual_customer + + +def test__update_customer__uses_the_customer_dao__to_update_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + updated_customer = customer_service.update(Faker().pyint(), customer_factory()) + + assert customer_dao.update.called + assert expected_customer == updated_customer diff --git a/V2/tests/unit/use_cases/customers_use_case_test.py b/V2/tests/unit/use_cases/customers_use_case_test.py index 3b8566a9..63e03081 100644 --- a/V2/tests/unit/use_cases/customers_use_case_test.py +++ b/V2/tests/unit/use_cases/customers_use_case_test.py @@ -1,4 +1,5 @@ from pytest_mock import MockFixture +from faker import Faker from time_tracker.customers._domain import _use_cases @@ -16,3 +17,61 @@ def test__create_customer_function__uses_the_customer_service__to_create_a_custo assert customer_service.create.called assert expected_customer == new_customer + + +def test__delete_customer_function__uses_the_customer_service__to_delete_customer_selected( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock(delete=mocker.Mock(return_value=expected_customer)) + + customer_use_case = _use_cases.DeleteCustomerUseCase(customer_service) + deleted_customer = customer_use_case.delete_customer(Faker().pyint()) + + assert customer_service.delete.called + assert expected_customer == deleted_customer + + +def test__get_list_customers_function__uses_the_customer_service__to_retrieve_customers( + mocker: MockFixture, +): + expected_customers = mocker.Mock() + customer_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + + customers_use_case = _use_cases.GetAllCustomerUseCase(customer_service) + actual_customers = customers_use_case.get_all_customer() + + assert customer_service.get_all.called + assert expected_customers == actual_customers + + +def test__get_customer_by_id_function__uses_the_customer_service__to_retrieve_customer( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.GetByIdCustomerUseCase(customer_service) + actual_customer = customer_use_case.get_customer_by_id(Faker().pyint()) + + assert customer_service.get_by_id.called + assert expected_customer == actual_customer + + +def test__update_customer_function__uses_the_customer_service__to_update_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.UpdateCustomerUseCase(customer_service) + updated_customer = customer_use_case.update_customer(Faker().pyint(), customer_factory()) + + assert customer_service.update.called + assert expected_customer == updated_customer diff --git a/V2/time_tracker/customers/_application/__init__.py b/V2/time_tracker/customers/_application/__init__.py index db2c2c15..d9ba1676 100644 --- a/V2/time_tracker/customers/_application/__init__.py +++ b/V2/time_tracker/customers/_application/__init__.py @@ -1,2 +1,5 @@ # flake8: noqa -from ._customers import create_customer \ No newline at end of file +from ._customers import create_customer +from ._customers import get_customers +from ._customers import delete_customer +from ._customers import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/__init__.py b/V2/time_tracker/customers/_application/_customers/__init__.py index bf1f8460..b07840ce 100644 --- a/V2/time_tracker/customers/_application/_customers/__init__.py +++ b/V2/time_tracker/customers/_application/_customers/__init__.py @@ -1,2 +1,5 @@ # flake8: noqa -from ._create_customer import create_customer \ No newline at end of file +from ._create_customer import create_customer +from ._get_customers import get_customers +from ._delete_customer import delete_customer +from ._update_customer import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/_create_customer.py b/V2/time_tracker/customers/_application/_customers/_create_customer.py index 919c34cb..48e39dc3 100644 --- a/V2/time_tracker/customers/_application/_customers/_create_customer.py +++ b/V2/time_tracker/customers/_application/_customers/_create_customer.py @@ -1,6 +1,7 @@ import dataclasses import json import typing +from http import HTTPStatus import azure.functions as func @@ -17,8 +18,8 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: use_case = _domain._use_cases.CreateCustomerUseCase(customer_service) customer_data = req.get_json() - customer_is_valid = _validate_customer(customer_data) - if not customer_is_valid: + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: raise ValueError customer_to_create = _domain.Customer( @@ -32,10 +33,10 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: if created_customer: body = json.dumps(created_customer.__dict__) - status_code = 201 + status_code = HTTPStatus.CREATED else: body = b'This customer already exists' - status_code = 409 + status_code = HTTPStatus.CONFLICT return func.HttpResponse( body=body, @@ -45,13 +46,11 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: except ValueError: return func.HttpResponse( body=b'Invalid format or structure of the attributes of the customer', - status_code=400, + status_code=HTTPStatus.BAD_REQUEST, mimetype="application/json" ) -def _validate_customer(customer_data: dict) -> bool: - if [field.name for field in dataclasses.fields(_domain.Customer) - if (field.name not in customer_data) and (field.type != typing.Optional[field.type])]: - return False - return True +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if (field.name not in customer_data) and (field.type != typing.Optional[field.type])] diff --git a/V2/time_tracker/customers/_application/_customers/_delete_customer.py b/V2/time_tracker/customers/_application/_customers/_delete_customer.py new file mode 100644 index 00000000..41fc3464 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_delete_customer.py @@ -0,0 +1,39 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATATYPE = "application/json" + + +def delete_customer(req: func.HttpRequest) -> func.HttpResponse: + customer_dao = _infrastructure.CustomersSQLDao(DB()) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.DeleteCustomerUseCase(customer_service) + + try: + customer_id = int(req.route_params.get("id")) + deleted_customer = use_case.delete_customer(customer_id) + if not deleted_customer: + return func.HttpResponse( + body="Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype=DATATYPE + ) + + return func.HttpResponse( + body=json.dumps(deleted_customer.__dict__, default=str), + status_code=HTTPStatus.OK, + mimetype=DATATYPE, + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype=DATATYPE + ) diff --git a/V2/time_tracker/customers/_application/_customers/_get_customers.py b/V2/time_tracker/customers/_application/_customers/_get_customers.py new file mode 100644 index 00000000..8cb9635f --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_get_customers.py @@ -0,0 +1,55 @@ +from http import HTTPStatus +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def get_customers(req: func.HttpRequest) -> func.HttpResponse: + customer_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + try: + if customer_id: + response = _get_by_id(int(customer_id)) + if response == b'This customer does not exist': + status_code = HTTPStatus.NOT_FOUND + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"The id has an invalid format", status_code=HTTPStatus.BAD_REQUEST, mimetype="application/json" + ) + + +def _get_by_id(customer_id: int) -> str: + customer_use_case = _domain._use_cases.GetByIdCustomerUseCase( + _create_customer_service(DB()) + ) + customer = customer_use_case.get_customer_by_id(customer_id) + + return json.dumps(customer.__dict__) if customer else b'This customer does not exist' + + +def _get_all() -> str: + customer_sql = _domain._use_cases.GetAllCustomerUseCase( + _create_customer_service(DB()) + ) + return json.dumps( + [ + customer.__dict__ + for customer in customer_sql.get_all_customer() + ] + ) + + +def _create_customer_service(db: DB) -> _domain.CustomerService: + customer_sql = _infrastructure.CustomersSQLDao(db) + return _domain.CustomerService(customer_sql) diff --git a/V2/time_tracker/customers/_application/_customers/_update_customer.py b/V2/time_tracker/customers/_application/_customers/_update_customer.py new file mode 100644 index 00000000..93524c65 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_update_customer.py @@ -0,0 +1,53 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def update_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_id = int(req.route_params.get('id')) + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.UpdateCustomerUseCase(customer_service) + + customer_data = req.get_json() + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: + raise ValueError + + customer_to_update = _domain.Customer( + **{field.name: customer_data.get(field.name) for field in dataclasses.fields(_domain.Customer)} + ) + updated_customer = use_case.update_customer(customer_id, customer_to_update) + + if updated_customer: + body = json.dumps(updated_customer.__dict__) + status_code = HTTPStatus.OK + else: + body = b'This customer does not exist or is duplicated' + status_code = HTTPStatus.CONFLICT + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if field.name not in customer_data] diff --git a/V2/time_tracker/customers/_domain/__init__.py b/V2/time_tracker/customers/_domain/__init__.py index 8392b8e9..a2e8014b 100644 --- a/V2/time_tracker/customers/_domain/__init__.py +++ b/V2/time_tracker/customers/_domain/__init__.py @@ -4,4 +4,8 @@ from ._services import CustomerService from ._use_cases import ( CreateCustomerUseCase, + UpdateCustomerUseCase, + GetAllCustomerUseCase, + GetByIdCustomerUseCase, + DeleteCustomerUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py index 35a7a7e9..186d5c86 100644 --- a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py @@ -1,4 +1,5 @@ import abc +import typing from time_tracker.customers._domain import Customer @@ -7,3 +8,19 @@ class CustomersDao(abc.ABC): @abc.abstractmethod def create(self, data: Customer) -> Customer: pass + + @abc.abstractmethod + def update(self, id: int, data: Customer) -> Customer: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Customer: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[Customer]: + pass + + @abc.abstractmethod + def delete(self, id: int) -> Customer: + pass diff --git a/V2/time_tracker/customers/_domain/_services/_customer.py b/V2/time_tracker/customers/_domain/_services/_customer.py index 88633a08..082a7b08 100644 --- a/V2/time_tracker/customers/_domain/_services/_customer.py +++ b/V2/time_tracker/customers/_domain/_services/_customer.py @@ -1,3 +1,5 @@ +import typing + from time_tracker.customers._domain import Customer, CustomersDao @@ -8,3 +10,15 @@ def __init__(self, customer_dao: CustomersDao): def create(self, data: Customer) -> Customer: return self.customer_dao.create(data) + + def update(self, id: int, data: Customer) -> Customer: + return self.customer_dao.update(id, data) + + def get_by_id(self, id: int) -> Customer: + return self.customer_dao.get_by_id(id) + + def get_all(self) -> typing.List[Customer]: + return self.customer_dao.get_all() + + def delete(self, id: int) -> Customer: + return self.customer_dao.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/__init__.py b/V2/time_tracker/customers/_domain/_use_cases/__init__.py index accd4281..4dcb8239 100644 --- a/V2/time_tracker/customers/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/customers/_domain/_use_cases/__init__.py @@ -1,2 +1,6 @@ # flake8: noqa -from ._create_customer_use_case import CreateCustomerUseCase \ No newline at end of file +from ._create_customer_use_case import CreateCustomerUseCase +from ._update_customer_use_case import UpdateCustomerUseCase +from ._get_by_id_customer_use_case import GetByIdCustomerUseCase +from ._get_all_customer_use_case import GetAllCustomerUseCase +from ._delete_customer_use_case import DeleteCustomerUseCase diff --git a/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py new file mode 100644 index 00000000..0477a1f2 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class DeleteCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def delete_customer(self, id: int) -> Customer: + return self.customer_service.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py new file mode 100644 index 00000000..d3780449 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py @@ -0,0 +1,12 @@ +import typing + +from time_tracker.customers._domain import Customer, CustomerService + + +class GetAllCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_all_customer(self) -> typing.List[Customer]: + return self.customer_service.get_all() diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py new file mode 100644 index 00000000..2372029a --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class GetByIdCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_customer_by_id(self, id: int) -> Customer: + return self.customer_service.get_by_id(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py new file mode 100644 index 00000000..318ced28 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class UpdateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def update_customer(self, id: int, data: Customer) -> Customer: + return self.customer_service.update(id, data) diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py index 2b1f4c0d..f3b15122 100644 --- a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py @@ -1,4 +1,5 @@ import dataclasses +import typing import sqlalchemy as sq @@ -22,6 +23,21 @@ def __init__(self, database: _db.DB): extend_existing=True, ) + def get_by_id(self, id: int) -> domain.Customer: + query = sq.sql.select(self.customer).where( + sq.sql.and_(self.customer.c.id == id, self.customer.c.deleted.is_(False)) + ) + customer = self.db.get_session().execute(query).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def get_all(self) -> typing.List[domain.Customer]: + query = sq.sql.select(self.customer).where(self.customer.c.deleted.is_(False)) + result = self.db.get_session().execute(query) + return [ + self.__create_customer_dto(dict(customer)) + for customer in result + ] + def create(self, data: domain.Customer) -> domain.Customer: try: new_customer = data.__dict__ @@ -39,3 +55,26 @@ def create(self, data: domain.Customer) -> domain.Customer: def __create_customer_dto(self, customer: dict) -> domain.Customer: customer = {key: customer.get(key) for key in self.customer_key} return domain.Customer(**customer) + + def delete(self, customer_id: int) -> domain.Customer: + query = ( + self.customer.update() + .where(self.customer.c.id == customer_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_customer = sq.sql.select(self.customer).where(self.customer.c.id == customer_id) + customer = self.db.get_session().execute(query_deleted_customer).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def update(self, id: int, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop("id") + + customer_validated = {key: value for (key, value) in new_customer.items() if value is not None} + query = self.customer.update().where(self.customer.c.id == id).values(customer_validated) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError: + return None diff --git a/V2/time_tracker/customers/interface.py b/V2/time_tracker/customers/interface.py index e36b8172..9aef2091 100644 --- a/V2/time_tracker/customers/interface.py +++ b/V2/time_tracker/customers/interface.py @@ -1,2 +1,5 @@ # flake8: noqa from ._application import create_customer +from ._application import get_customers +from ._application import delete_customer +from ._application import update_customer From aedf3d24d1cae9f40dcfb61196c619c15a1ac35c Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Tue, 30 Nov 2021 15:13:14 -0500 Subject: [PATCH 23/33] feat: TT-414 get latest projects (#363) feat: TT-414 created get latest project, use case, service, dao test: TT-414 add test to get latest projects Co-authored-by: Alexander --- V2/serverless.yml | 12 ++- .../api/azure/project_azure_endpoints_test.py | 58 +++++++---- .../azure/time_entry_azure_endpoints_test.py | 96 ++++++++++++------- V2/tests/conftest.py | 4 +- V2/tests/fixtures.py | 39 +++++++- .../integration/daos/projects_dao_test.py | 28 +++--- .../integration/daos/time_entries_dao_test.py | 77 ++++++++------- .../unit/services/project_service_test.py | 15 +++ .../unit/use_cases/projects_use_case_test.py | 13 +++ .../projects/_application/__init__.py | 3 +- .../_application/_projects/__init__.py | 3 +- .../_application/_projects/_create_project.py | 3 +- .../_projects/_get_latest_projects.py | 26 +++++ V2/time_tracker/projects/_domain/__init__.py | 3 +- .../projects/_domain/_entities/_project.py | 2 + .../_persistence_contracts/_projects_dao.py | 7 +- .../projects/_domain/_services/_project.py | 3 + .../projects/_domain/_use_cases/__init__.py | 1 + .../_get_latest_projects_use_case.py | 11 +++ .../_data_persistence/_projects_dao.py | 45 +++++++-- V2/time_tracker/projects/interface.py | 3 +- .../_application/_time_entries/__init__.py | 1 + .../time_entries/_domain/__init__.py | 1 + .../_domain/_use_cases/__init__.py | 1 + .../_time_entries_sql_dao.py | 2 +- V2/time_tracker/time_entries/interface.py | 1 + 26 files changed, 334 insertions(+), 124 deletions(-) create mode 100644 V2/time_tracker/projects/_application/_projects/_get_latest_projects.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index f7ae5fd7..66fa83af 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -225,6 +225,16 @@ functions: authLevel: anonymous + get_latest_projects: + handler: time_tracker/projects/interface.get_latest_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/latest + authLevel: anonymous + #endregion End Functions Projects -#endregion End Functions \ No newline at end of file +#endregion End Functions diff --git a/V2/tests/api/azure/project_azure_endpoints_test.py b/V2/tests/api/azure/project_azure_endpoints_test.py index 232462b7..b48a13dc 100644 --- a/V2/tests/api/azure/project_azure_endpoints_test.py +++ b/V2/tests/api/azure/project_azure_endpoints_test.py @@ -1,29 +1,14 @@ import json from http import HTTPStatus -import pytest from faker import Faker import azure.functions as func from time_tracker.projects._application import _projects as azure_projects -from time_tracker.projects import _domain as domain -from time_tracker.projects import _infrastructure as infrastructure PROJECT_URL = '/api/projects/' -@pytest.fixture(name='insert_project') -def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> domain.Project: - inserted_customer = insert_customer(customer_factory(), test_db) - - def _new_project(): - project_to_insert = project_factory(customer_id=inserted_customer.id) - dao = infrastructure.ProjectsSQLDao(test_db) - inserted_project = dao.create(project_to_insert) - return inserted_project - return _new_project - - def test__project_azure_endpoint__returns_all_projects( insert_project ): @@ -146,19 +131,17 @@ def test__update_project_azure_endpoint__returns_a_project__when_found_a_project def test__update_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_update( - project_factory ): - project_body = project_factory().__dict__ + project_body = {"description": Faker().sentence()} req = func.HttpRequest( method="PUT", body=json.dumps(project_body).encode("utf-8"), url=PROJECT_URL, - route_params={"id": project_body["id"]}, + route_params={"id": Faker().pyint()}, ) response = azure_projects._update_project.update_project(req) - assert response.status_code == HTTPStatus.NOT_FOUND assert response.get_body() == b"Not found" @@ -249,3 +232,40 @@ def test__project_azure_endpoint__returns_a_status_code_500__when_project_receiv assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR assert response.get_body() == b"could not be created" + + +def test__get_latest_projects_azure_endpoint__returns_a_list_of_latest_projects__when_an_owner_id_match( + insert_time_entry +): + inserted_time_entry = insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert inserted_time_entry["project_id"] == projects_json_data[0]["id"] + + +def test__get_latest_projects_azure_endpoint__returns_an_empty_list__when_an_owner_id_not_match( + insert_time_entry +): + insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == [] diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index 42e3d5ec..13e3e875 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -46,10 +46,11 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_deleted__when_its_id_is_found( - test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"]) + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) inserted_time_entry = insert_time_entry(time_entry_body, test_db) req = func.HttpRequest( @@ -82,10 +83,11 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti def test__time_entry_azure_endpoint__returns_all_time_entries( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest(method="GET", body=None, url=TIME_ENTRY_URL) @@ -99,10 +101,11 @@ def test__time_entry_azure_endpoint__returns_all_time_entries( def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matches_its_id( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest( @@ -120,10 +123,11 @@ def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matc def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest( @@ -139,12 +143,53 @@ def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_ assert response.get_body() == b'Invalid Format ID' +def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert time_entry_json_data == [inserted_time_entry] + + +def test__get_latest_entries_azure_endpoint__returns_no_time_entries_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an_time_entry_to_update( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): - inserted_activity = insert_activity(activity_factory(), test_db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = insert_time_entry(existent_time_entries, test_db).__dict__ + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ time_entry_body = {"description": Faker().sentence()} @@ -152,15 +197,15 @@ def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an method='PUT', body=json.dumps(time_entry_body).encode("utf-8"), url=TIME_ENTRY_URL, - route_params={"id": inserted_time_entries["id"]}, + route_params={"id": inserted_time_entry["id"]}, ) response = azure_time_entries._update_time_entry.update_time_entry(req) activitiy_json_data = response.get_body().decode("utf-8") - inserted_time_entries.update(time_entry_body) + inserted_time_entry.update(time_entry_body) assert response.status_code == 200 - assert activitiy_json_data == json.dumps(inserted_time_entries) + assert activitiy_json_data == json.dumps(inserted_time_entry) def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_format_id(): @@ -211,27 +256,6 @@ def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.get_body() == b'Incorrect time entry body' -def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( - test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, -): - inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") - inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ - - req = func.HttpRequest( - method='GET', - body=None, - url=TIME_ENTRY_URL+"latest/", - params={"owner_id": inserted_time_entry["owner_id"]}, - ) - - response = azure_time_entries._get_latest_entries.get_latest_entries(req) - time_entry_json_data = json.loads(response.get_body().decode("utf-8")) - - assert response.status_code == HTTPStatus.OK - assert time_entry_json_data == [inserted_time_entry] - - def test__get_latest_entries_azure_endpoint__returns_not_found__when_recieve_an_invalid_owner_id( test_db, insert_activity, activity_factory, ): diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index ff67203c..c11fc951 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,5 +1,5 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity -from fixtures import _time_entry_factory +from fixtures import _time_entry_factory, _insert_time_entry from fixtures import _customer_factory, _insert_customer -from fixtures import _project_factory +from fixtures import _project_factory, _insert_project diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index e4e52fb3..82391ebf 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -3,10 +3,12 @@ import time_tracker.activities._domain as activities_domain import time_tracker.time_entries._domain as time_entries_domain +import time_tracker.time_entries._infrastructure as time_entries_infrastructure import time_tracker.customers._domain as customers_domain import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.customers._infrastructure as customers_infrastructure import time_tracker.projects._domain as projects_domain +import time_tracker.projects._infrastructure as projects_infrastructure from time_tracker._infrastructure import DB @@ -108,7 +110,8 @@ def _make_project( customer_id=Faker().pyint(), status=Faker().pyint(), deleted=False, - technologies=str(Faker().pylist()) + technologies=str(Faker().pylist()), + customer=None ): project = projects_domain.Project( id=id, @@ -118,7 +121,8 @@ def _make_project( customer_id=customer_id, status=status, deleted=deleted, - technologies=technologies + technologies=technologies, + customer=customer ) return project return _make_project @@ -131,3 +135,34 @@ def _new_customer(customer: customers_domain.Customer, database: DB): new_customer = dao.create(customer) return new_customer return _new_customer + + +@pytest.fixture(name='insert_project') +def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> projects_domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(id=None, customer_id=inserted_customer.id, deleted=False) + dao = projects_infrastructure.ProjectsSQLDao(test_db) + inserted_project = dao.create(project_to_insert) + return inserted_project + return _new_project + + +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry( + test_db, insert_project, activity_factory, insert_activity, time_entry_factory +) -> time_entries_domain.TimeEntry: + + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db) + + def _new_time_entry(owner_id: int = Faker().pyint()): + dao = time_entries_infrastructure.TimeEntriesSQLDao(test_db) + time_entries_to_insert = time_entry_factory( + activity_id=inserted_activity.id, project_id=inserted_project.id, owner_id=owner_id + ) + + inserted_time_entries = dao.create(time_entries_to_insert) + return inserted_time_entries + return _new_time_entry diff --git a/V2/tests/integration/daos/projects_dao_test.py b/V2/tests/integration/daos/projects_dao_test.py index 64837e37..01f5a1a3 100644 --- a/V2/tests/integration/daos/projects_dao_test.py +++ b/V2/tests/integration/daos/projects_dao_test.py @@ -7,18 +7,6 @@ from time_tracker._infrastructure import DB -@pytest.fixture(name='insert_project') -def _insert_project(customer_factory, test_db, insert_customer, create_fake_dao, project_factory) -> domain.Project: - inserted_customer = insert_customer(customer_factory(), test_db) - - def _new_project(): - project_to_insert = project_factory(customer_id=inserted_customer.id) - inserted_project = create_fake_dao.create(project_to_insert) - return inserted_project - - return _new_project - - @pytest.fixture(name='create_fake_dao') def _create_fake_dao() -> domain.ProjectsDao: db_fake = DB() @@ -44,6 +32,9 @@ def test__create_project__returns_a_project_dto__when_saves_correctly_with_sql_d inserted_project = dao.create(project_to_insert) + expected_project = project_to_insert.__dict__ + expected_project.update({"customer": inserted_customer.__dict__}) + assert isinstance(inserted_project, domain.Project) assert inserted_project == project_to_insert @@ -85,6 +76,7 @@ def test__get_all__returns_a_list_of_project_dto_objects__when_one_or_more_proje ] projects = dao.get_all() + assert isinstance(projects, typing.List) assert projects == inserted_projects @@ -147,3 +139,15 @@ def test_delete__returns_none__when_no_project_matching_its_id_is_found_with_sql results = dao.delete(project_to_insert.id) assert results is None + + +def test_get_latest_projects__returns_a_list_of_project_dto_objects__when_find_projects_in_the_latest_time_entries( + create_fake_dao, insert_time_entry +): + dao = create_fake_dao + owner_id = Faker().pyint() + inserted_time_entries = insert_time_entry(owner_id) + latest_projects = dao.get_latest(owner_id) + + assert isinstance(latest_projects, typing.List) + assert latest_projects[0].id == inserted_time_entries.project_id diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index e48241cc..3c17f7e9 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -26,12 +26,12 @@ def _clean_database(): def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_database( - test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory + test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - - time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_project = insert_project() + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entry = dao.create(time_entry_to_insert) @@ -51,12 +51,13 @@ def test__time_entry__returns_None__when_not_saves_correctly( def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_matching_its_id_is_found( - create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory + create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entry = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entry = dao.create(existent_time_entry) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert) result = dao.delete(inserted_time_entry.id) @@ -73,42 +74,60 @@ def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( assert result is None +def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + result = dao.get_latest_entries(int(inserted_time_entry["owner_id"])) + + assert result == [inserted_time_entry] + + def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = dao.create(existent_time_entries).__dict__ - time_entry_id = inserted_time_entries["id"] - inserted_time_entries.update({"description": "description updated"}) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + time_entry_id = inserted_time_entry["id"] + inserted_time_entry.update({"description": "description updated"}) - time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entries) + time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entry) assert time_entry.id == time_entry_id - assert time_entry.description == inserted_time_entries.get("description") + assert time_entry.description == inserted_time_entry.get("description") def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = dao.create(existent_time_entries).__dict__ + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ - time_entry = dao.update(0, inserted_time_entries) + time_entry = dao.update(0, inserted_time_entry) assert time_entry is None def test__get_all__returns_a_list_of_time_entries_dto_objects__when_one_or_more_time_entries_are_found_in_sql_database( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = [dao.create(time_entries_to_insert)] time_entry = dao.get_all() @@ -128,11 +147,12 @@ def test__get_all__returns_an_empty_list__when_doesnt_found_any_time_entries_in_ def test__get_by_id__returns_a_time_entry_dto__when_found_one_time_entry_that_match_id_with_sql_database( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = dao.create(time_entries_to_insert) time_entry = dao.get_by_id(time_entries_to_insert.id) @@ -155,21 +175,6 @@ def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( assert time_entry is None -def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( - create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db -): - dao = create_fake_dao(test_db) - inserted_activity = insert_activity(activity_factory(), dao.db) - time_entry_to_insert = time_entry_factory( - activity_id=inserted_activity.id, - technologies="[jira,sql]") - inserted_time_entry = dao.create(time_entry_to_insert) - - result = dao.get_latest_entries(int(inserted_time_entry.owner_id)) - - assert result == [inserted_time_entry.__dict__] - - def test_get_latest_entries__returns_none__when_an_owner_id_is_not_found( create_fake_dao, test_db, insert_activity, activity_factory ): diff --git a/V2/tests/unit/services/project_service_test.py b/V2/tests/unit/services/project_service_test.py index 9baf657e..913bd40f 100644 --- a/V2/tests/unit/services/project_service_test.py +++ b/V2/tests/unit/services/project_service_test.py @@ -72,3 +72,18 @@ def test__create_project__uses_the_project_dao__to_create_an_project(mocker, pro assert project_dao.create.called assert expected_project == actual_project + + +def test__get_latest_projects__uses_the_project_dao__to_get_last_projects( + mocker, +): + expected_latest_projects = mocker.Mock() + project_dao = mocker.Mock( + get_latest=mocker.Mock(return_value=expected_latest_projects) + ) + + project_service = ProjectService(project_dao) + latest_projects = project_service.get_latest(Faker().pyint()) + + assert expected_latest_projects == latest_projects + assert project_dao.get_latest.called diff --git a/V2/tests/unit/use_cases/projects_use_case_test.py b/V2/tests/unit/use_cases/projects_use_case_test.py index 22167418..9f5d5f5c 100644 --- a/V2/tests/unit/use_cases/projects_use_case_test.py +++ b/V2/tests/unit/use_cases/projects_use_case_test.py @@ -78,3 +78,16 @@ def test__update_project_function__uses_the_projects_service__to_update_an_proje assert project_service.update.called assert expected_project == updated_project + + +def test__get_latest_projects_function__uses_the_project_service__to_get_latest_project( + mocker: MockFixture, +): + expected_latest_projects = mocker.Mock() + project_service = mocker.Mock(get_latest=mocker.Mock(return_value=expected_latest_projects)) + + project_use_case = _use_cases.GetLatestProjectsUseCase(project_service) + latest_projects = project_use_case.get_latest(Faker().pyint()) + + assert project_service.get_latest.called + assert expected_latest_projects == latest_projects diff --git a/V2/time_tracker/projects/_application/__init__.py b/V2/time_tracker/projects/_application/__init__.py index 6b48fb8a..96a6f985 100644 --- a/V2/time_tracker/projects/_application/__init__.py +++ b/V2/time_tracker/projects/_application/__init__.py @@ -2,4 +2,5 @@ from ._projects import create_project from ._projects import delete_project from ._projects import get_projects -from ._projects import update_project \ No newline at end of file +from ._projects import update_project +from ._projects import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/__init__.py b/V2/time_tracker/projects/_application/_projects/__init__.py index 9f87eef2..b7500f9b 100644 --- a/V2/time_tracker/projects/_application/_projects/__init__.py +++ b/V2/time_tracker/projects/_application/_projects/__init__.py @@ -2,4 +2,5 @@ from ._create_project import create_project from ._delete_project import delete_project from ._get_projects import get_projects -from ._update_project import update_project \ No newline at end of file +from ._update_project import update_project +from ._get_latest_projects import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/_create_project.py b/V2/time_tracker/projects/_application/_projects/_create_project.py index 559ba864..1397284a 100644 --- a/V2/time_tracker/projects/_application/_projects/_create_project.py +++ b/V2/time_tracker/projects/_application/_projects/_create_project.py @@ -31,7 +31,8 @@ def create_project(req: func.HttpRequest) -> func.HttpResponse: customer_id=project_data["customer_id"], status=project_data["status"], deleted=False, - technologies=project_data["technologies"] + technologies=project_data["technologies"], + customer=None ) created_project = use_case.create_project(project_to_create) diff --git a/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py new file mode 100644 index 00000000..0aa9badc --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py @@ -0,0 +1,26 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_latest_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.GetLatestProjectsUseCase(project_service) + + owner_id = req.params.get('owner_id') + response = [ + project.__dict__ + for project in use_case.get_latest(owner_id) + ] + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) diff --git a/V2/time_tracker/projects/_domain/__init__.py b/V2/time_tracker/projects/_domain/__init__.py index c90dbcaf..6cdbe548 100644 --- a/V2/time_tracker/projects/_domain/__init__.py +++ b/V2/time_tracker/projects/_domain/__init__.py @@ -7,5 +7,6 @@ DeleteProjectUseCase, GetProjectsUseCase, GetProjectUseCase, - UpdateProjectUseCase + UpdateProjectUseCase, + GetLatestProjectsUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/_project.py b/V2/time_tracker/projects/_domain/_entities/_project.py index 0b2ffe1a..75361db8 100644 --- a/V2/time_tracker/projects/_domain/_entities/_project.py +++ b/V2/time_tracker/projects/_domain/_entities/_project.py @@ -12,3 +12,5 @@ class Project: status: int deleted: Optional[bool] technologies: List[str] + + customer: Optional[dict] diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py index f38c8ebd..ef0bb10f 100644 --- a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py @@ -1,4 +1,5 @@ import abc +import typing from .. import Project @@ -9,7 +10,7 @@ def create(self, time_entry_data: Project) -> Project: pass @abc.abstractmethod - def get_all(self) -> Project: + def get_all(self) -> typing.List[Project]: pass @abc.abstractmethod @@ -23,3 +24,7 @@ def update(self, id: int, project_data: dict) -> Project: @abc.abstractmethod def delete(self, id: int) -> Project: pass + + @abc.abstractmethod + def get_latest(self, owner_id: int) -> typing.List[Project]: + pass diff --git a/V2/time_tracker/projects/_domain/_services/_project.py b/V2/time_tracker/projects/_domain/_services/_project.py index 0f99dafb..70dfe9c0 100644 --- a/V2/time_tracker/projects/_domain/_services/_project.py +++ b/V2/time_tracker/projects/_domain/_services/_project.py @@ -22,3 +22,6 @@ def update(self, id: int, project_data: dict) -> Project: def delete(self, id: int) -> Project: return self.project_dao.delete(id) + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_dao.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/__init__.py b/V2/time_tracker/projects/_domain/_use_cases/__init__.py index defb127d..f2a7dfce 100644 --- a/V2/time_tracker/projects/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/projects/_domain/_use_cases/__init__.py @@ -4,3 +4,4 @@ from ._get_project_by_id_use_case import GetProjectUseCase from ._get_projects_use_case import GetProjectsUseCase from ._update_project_use_case import UpdateProjectUseCase +from ._get_latest_projects_use_case import GetLatestProjectsUseCase diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py new file mode 100644 index 00000000..b26d484c --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetLatestProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_service.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py index 2ec61186..63e65972 100644 --- a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py @@ -5,6 +5,8 @@ from ... import _domain as domain from time_tracker._infrastructure import _db +from time_tracker.time_entries._infrastructure._data_persistence import TimeEntriesSQLDao +from time_tracker.customers._infrastructure._data_persistence import CustomersSQLDao class ProjectsSQLDao(domain.ProjectsDao): @@ -31,13 +33,12 @@ def __init__(self, database: _db.DB): def create(self, project_data: domain.Project) -> domain.Project: try: - new_project = project_data.__dict__ - new_project.pop('id', None) + validated_project = {key: value for (key, value) in project_data.__dict__.items() if value is not None} + + query = self.project.insert().values(validated_project).return_defaults() - query = self.project.insert().values(new_project).return_defaults() project = self.db.get_session().execute(query) - new_project.update({"id": project.inserted_primary_key[0]}) - return self.__create_project_dto(new_project) + return self.get_by_id(project.inserted_primary_key[0]) except sq.exc.SQLAlchemyError: return None @@ -45,14 +46,29 @@ def create(self, project_data: domain.Project) -> domain.Project: def get_by_id(self, id: int) -> domain.Project: query = sq.sql.select(self.project).where(self.project.c.id == id) project = self.db.get_session().execute(query).one_or_none() - return self.__create_project_dto(dict(project)) if project else None + if project: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + + return self.__create_project_dto(project) if project else None def get_all(self) -> typing.List[domain.Project]: query = sq.sql.select(self.project) - result = self.db.get_session().execute(query) + result = self.db.get_session().execute(query).all() + projects = [] + + for project in result: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + projects.append(project) + return [ - self.__create_project_dto(dict(project)) - for project in result + self.__create_project_dto(project) + for project in projects ] def delete(self, id: int) -> domain.Project: @@ -72,6 +88,17 @@ def update(self, id: int, project_data: dict) -> domain.Project: except sq.exc.SQLAlchemyError as error: raise Exception(error.orig) + def get_latest(self, owner_id: int) -> typing.List[domain.Project]: + time_entries_dao = TimeEntriesSQLDao(self.db) + latest_time_entries = time_entries_dao.get_latest_entries(owner_id) + latest_projects = [] + + if latest_time_entries: + filter_project = typing.Counter(time_entry['project_id'] for time_entry in latest_time_entries) + latest_projects = [self.get_by_id(project_id) for project_id in filter_project] + + return latest_projects + def __create_project_dto(self, project: dict) -> domain.Project: project = {key: project.get(key) for key in self.project_key} return domain.Project(**project) diff --git a/V2/time_tracker/projects/interface.py b/V2/time_tracker/projects/interface.py index 2fb3244b..a0312258 100644 --- a/V2/time_tracker/projects/interface.py +++ b/V2/time_tracker/projects/interface.py @@ -2,4 +2,5 @@ from ._application import create_project from ._application import delete_project from ._application import get_projects -from ._application import update_project \ No newline at end of file +from ._application import update_project +from ._application import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 29631650..9b48eb2a 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from ._create_time_entry import create_time_entry from ._delete_time_entry import delete_time_entry +from ._get_latest_entries import get_latest_entries from ._update_time_entry import update_time_entry from ._get_time_entries import get_time_entries from ._get_latest_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index 513877bf..f0aec6d0 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -5,6 +5,7 @@ from ._use_cases import ( CreateTimeEntryUseCase, DeleteTimeEntryUseCase, + GetLastestTimeEntryUseCase, UpdateTimeEntryUseCase, GetTimeEntriesUseCase, GetTimeEntryUseCase, diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 055cd850..0dd05666 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase from ._update_time_entry_use_case import UpdateTimeEntryUseCase from ._get_time_entry_use_case import GetTimeEntriesUseCase from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 5d368e26..59988205 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -29,7 +29,7 @@ def __init__(self, database: _db.DB): sqlalchemy.Column('end_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), sqlalchemy.Column('deleted', sqlalchemy.Boolean), sqlalchemy.Column('timezone_offset', sqlalchemy.String), - sqlalchemy.Column('project_id', sqlalchemy.Integer), + sqlalchemy.Column('project_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('project.id')), extend_existing=True, ) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 87876204..1b6c1826 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,6 +1,7 @@ # flake8: noqa from ._application import create_time_entry from ._application import delete_time_entry +from ._application import get_latest_entries from ._application import update_time_entry from ._application import get_time_entries from ._application import get_latest_entries From e5f3d1c75ebb7b6a70d9f3c6b0930740ae302678 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Fri, 3 Dec 2021 19:14:05 -0500 Subject: [PATCH 24/33] ci: TT-384 add new secret ci (#352) Co-authored-by: Alexander --- .../workflows/time-tracker-v1-on-pull-request-workflow.yml | 5 +++-- .github/workflows/time-tracker-v1-on-push-workflow.yml | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index 766f09bf..0610385c 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -35,7 +35,7 @@ jobs: uses: Azure/get-keyvault-secrets@v1 with: keyvault: "time-tracker-secrets" - secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" id: timeTrackerAzureVault - name: Run tests @@ -51,8 +51,9 @@ jobs: DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | pytest tests - name: Test the build of the app run: | - docker build . \ No newline at end of file + docker build . diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index 095712b9..1c4c1b18 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -35,7 +35,7 @@ jobs: uses: Azure/get-keyvault-secrets@v1 with: keyvault: "time-tracker-secrets" - secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" id: timeTrackerAzureVault - name: Run tests @@ -51,6 +51,7 @@ jobs: DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | pytest tests - name: Login to docker registry @@ -62,4 +63,4 @@ jobs: - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} \ No newline at end of file + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From 95ae3af1990680581a0e90a9674189b0d8552a75 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 4 Dec 2021 00:36:12 +0000 Subject: [PATCH 25/33] 0.45.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 20 ++++++++++++++++++++ time_tracker_api/version.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6b74353..c1b05983 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,26 @@ +## v0.45.0 (2021-12-04) +### Feature +* TT-414 get latest projects ([#363](https://github.com/ioet/time-tracker-backend/issues/363)) ([`aedf3d2`](https://github.com/ioet/time-tracker-backend/commit/aedf3d24d1cae9f40dcfb61196c619c15a1ac35c)) +* TT-418 crud customer v2 ([#361](https://github.com/ioet/time-tracker-backend/issues/361)) ([`1db51d6`](https://github.com/ioet/time-tracker-backend/commit/1db51d68824a429730fedd1cbf58ee334ee00fa0)) +* TT-429 created enums for response messages ([#362](https://github.com/ioet/time-tracker-backend/issues/362)) ([`fd0bc98`](https://github.com/ioet/time-tracker-backend/commit/fd0bc986fcc074bd5f6d6e9b7b602951375f2aee)) +* TT-404 GET Time Entries ([#341](https://github.com/ioet/time-tracker-backend/issues/341)) ([`c8a3134`](https://github.com/ioet/time-tracker-backend/commit/c8a31341b120792f46442815fad2d463262302ab)) +* TT-417-crud-v2-projects ([#360](https://github.com/ioet/time-tracker-backend/issues/360)) ([`10ec2bb`](https://github.com/ioet/time-tracker-backend/commit/10ec2bb9e2b5f67358c00b549a376b7f610041de)) +* TT-402 put v2 time entries ([#347](https://github.com/ioet/time-tracker-backend/issues/347)) ([`48f6411`](https://github.com/ioet/time-tracker-backend/commit/48f641170a968c7f12bc60f7882b0f4eda6cede2)) +* TT-418 CRUD customer v2 ([#359](https://github.com/ioet/time-tracker-backend/issues/359)) ([`d6c4c4d`](https://github.com/ioet/time-tracker-backend/commit/d6c4c4d67e72db867f197af8c7f8147839d6c178)) +* TT-403 delete v2 time entries ([#346](https://github.com/ioet/time-tracker-backend/issues/346)) ([`60a0dc7`](https://github.com/ioet/time-tracker-backend/commit/60a0dc7015f98b24a3429b1ceabf31e722741649)) +* TT-401-Post-v2-time-entries ([#344](https://github.com/ioet/time-tracker-backend/issues/344)) ([`5f107f3`](https://github.com/ioet/time-tracker-backend/commit/5f107f33cb640f7fa8e498db2157efb2d11f401d)) +* TT-399 Config use makefile to executing tests ([#350](https://github.com/ioet/time-tracker-backend/issues/350)) ([`32ee36f`](https://github.com/ioet/time-tracker-backend/commit/32ee36f39e81866c2f0767cf243c61afde6841c9)) + +### Fix +* TT-401 change in activity database instance and refactor test ([#355](https://github.com/ioet/time-tracker-backend/issues/355)) ([`b81319f`](https://github.com/ioet/time-tracker-backend/commit/b81319fe12bff57816dac1d0354000bfc6674c1c)) + +### Documentation +* TT-419 update readme V2 ([#357](https://github.com/ioet/time-tracker-backend/issues/357)) ([`6dd8505`](https://github.com/ioet/time-tracker-backend/commit/6dd85055b666888c7a22ffa1635b2e53903e7942)) +* TT-399 Readme update how to use makefile ([#354](https://github.com/ioet/time-tracker-backend/issues/354)) ([`10cc426`](https://github.com/ioet/time-tracker-backend/commit/10cc4269e4e60c6eff77bf1cf02cdf0d31dac86f)) + ## v0.44.0 (2021-11-15) ### Feature * TT-357 Create V2 Activities Azure DAO ([#334](https://github.com/ioet/time-tracker-backend/issues/334)) ([`3a99add`](https://github.com/ioet/time-tracker-backend/commit/3a99add39a3130c540d86b02c5a69dbda8536e8e)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index a262ca73..952f957f 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.44.0' +__version__ = '0.45.0' From fd39f660dbd895fcc17d6767ca453bcc2b91ab7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Sat, 4 Dec 2021 12:12:22 -0500 Subject: [PATCH 26/33] feat: TT-384 add read file from blob storage 12.1 (#366) --- commons/data_access_layer/file.py | 31 +++++++++++++ commons/data_access_layer/file_stream.py | 27 ----------- requirements/time_tracker_api/dev.txt | 2 +- requirements/time_tracker_api/prod.txt | 2 +- .../data_access_layer/file_stream_test.py | 14 +++--- .../activities/activities_namespace_test.py | 7 ++- .../activities/activities_model.py | 45 +++++++++++++++---- utils/azure_users.py | 7 ++- utils/extend_model.py | 15 ++++++- 9 files changed, 103 insertions(+), 47 deletions(-) create mode 100644 commons/data_access_layer/file.py delete mode 100644 commons/data_access_layer/file_stream.py diff --git a/commons/data_access_layer/file.py b/commons/data_access_layer/file.py new file mode 100644 index 00000000..879970dc --- /dev/null +++ b/commons/data_access_layer/file.py @@ -0,0 +1,31 @@ +import os +from azure.storage.blob import BlobServiceClient +from utils.azure_users import AzureConnection + + +class FileStream(): + CONNECTION_STRING = AzureConnection().get_blob_storage_connection_string() + container_name: str + + def __init__(self, container_name: str): + """ + Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. + `container_name`: The name of the Azure Storage container. + """ + self.container_name = container_name + + def get_file_stream(self, file_name: str): + if self.CONNECTION_STRING is None: + print("No connection string") + return None + + try: + account = BlobServiceClient.from_connection_string( + self.CONNECTION_STRING) + value = account.get_blob_client(self.container_name, file_name) + file = value.download_blob().readall() + print("Connection string is valid") + return file + except Exception as e: + print(f'Error: {e}') + return None diff --git a/commons/data_access_layer/file_stream.py b/commons/data_access_layer/file_stream.py deleted file mode 100644 index a705c061..00000000 --- a/commons/data_access_layer/file_stream.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -from azure.storage.blob.blockblobservice import BlockBlobService - -ACCOUNT_KEY = os.environ.get('AZURE_STORAGE_ACCOUNT_KEY') - -class FileStream: - def __init__(self, account_name:str, container_name:str): - """ - Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. - `account_name`: The name of the Azure Storage account. - `container_name`: The name of the Azure Storage container. - """ - self.account_name = account_name - self.container_name = container_name - self.blob_service = BlockBlobService(account_name=self.account_name, account_key=ACCOUNT_KEY) - - def get_file_stream(self, filename:str): - import tempfile - try: - local_file = tempfile.NamedTemporaryFile() - self.blob_service.get_blob_to_stream(self.container_name, filename, stream=local_file) - - local_file.seek(0) - return local_file - except Exception as e: - print(e) - return None \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index b7a6d667..4580007e 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -22,4 +22,4 @@ pyfiglet==0.7 factory_boy==3.2.0 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index dd6df0df..2bfaea68 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -47,4 +47,4 @@ pytz==2019.3 python-dateutil==2.8.1 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index a3119774..c2a5f5d8 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,15 +1,17 @@ import json -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + +fs = FileStream("tt-common-files") -fs = FileStream("storageaccounteystr82c5","tt-common-files") def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") - - assert len(json.load(result)) == 15 + + assert len(json.loads(result)) == 15 + def test__get_file_stream__return_None__when_not_enter_file_name_or_incorrect_name(): result = fs.get_file_stream("") - - assert result == None \ No newline at end of file + + assert result == None diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 86e34691..17efe406 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -19,6 +19,7 @@ fake_activity = ({"id": fake.random_int(1, 9999)}).update(valid_activity_data) + def test__get_all_activities__return_response__when_send_activities_get_request( client: FlaskClient, valid_header: dict ): @@ -28,6 +29,7 @@ def test__get_all_activities__return_response__when_send_activities_get_request( assert HTTPStatus.OK == response.status_code + def test_create_activity_should_succeed_with_valid_request( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -64,6 +66,7 @@ def test_create_activity_should_reject_bad_request( assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -90,6 +93,7 @@ def test_list_all_active( max_count=ANY, ) + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active_activities( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -118,7 +122,7 @@ def test_list_all_active_activities( max_count=ANY, ) - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -141,6 +145,7 @@ def test_get_activity_should_succeed_with_valid_id( repository_find_mock.assert_called_once_with(str(valid_id), ANY) +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_return_not_found_with_invalid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..0810521c 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -7,13 +7,15 @@ CosmosDBModel, CosmosDBDao, CosmosDBRepository, + CustomError, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + class ActivityDao(CRUDDao): pass @@ -118,16 +120,27 @@ def find_all_from_blob_storage( self, event_context: EventContext, mapper: Callable = None, + activity_id: str = None, file_name: str = "activity.json", - ): + ): tenant_id_value = self.find_partition_key_value(event_context) function_mapper = self.get_mapper_or_dict(mapper) if tenant_id_value is None: - return [] - - fs = FileStream("storageaccounteystr82c5","tt-common-files") + return [{"result": "error", "message": "tenant_id is None"}] + + fs = FileStream("tt-common-files") result = fs.get_file_stream(file_name) - return list(map(function_mapper, json.load(result))) if result is not None else [] + result_json = list(map(function_mapper, json.loads( + result))) if result is not None else [] + if activity_id is not None: + result_json = [ + activity + for activity in result_json + if activity.id == activity_id + ] + + return result_json + class ActivityCosmosDBDao(APICosmosDBDao, ActivityDao): def __init__(self, repository): @@ -143,7 +156,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,11 +175,25 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, **kwargs) -> list: event_ctx = self.create_event_context("read-many") - activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx + ) return activities + def get(self, id: str = None) -> list: + event_ctx = self.create_event_context("read-many") + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx, + activity_id=id + ) + + if len(activities) > 0: + return activities[0] + else: + raise CustomError(404, "It was not found") + def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') activity_payload['status'] = Status.ACTIVE.value diff --git a/utils/azure_users.py b/utils/azure_users.py index 45a1a0f3..e38507ee 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -13,7 +13,8 @@ class MSConfig: 'MS_SECRET', 'MS_SCOPE', 'MS_ENDPOINT', - 'USERID' + 'USERID', + 'AZURE_STORAGE_CONNECTION_STRING' ] check_variables_are_defined(ms_variables) @@ -24,6 +25,7 @@ class MSConfig: SCOPE = os.environ.get('MS_SCOPE') ENDPOINT = os.environ.get('MS_ENDPOINT') USERID = os.environ.get('USERID') + AZURE_STORAGE_CONNECTION_STRING = os.environ.get('AZURE_STORAGE_CONNECTION_STRING') class BearerAuth(requests.auth.AuthBase): @@ -67,6 +69,9 @@ def __init__(self, config=MSConfig): self.client = self.get_msal_client() self.access_token = self.get_token() self.groups_and_users = None + + def get_blob_storage_connection_string(self) -> str: + return self.config.AZURE_STORAGE_CONNECTION_STRING def get_msal_client(self): client = msal.ConfidentialClientApplication( diff --git a/utils/extend_model.py b/utils/extend_model.py index ce39d5b7..9040895f 100644 --- a/utils/extend_model.py +++ b/utils/extend_model.py @@ -96,7 +96,7 @@ def add_project_info_to_time_entries(time_entries, projects): setattr(time_entry, 'customer_name', project.customer_name) -def add_activity_name_to_time_entries(time_entries, activities): +def add_activity_name_to_time_entries_v1(time_entries, activities): for time_entry in time_entries: for activity in activities: if time_entry.activity_id == activity.id: @@ -107,6 +107,19 @@ def add_activity_name_to_time_entries(time_entries, activities): ) setattr(time_entry, 'activity_name', name) +def add_activity_name_to_time_entries(time_entries, activities): + for time_entry in time_entries: + result = [x for x in activities if time_entry.activity_id == x.id] + if result: + name = ( + result[0].name + " (archived)" + if result[0].is_deleted() + else result[0].name + ) + setattr(time_entry, 'activity_name', name) + else: + setattr(time_entry, 'activity_name', "activity") + def add_user_email_to_time_entries(time_entries, users): for time_entry in time_entries: From d33ffe888a4134e25b26afc28d93ac8ca9448717 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 4 Dec 2021 17:53:58 +0000 Subject: [PATCH 27/33] 0.46.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1b05983..6cc5c526 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.46.0 (2021-12-04) +### Feature +* TT-384 add read file from blob storage 12.1 ([#366](https://github.com/ioet/time-tracker-backend/issues/366)) ([`fd39f66`](https://github.com/ioet/time-tracker-backend/commit/fd39f660dbd895fcc17d6767ca453bcc2b91ab7b)) + ## v0.45.0 (2021-12-04) ### Feature * TT-414 get latest projects ([#363](https://github.com/ioet/time-tracker-backend/issues/363)) ([`aedf3d2`](https://github.com/ioet/time-tracker-backend/commit/aedf3d24d1cae9f40dcfb61196c619c15a1ac35c)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 952f957f..50fa61e7 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.45.0' +__version__ = '0.46.0' From 628da5c165c434bfc2a47ffc00222710cdc379b3 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Thu, 20 Jan 2022 05:56:16 -0500 Subject: [PATCH 28/33] fix: TT-507 Error in time-entries list for different time zone (#367) --- utils/time.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils/time.py b/utils/time.py index 37082d76..aa82dab3 100644 --- a/utils/time.py +++ b/utils/time.py @@ -60,10 +60,10 @@ def to_utc(date: datetime) -> datetime: localized = _tz.localize(date) return localized - from dateutil.parser import isoparse + from dateutil import parser - no_timezone_info = isoparse(value).tzinfo is None + no_timezone_info = parser.parse(value).tzinfo is None if no_timezone_info: - return to_utc(isoparse(value)) + return to_utc(parser.parse(value)) else: - return isoparse(value) + return parser.parse(value) From eb5256f0eb1ab843f55afa71838f2a3d8eeb7999 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 20 Jan 2022 11:14:19 +0000 Subject: [PATCH 29/33] 0.46.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cc5c526..0141b02d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.46.1 (2022-01-20) +### Fix +* TT-507 Error in time-entries list for different time zone ([#367](https://github.com/ioet/time-tracker-backend/issues/367)) ([`628da5c`](https://github.com/ioet/time-tracker-backend/commit/628da5c165c434bfc2a47ffc00222710cdc379b3)) + ## v0.46.0 (2021-12-04) ### Feature * TT-384 add read file from blob storage 12.1 ([#366](https://github.com/ioet/time-tracker-backend/issues/366)) ([`fd39f66`](https://github.com/ioet/time-tracker-backend/commit/fd39f660dbd895fcc17d6767ca453bcc2b91ab7b)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 50fa61e7..dd8e65f1 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.46.0' +__version__ = '0.46.1' From 957227eea388bbf9e46f9c37427f3ed9052c64e1 Mon Sep 17 00:00:00 2001 From: Carlos Carvajal <56209390+cxcarvaj@users.noreply.github.com> Date: Wed, 9 Mar 2022 10:12:04 -0500 Subject: [PATCH 30/33] Fix: All ioet users are returned from AD (#369) * Fix: All ioet users are returned from AD * Fix: tests workflows on PR * feat: TT-551 Applying some changes in variable name --- .../time-tracker-v1-on-pull-request-workflow.yml | 2 +- requirements/time_tracker_api/prod.txt | 2 ++ tests/commons/data_access_layer/file_stream_test.py | 3 ++- tests/utils/azure_users_test.py | 2 +- utils/azure_users.py | 8 +++++--- 5 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index 0610385c..323b8615 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -53,7 +53,7 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | - pytest tests + pytest -v - name: Test the build of the app run: | docker build . diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index 2bfaea68..48bf85a7 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -15,6 +15,8 @@ flake8==3.7.9 WSGIserver==1.3 Werkzeug==0.16.1 Jinja2==2.11.3 +markupsafe==2.0.1 +itsdangerous==2.0.1 #WSGI server gunicorn==20.0.4 diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index c2a5f5d8..a8b4c137 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,10 +1,11 @@ import json +import pytest from commons.data_access_layer.file import FileStream fs = FileStream("tt-common-files") - +@pytest.mark.skip(reason='file not in the repository') def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 22bd8965..90300bd3 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -264,4 +264,4 @@ def test_users_functions_should_returns_all_users( users = AzureConnection().users() - assert len(users) == 0 + assert len(users) == 2 diff --git a/utils/azure_users.py b/utils/azure_users.py index e38507ee..0cf85c96 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -133,9 +133,11 @@ def users(self) -> List[AzureUser]: )[1] endpoint = endpoint + skip_token_attribute + request_token - for i in range(len(users)): - if users[i]['mail'] is None: - valid_users.append(users[i]) + for user in users: + user_emails = user['otherMails'] + email_domain = user_emails[0].split('@')[1] + if(len(user_emails) != 0 and email_domain == 'ioet.com'): + valid_users.append(user) return [self.to_azure_user(user) for user in valid_users] From 6abcf1db2dbd7e30d2d1e1b9ca9567261146a2f3 Mon Sep 17 00:00:00 2001 From: Carlos Carvajal <56209390+cxcarvaj@users.noreply.github.com> Date: Wed, 9 Mar 2022 14:23:12 -0500 Subject: [PATCH 31/33] Hot fix TT-551 all users are returned from AD (#372) * Revert changes in user * TT-551 hot fix --- utils/azure_users.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/utils/azure_users.py b/utils/azure_users.py index 0cf85c96..297b43f5 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -135,8 +135,7 @@ def users(self) -> List[AzureUser]: for user in users: user_emails = user['otherMails'] - email_domain = user_emails[0].split('@')[1] - if(len(user_emails) != 0 and email_domain == 'ioet.com'): + if(len(user_emails) != 0 and user_emails[0].split('@')[1] == 'ioet.com'): valid_users.append(user) return [self.to_azure_user(user) for user in valid_users] From 446c636c87314488465064e6fe55582b2c632cc4 Mon Sep 17 00:00:00 2001 From: alejandra-ponce <101274839+alejandra-ponce@users.noreply.github.com> Date: Mon, 21 Mar 2022 17:56:41 -0500 Subject: [PATCH 32/33] fix: TT-583 unable to set admin role to users (#373) --- tests/utils/azure_users_test.py | 28 ++++++++++++++-------------- utils/azure_users.py | 19 +++++++++++++++---- 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 90300bd3..8a888ac8 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -17,10 +17,10 @@ ], ) def test_azure_connection_is_test_user( - get_mock, - field_name, - field_value, - is_test_user_expected_value, + get_mock, + field_name, + field_value, + is_test_user_expected_value, ): response_mock = Mock() response_mock.status_code = 200 @@ -58,7 +58,7 @@ def test_azure_connection_get_test_user_ids(get_mock): @patch('utils.azure_users.AzureConnection.get_test_user_ids') @patch('utils.azure_users.AzureConnection.users') def test_azure_connection_get_non_test_users( - users_mock, get_test_user_ids_mock + users_mock, get_test_user_ids_mock ): test_user = AzureUser('ID1', None, None, [], []) non_test_user = AzureUser('ID2', None, None, [], []) @@ -81,7 +81,7 @@ def test_azure_connection_get_group_id_by_group_name(get_mock): group_id = 'ID1' azure_connection = AzureConnection() assert ( - azure_connection.get_group_id_by_group_name('group_name') == group_id + azure_connection.get_group_id_by_group_name('group_name') == group_id ) @@ -91,7 +91,7 @@ def test_azure_connection_get_group_id_by_group_name(get_mock): @patch('requests.post') @mark.parametrize('expected_value', [True, False]) def test_is_user_in_group( - post_mock, get_group_id_by_group_name_mock, expected_value + post_mock, get_group_id_by_group_name_mock, expected_value ): response_expected = {'value': expected_value} response_mock = Mock() @@ -104,8 +104,8 @@ def test_is_user_in_group( azure_connection = AzureConnection() assert ( - azure_connection.is_user_in_group('user_id', payload_mock) - == response_expected + azure_connection.is_user_in_group('user_id', payload_mock) + == response_expected ) @@ -164,7 +164,7 @@ def test_get_groups_and_users(get_mock): ], ) def test_get_groups_by_user_id( - get_groups_and_users_mock, user_id, groups_expected_value + get_groups_and_users_mock, user_id, groups_expected_value ): get_groups_and_users_mock.return_value = [ ('test-group-1', ['user-id1', 'user-id2']), @@ -180,7 +180,7 @@ def test_get_groups_by_user_id( @patch('utils.azure_users.AzureConnection.get_token', Mock()) @patch('utils.azure_users.AzureConnection.get_groups_and_users') def test_get_groups_and_users_called_once_by_instance( - get_groups_and_users_mock, + get_groups_and_users_mock, ): get_groups_and_users_mock.return_value = [] user_id = 'user-id1' @@ -198,7 +198,7 @@ def test_get_groups_and_users_called_once_by_instance( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.post') def test_add_user_to_group( - post_mock, get_group_id_by_group_name_mock, get_user_mock + post_mock, get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -224,7 +224,7 @@ def test_add_user_to_group( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.delete') def test_remove_user_from_group( - delete_mock, get_group_id_by_group_name_mock, get_user_mock + delete_mock, get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -247,7 +247,7 @@ def test_remove_user_from_group( @patch('utils.azure_users.AzureConnection.get_groups_and_users') @patch('requests.get') def test_users_functions_should_returns_all_users( - get_mock, get_groups_and_users_mock + get_mock, get_groups_and_users_mock ): first_response = Response() first_response.status_code = 200 diff --git a/utils/azure_users.py b/utils/azure_users.py index 297b43f5..5c97bec7 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -69,7 +69,7 @@ def __init__(self, config=MSConfig): self.client = self.get_msal_client() self.access_token = self.get_token() self.groups_and_users = None - + def get_blob_storage_connection_string(self) -> str: return self.config.AZURE_STORAGE_CONNECTION_STRING @@ -187,7 +187,15 @@ def add_user_to_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is None: + self.groups_and_users = [(group_name, [user_id])] + elif group_name not in [gn for (gn, ul) in self.groups_and_users]: + self.groups_and_users.append((group_name, [user_id])) + else: + for (cache_group_name, user_ids) in self.groups_and_users: + if group_name == cache_group_name: + if user_id not in user_ids: + user_ids.append(user_id) return self.get_user(user_id) def remove_user_from_group(self, user_id, group_name): @@ -201,7 +209,11 @@ def remove_user_from_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is not None: + for (cache_group_name, user_ids) in self.groups_and_users: + if group_name == cache_group_name: + if user_id in user_ids: + user_ids.remove(user_id) return self.get_user(user_id) def get_non_test_users(self) -> List[AzureUser]: @@ -271,7 +283,6 @@ def get_groups_and_users(self): result = list(map(parse_item, response.json()['value'])) users_id = self.config.USERID.split(",") result[0][1].extend(users_id) - return result def is_user_in_group(self, user_id, data: dict): From 61678e099a2492cb716454cd2a69c76f030b36bf Mon Sep 17 00:00:00 2001 From: David Cadena Date: Mon, 11 Apr 2022 20:55:21 -0500 Subject: [PATCH 33/33] hotfix: created a variable that contains the original endpoint (#375) --- utils/azure_users.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/utils/azure_users.py b/utils/azure_users.py index 5c97bec7..84f590a4 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -108,6 +108,7 @@ def users(self) -> List[AzureUser]: role_fields_params=role_fields_params, ) + final_endpoint = endpoint exists_users = True users = [] valid_users = [] @@ -115,8 +116,8 @@ def users(self) -> List[AzureUser]: while exists_users: response = requests.get( - endpoint, auth=BearerAuth(self.access_token) - ) + final_endpoint, auth=BearerAuth(self.access_token) + ) json_response = response.json() assert 200 == response.status_code assert 'value' in json_response @@ -131,8 +132,8 @@ def users(self) -> List[AzureUser]: request_token = remaining_users_link.split( skip_token_attribute )[1] - endpoint = endpoint + skip_token_attribute + request_token - + final_endpoint = endpoint + skip_token_attribute + request_token + for user in users: user_emails = user['otherMails'] if(len(user_emails) != 0 and user_emails[0].split('@')[1] == 'ioet.com'):