From 4dfbe7c2ae60982e1ce03d8205fd7d623a894c0c Mon Sep 17 00:00:00 2001 From: PaulRC-ioet Date: Fri, 30 Oct 2020 18:13:41 -0500 Subject: [PATCH 1/6] feat: Create end point last entries #215 --- .../time_entries_namespace_test.py | 20 ++++++ .../time_entries/time_entries_model.py | 70 +++++++++++++++++-- .../time_entries/time_entries_namespace.py | 12 ++++ 3 files changed, 95 insertions(+), 7 deletions(-) diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index 0d5c19ef..06f1e545 100644 --- a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -158,6 +158,26 @@ def test_list_all_time_entries( dao_get_all_mock.assert_called_once() +def test_list_last_time_entries( + client: FlaskClient, mocker: MockFixture, valid_header: dict +): + from time_tracker_api.time_entries.time_entries_namespace import ( + time_entries_dao, + ) + + dao_get_all_mock = mocker.patch.object( + time_entries_dao, 'get_last_projects_worked', return_value=[] + ) + + response = client.get( + "/time-entries/latest", headers=valid_header, follow_redirects=True + ) + + assert HTTPStatus.OK == response.status_code + assert [] == json.loads(response.data) + dao_get_all_mock.assert_called_once() + + def test_get_time_entry_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 5abb71d1..9214d977 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -164,6 +164,8 @@ def find_all_entries( conditions=conditions, custom_sql_conditions=custom_sql_conditions, custom_params=custom_params, + max_count=kwargs.get("max_count", None), + offset=kwargs.get("offset", 0), ) return time_entries @@ -173,6 +175,7 @@ def count( conditions: dict = None, custom_sql_conditions: List[str] = None, date_range: dict = None, + **kwargs, ): conditions = conditions if conditions else {} custom_sql_conditions = ( @@ -424,7 +427,8 @@ def get_all(self, conditions: dict = None, **kwargs) -> list: conditions.update({"owner_id": event_ctx.user_id}) custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=conditions, + is_admin=event_ctx.is_admin, + conditions=conditions, ) date_range = self.handle_date_filter_args(args=conditions) limit = conditions.get("limit", None) @@ -437,6 +441,47 @@ def get_all(self, conditions: dict = None, **kwargs) -> list: max_count=limit, ) + def get_last_projects_worked( + self, conditions: dict = None, **kwargs + ) -> list: + event_ctx = self.create_event_context("read-many") + conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + + project_dao = projects_model.create_dao() + projects = project_dao.get_all() + projects_ids = [project.id for project in projects] + + activity_dao = activities_model.create_dao() + activities = activity_dao.get_all( + visible_only=False, + ) + + result = [] + for id_project in projects_ids: + conditions.update({"project_id": id_project}) + + limit = 2 + latest = self.repository.find_all_entries( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + 
max_count=limit, + ) + + if len(latest) >= 1: + result.append(latest[0]) + + add_activity_name_to_time_entries(result, activities) + add_project_info_to_time_entries(result, projects) + + return result + def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: get_all_conditions = dict(conditions) get_all_conditions.pop("length") @@ -444,7 +489,8 @@ def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: event_ctx = self.create_event_context("read-many") get_all_conditions.update({"owner_id": event_ctx.user_id}) custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=get_all_conditions, + is_admin=event_ctx.is_admin, + conditions=get_all_conditions, ) date_range = self.handle_date_filter_args(args=get_all_conditions) records_total = self.repository.count( @@ -455,7 +501,8 @@ def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: ) conditions.update({"owner_id": event_ctx.user_id}) custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=conditions, + is_admin=event_ctx.is_admin, + conditions=conditions, ) date_range = self.handle_date_filter_args(args=conditions) length = conditions.get("length", None) @@ -499,7 +546,11 @@ def update(self, id, data: dict, description=None): time_entry = self.repository.find(id, event_ctx) self.check_whether_current_user_owns_item(time_entry) - return self.repository.partial_update(id, data, event_ctx,) + return self.repository.partial_update( + id, + data, + event_ctx, + ) def stop(self, id): event_ctx = self.create_event_context("update", "Stop time entry") @@ -509,7 +560,9 @@ def stop(self, id): self.check_time_entry_is_not_stopped(time_entry) return self.repository.partial_update( - id, {'end_date': current_datetime_str()}, event_ctx, + id, + {'end_date': current_datetime_str()}, + event_ctx, ) def restart(self, id): @@ -520,7 +573,9 @@ def restart(self, id): self.check_time_entry_is_not_started(time_entry) return self.repository.partial_update( - id, {'end_date': None}, event_ctx, + id, + {'end_date': None}, + event_ctx, ) def delete(self, id): @@ -528,7 +583,8 @@ def delete(self, id): time_entry = self.repository.find(id, event_ctx) self.check_whether_current_user_owns_item(time_entry) self.repository.delete( - id, event_ctx, + id, + event_ctx, ) def find_running(self): diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index da614e03..7d598183 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -256,6 +256,18 @@ def post(self): return time_entries_dao.create(ns.payload), HTTPStatus.CREATED +@ns.route('/latest') +class TimeEntries(Resource): + @ns.doc('list_latest_time_entries') + @ns.expect(attributes_filter) + @ns.marshal_list_with(time_entry) + @ns.response(HTTPStatus.NOT_FOUND, 'Time entry not found') + def get(self): + """List the latest time entries""" + conditions = attributes_filter.parse_args() + return time_entries_dao.get_last_projects_worked(conditions=conditions) + + @ns.route('/') @ns.response(HTTPStatus.NOT_FOUND, 'This time entry does not exist') @ns.response(HTTPStatus.UNPROCESSABLE_ENTITY, 'The id has an invalid format') From 9c5479aa2c07b753c892c5cb8468b5cb1a7f4768 Mon Sep 17 00:00:00 2001 From: PaulRC-ioet Date: Fri, 30 Oct 2020 18:13:41 -0500 Subject: [PATCH 2/6] feat: Create end point last entries #215 --- time_tracker_api/time_entries/time_entries_model.py | 1 + 1 file 
changed, 1 insertion(+) diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 9214d977..7fda9678 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -146,6 +146,7 @@ def find_all_entries( conditions: dict = None, custom_sql_conditions: List[str] = None, date_range: dict = None, + **kwargs, ): conditions = conditions if conditions else {} custom_sql_conditions = ( From 77e7d539044b2adfa66733dd4afb89454305eac6 Mon Sep 17 00:00:00 2001 From: Jose Puebla Date: Thu, 12 Nov 2020 16:26:23 -0500 Subject: [PATCH 3/6] feat: #215 Return Latest Entries and refactor Time Entries --- tests/conftest.py | 8 +- .../time_entries/time_entries_model_test.py | 24 +- .../time_entries_namespace_test.py | 191 +++--- .../time_entries/time_entries_dao.py | 313 ++++++++++ .../time_entries/time_entries_model.py | 542 ------------------ .../time_entries/time_entries_namespace.py | 15 +- .../time_entries/time_entries_repository.py | 297 ++++++++++ 7 files changed, 736 insertions(+), 654 deletions(-) create mode 100644 time_tracker_api/time_entries/time_entries_dao.py create mode 100644 time_tracker_api/time_entries/time_entries_repository.py diff --git a/tests/conftest.py b/tests/conftest.py index 5cb5c18d..3c55d3e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,7 @@ from time_tracker_api import create_app from time_tracker_api.database import init_sql from time_tracker_api.security import get_or_generate_dev_secret_key -from time_tracker_api.time_entries.time_entries_model import ( +from time_tracker_api.time_entries.time_entries_repository import ( TimeEntryCosmosDBRepository, ) @@ -84,7 +84,11 @@ def cosmos_db_model(): return { 'id': 'test', 'partition_key': PartitionKey(path='/tenant_id'), - 'unique_key_policy': {'uniqueKeys': [{'paths': ['/email']},]}, + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/email']}, + ] + }, } diff --git a/tests/time_tracker_api/time_entries/time_entries_model_test.py b/tests/time_tracker_api/time_entries/time_entries_model_test.py index 002e8ccf..3883113c 100644 --- a/tests/time_tracker_api/time_entries/time_entries_model_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_model_test.py @@ -3,9 +3,11 @@ from commons.data_access_layer.database import EventContext from time_tracker_api.time_entries.time_entries_model import ( - TimeEntryCosmosDBRepository, TimeEntryCosmosDBModel, ) +from time_tracker_api.time_entries.time_entries_repository import ( + TimeEntryCosmosDBRepository, +) def create_time_entry( @@ -174,16 +176,20 @@ def test_find_interception_should_ignore_id_of_existing_item( ) try: - colliding_result = time_entry_repository.find_interception_with_date_range( - start_date, end_date, owner_id, tenant_id + colliding_result = ( + time_entry_repository.find_interception_with_date_range( + start_date, end_date, owner_id, tenant_id + ) ) - non_colliding_result = time_entry_repository.find_interception_with_date_range( - start_date, - end_date, - owner_id, - tenant_id, - ignore_id=existing_item.id, + non_colliding_result = ( + time_entry_repository.find_interception_with_date_range( + start_date, + end_date, + owner_id, + tenant_id, + ignore_id=existing_item.id, + ) ) assert colliding_result is not None diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index 06f1e545..429f594a 100644 --- 
a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -39,12 +39,11 @@ def test_create_time_entry_with_invalid_date_range_should_raise_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -65,12 +64,11 @@ def test_create_time_entry_with_invalid_date_range_should_raise_bad_request( def test_create_time_entry_with_end_date_in_future_should_raise_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -92,12 +90,11 @@ def test_create_time_entry_with_end_date_in_future_should_raise_bad_request( def test_create_time_entry_should_succeed_with_valid_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_create_mock = mocker.patch.object( time_entries_dao.repository, 'create', return_value=fake_time_entry ) @@ -114,12 +111,11 @@ def test_create_time_entry_should_succeed_with_valid_request( def test_create_time_entry_with_missing_req_field_should_return_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_create_mock = mocker.patch.object( time_entries_dao.repository, 'create', return_value=fake_time_entry ) @@ -139,12 +135,11 @@ def test_create_time_entry_with_missing_req_field_should_return_bad_request( def test_list_all_time_entries( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - dao_get_all_mock = mocker.patch.object( time_entries_dao, 'get_all', return_value=[] ) @@ -166,7 +161,7 @@ def test_list_last_time_entries( ) dao_get_all_mock = mocker.patch.object( - time_entries_dao, 'get_last_projects_worked', return_value=[] + time_entries_dao, 'get_lastest_entries_by_project', return_value=[] ) response = client.get( @@ -181,10 +176,25 @@ def test_list_last_time_entries( def test_get_time_entry_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, + dao_get_all_mock = mocker.patch.object( + time_entries_dao, 'get_lastest_entries_by_project', return_value=[] ) + response = client.get( + "/time-entries/latest", headers=valid_header, follow_redirects=True + ) + + assert HTTPStatus.OK == response.status_code + assert [] == json.loads(response.data) + dao_get_all_mock.assert_called_once() + + +def 
test_get_time_entry_should_succeed_with_valid_id( + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, +): dao_get_mock = mocker.patch.object( time_entries_dao, 'get', return_value={} ) @@ -215,11 +225,8 @@ def test_get_time_entry_raise_http_exception( valid_id: str, http_exception: HTTPException, http_status: tuple, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.find = Mock(side_effect=http_exception) response = client.get( @@ -233,12 +240,12 @@ def test_get_time_entry_raise_http_exception( def test_update_time_entry_calls_partial_update_with_incoming_payload( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -261,12 +268,11 @@ def test_update_time_entry_calls_partial_update_with_incoming_payload( def test_update_time_entry_should_reject_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - invalid_time_entry_data = valid_time_entry_input.copy() invalid_time_entry_data.update( {"project_id": fake.pyint(min_value=1, max_value=100)} @@ -288,11 +294,12 @@ def test_update_time_entry_should_reject_bad_request( def test_update_time_entry_raise_not_found( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import NotFound time_entries_dao.repository.partial_update = Mock(side_effect=NotFound) @@ -317,12 +324,12 @@ def test_update_time_entry_raise_not_found( def test_delete_time_entry_calls_delete( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.delete = Mock(return_value=None) time_entries_dao.repository.find = Mock() time_entries_dao.check_whether_current_user_owns_item = Mock() @@ -353,11 +360,8 @@ def test_delete_time_entry_raise_http_exception( valid_id: str, http_exception: HTTPException, http_status: tuple, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.delete = Mock(side_effect=http_exception) time_entries_dao.repository.find = Mock() time_entries_dao.check_whether_current_user_owns_item = Mock() @@ -375,12 +379,12 @@ def test_delete_time_entry_raise_http_exception( def test_stop_time_entry_calls_partial_update( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - 
time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -402,11 +406,12 @@ def test_stop_time_entry_calls_partial_update( def test_stop_time_entry_raise_unprocessable_entity( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import UnprocessableEntity time_entries_dao.repository.partial_update = Mock( @@ -431,12 +436,12 @@ def test_stop_time_entry_raise_unprocessable_entity( def test_restart_time_entry_calls_partial_update( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -458,11 +463,12 @@ def test_restart_time_entry_calls_partial_update( def test_restart_time_entry_raise_unprocessable_entity( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import UnprocessableEntity time_entries_dao.repository.partial_update = Mock( @@ -493,11 +499,8 @@ def test_get_running_should_call_find_running( valid_header: dict, tenant_id: str, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_update_mock = mocker.patch.object( time_entries_dao.repository, 'find_running', @@ -519,11 +522,8 @@ def test_get_running_should_return_not_found_if_StopIteration( valid_header: dict, tenant_id: str, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_update_mock = mocker.patch.object( time_entries_dao.repository, 'find_running', side_effect=StopIteration ) @@ -545,11 +545,8 @@ def test_create_with_invalid_uuid_format_should_return_bad_request( mocker: MockFixture, valid_header: dict, invalid_uuid: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -576,11 +573,8 @@ def test_create_with_valid_uuid_format_should_return_created( mocker: MockFixture, valid_header: dict, valid_uuid: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -614,7 +608,10 @@ def test_create_with_valid_uuid_format_should_return_created( ], ) def test_get_all_passes_date_range_built_from_params_to_find_all( - client: FlaskClient, valid_header: dict, url: str, time_entries_dao + client: FlaskClient, + valid_header: dict, + url: str, + time_entries_dao, ): time_entries_dao.repository.find_all = Mock(return_value=[]) @@ -724,11 +721,8 @@ def 
test_summary_is_called_with_date_range_from_worked_time_module( mocker: MockFixture, valid_header: dict, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - worked_time.date_range = Mock(return_value=worked_time.date_range()) repository_find_all_mock = mocker.patch.object( time_entries_dao.repository, 'find_all_entries', return_value=[] @@ -749,14 +743,16 @@ def test_summary_is_called_with_date_range_from_worked_time_module( def test_paginated_fails_with_no_params( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get('/time-entries/paginated', headers=valid_header) assert HTTPStatus.BAD_REQUEST == response.status_code def test_paginated_succeeds_with_valid_params( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get( '/time-entries/paginated?start_date=2020-09-10T00:00:00-05:00&end_date=2020-09-10T23:59:59-05:00&timezone_offset=300&start=0&length=5', @@ -766,7 +762,8 @@ def test_paginated_succeeds_with_valid_params( def test_paginated_response_contains_expected_props( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get( '/time-entries/paginated?start_date=2020-09-10T00:00:00-05:00&end_date=2020-09-10T23:59:59-05:00&timezone_offset=300&start=0&length=5', diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py new file mode 100644 index 00000000..e9a418a3 --- /dev/null +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -0,0 +1,313 @@ +import abc +from commons.data_access_layer.cosmos_db import ( + CosmosDBDao, + # CosmosDBRepository, + CustomError, + # CosmosDBModel, +) +from utils.extend_model import ( + add_project_info_to_time_entries, + add_activity_name_to_time_entries, + # create_in_condition, + create_custom_query_from_str, + add_user_email_to_time_entries, +) +from utils.time import ( + datetime_str, + str_to_datetime, + get_current_year, + get_current_month, + get_date_range_of_month, + current_datetime_str, +) +from flask_restplus import abort +from flask_restplus._http import HTTPStatus +from time_tracker_api.activities import activities_model + +from time_tracker_api.projects import projects_model +from utils import worked_time +from datetime import timedelta +from time_tracker_api.time_entries.time_entries_repository import ( + TimeEntryCosmosDBRepository, +) +from time_tracker_api.database import CRUDDao, APICosmosDBDao + + +class TimeEntriesDao(CRUDDao): + @staticmethod + def current_user_id(): + return current_user_id() + + @abc.abstractmethod + def find_running(self): + pass + + @abc.abstractmethod + def stop(self, id: str): + pass + + @abc.abstractmethod + def restart(self, id: str): + pass + + +class TimeEntriesCosmosDBDao(APICosmosDBDao, TimeEntriesDao): + def __init__(self, repository): + CosmosDBDao.__init__(self, repository) + + def check_whether_current_user_owns_item(self, data): + if ( + data.owner_id is not None + and data.owner_id != self.current_user_id() + ): + raise CustomError( + HTTPStatus.FORBIDDEN, + "The current user is not the owner of this time entry", + ) + + def check_time_entry_is_not_stopped(self, data): + if data.end_date is not None: + raise CustomError( + HTTPStatus.UNPROCESSABLE_ENTITY, + "The specified time entry is already stopped", + ) + + def check_time_entry_is_not_started(self, data): + if data.end_date is None: + 
raise CustomError( + HTTPStatus.UNPROCESSABLE_ENTITY, + "The specified time entry is already running", + ) + + def build_custom_query(self, is_admin: bool, conditions: dict = None): + custom_query = [] + if "user_id" in conditions: + if is_admin: + conditions.pop("owner_id") + custom_query = ( + [] + if conditions.get("user_id") == "*" + else [ + create_custom_query_from_str( + conditions.get("user_id"), "c.owner_id" + ) + ] + ) + conditions.pop("user_id") + else: + abort( + HTTPStatus.FORBIDDEN, "You don't have enough permissions." + ) + return custom_query + + def get_all(self, conditions: dict = None, **kwargs) -> list: + event_ctx = self.create_event_context("read-many") + conditions.update({"owner_id": event_ctx.user_id}) + + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + limit = conditions.get("limit", None) + conditions.pop("limit", None) + return self.repository.find_all( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=limit, + ) + + def get_lastest_entries_by_project( + self, conditions: dict = None, **kwargs + ) -> list: + event_ctx = self.create_event_context("read-many") + conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + + project_dao = projects_model.create_dao() + projects = project_dao.get_all() + projects_ids = [project.id for project in projects] + + activity_dao = activities_model.create_dao() + activities = activity_dao.get_all( + visible_only=False, + ) + + result = [] + for id_project in projects_ids: + conditions.update({"project_id": id_project}) + + limit = 1 + latest = self.repository.find_all_entries( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=limit, + ) + + if len(latest) > 0: + result.append(latest[0]) + + add_activity_name_to_time_entries(result, activities) + add_project_info_to_time_entries(result, projects) + + return result + + def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: + get_all_conditions = dict(conditions) + get_all_conditions.pop("length") + get_all_conditions.pop("start") + event_ctx = self.create_event_context("read-many") + get_all_conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=get_all_conditions, + ) + date_range = self.handle_date_filter_args(args=get_all_conditions) + records_total = self.repository.count( + event_ctx, + conditions=get_all_conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + ) + conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + length = conditions.get("length", None) + conditions.pop("length", None) + start = conditions.get("start", None) + conditions.pop("start", None) + + time_entries = self.repository.find_all( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=length, + offset=start, + ) + + return { + 'records_total': records_total, + 'data': time_entries, + } + + def get(self, id): + event_ctx = self.create_event_context("read") + + time_entry = 
self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + + project_dao = projects_model.create_dao() + project = project_dao.get(time_entry.project_id) + setattr(time_entry, 'project_name', project.name) + return time_entry + + def create(self, data: dict): + event_ctx = self.create_event_context("create") + data['owner_id'] = event_ctx.user_id + return self.repository.create(data, event_ctx) + + def update(self, id, data: dict, description=None): + event_ctx = self.create_event_context("update", description) + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + + return self.repository.partial_update( + id, + data, + event_ctx, + ) + + def stop(self, id): + event_ctx = self.create_event_context("update", "Stop time entry") + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.check_time_entry_is_not_stopped(time_entry) + + return self.repository.partial_update( + id, + {'end_date': current_datetime_str()}, + event_ctx, + ) + + def restart(self, id): + event_ctx = self.create_event_context("update", "Restart time entry") + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.check_time_entry_is_not_started(time_entry) + + return self.repository.partial_update( + id, + {'end_date': None}, + event_ctx, + ) + + def delete(self, id): + event_ctx = self.create_event_context("delete") + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.repository.delete( + id, + event_ctx, + ) + + def find_running(self): + event_ctx = self.create_event_context("find_running") + time_entry = self.repository.find_running( + event_ctx.tenant_id, event_ctx.user_id + ) + return time_entry + + def get_worked_time(self, args: dict): + event_ctx = self.create_event_context( + "read", "Summary of worked time in the current month" + ) + + conditions = {"owner_id": event_ctx.user_id} + time_entries = self.repository.find_all_entries( + event_ctx, + conditions=conditions, + date_range=worked_time.date_range(), + ) + return worked_time.summary( + time_entries, time_offset=args.get('time_offset') + ) + + @staticmethod + def handle_date_filter_args(args: dict) -> dict: + if "start_date" and "end_date" in args: + start_date = str_to_datetime(args.pop('start_date')) + end_date = str_to_datetime(args.pop('end_date')) + else: + month = int(args.pop("month", get_current_month())) + year = int(args.pop("year", get_current_year())) + start_date, end_date = get_date_range_of_month(year, month) + + offset_in_minutes = int(args.pop('timezone_offset', 300)) + start_date = start_date + timedelta(minutes=offset_in_minutes) + end_date = end_date + timedelta(minutes=offset_in_minutes) + + return { + 'start_date': datetime_str(start_date), + 'end_date': datetime_str(end_date), + } + + +def create_dao() -> TimeEntriesDao: + repository = TimeEntryCosmosDBRepository() + + return TimeEntriesCosmosDBDao(repository) diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 7fda9678..68cb725e 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -40,25 +40,6 @@ from time_tracker_api.database import CRUDDao, APICosmosDBDao from time_tracker_api.security import current_user_id - -class TimeEntriesDao(CRUDDao): - @staticmethod - def 
current_user_id(): - return current_user_id() - - @abc.abstractmethod - def find_running(self): - pass - - @abc.abstractmethod - def stop(self, id: str): - pass - - @abc.abstractmethod - def restart(self, id: str): - pass - - container_definition = { 'id': 'time_entry', 'partition_key': PartitionKey(path='/tenant_id'), @@ -111,526 +92,3 @@ def __str___(self): return ( "Time Entry started in \"%s\"" % self.start_date ) # pragma: no cover - - -class TimeEntryCosmosDBRepository(CosmosDBRepository): - def __init__(self): - CosmosDBRepository.__init__( - self, - container_id=container_definition['id'], - partition_key_attribute='tenant_id', - order_fields=['start_date DESC'], - mapper=TimeEntryCosmosDBModel, - ) - - @staticmethod - def create_sql_ignore_id_condition(id: str): - if id is None: - return '' - else: - return "AND c.id!=@ignore_id" - - @staticmethod - def create_sql_date_range_filter(date_range: dict) -> str: - if 'start_date' and 'end_date' in date_range: - return """ - ((c.start_date BETWEEN @start_date AND @end_date) OR - (c.end_date BETWEEN @start_date AND @end_date)) - """ - else: - return '' - - def find_all_entries( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - date_range: dict = None, - **kwargs, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - time_entries = CosmosDBRepository.find_all( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - max_count=kwargs.get("max_count", None), - offset=kwargs.get("offset", 0), - ) - return time_entries - - def count( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - date_range: dict = None, - **kwargs, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - counter = CosmosDBRepository.count( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - ) - return counter - - def find_all( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - date_range: dict = None, - **kwargs, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - time_entries = CosmosDBRepository.find_all( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - max_count=kwargs.get("max_count", None), - offset=kwargs.get("offset", 0), - ) - - if time_entries: - custom_conditions = create_in_condition(time_entries, "project_id") - custom_conditions_activity = create_in_condition( - time_entries, "activity_id" - ) - - 
project_dao = projects_model.create_dao() - projects = project_dao.get_all( - custom_sql_conditions=[custom_conditions], - visible_only=False, - max_count=kwargs.get("max_count", None), - ) - - add_project_info_to_time_entries(time_entries, projects) - - activity_dao = activities_model.create_dao() - activities = activity_dao.get_all( - custom_sql_conditions=[custom_conditions_activity], - visible_only=False, - max_count=kwargs.get("max_count", None), - ) - add_activity_name_to_time_entries(time_entries, activities) - - users = AzureConnection().users() - add_user_email_to_time_entries(time_entries, users) - elif not time_entries and len(conditions) > 1: - abort(HTTPStatus.NOT_FOUND, "Time entry not found") - return time_entries - - def on_create(self, new_item_data: dict, event_context: EventContext): - CosmosDBRepository.on_create(self, new_item_data, event_context) - - if new_item_data.get("start_date") is None: - new_item_data['start_date'] = current_datetime_str() - - self.validate_data(new_item_data, event_context) - - def on_update(self, updated_item_data: dict, event_context: EventContext): - CosmosDBRepository.on_update(self, updated_item_data, event_context) - self.validate_data(updated_item_data, event_context) - self.replace_empty_value_per_none(updated_item_data) - - def find_interception_with_date_range( - self, - start_date, - end_date, - owner_id, - tenant_id, - ignore_id=None, - visible_only=True, - mapper: Callable = None, - ): - conditions = { - "owner_id": owner_id, - "tenant_id": tenant_id, - } - params = [ - {"name": "@start_date", "value": start_date}, - {"name": "@end_date", "value": end_date or current_datetime_str()}, - {"name": "@ignore_id", "value": ignore_id}, - ] - params.extend(self.generate_params(conditions)) - result = self.container.query_items( - query=""" - SELECT * FROM c - WHERE ((c.start_date BETWEEN @start_date AND @end_date) - OR (c.end_date BETWEEN @start_date AND @end_date)) - AND c.start_date!= @end_date - AND c.end_date!= @start_date - {conditions_clause} - {ignore_id_condition} - {visibility_condition} - {order_clause} - """.format( - ignore_id_condition=self.create_sql_ignore_id_condition( - ignore_id - ), - visibility_condition=self.create_sql_condition_for_visibility( - visible_only - ), - conditions_clause=self.create_sql_where_conditions(conditions), - order_clause=self.create_sql_order_clause(), - ), - parameters=params, - partition_key=tenant_id, - ) - - function_mapper = self.get_mapper_or_dict(mapper) - return list(map(function_mapper, result)) - - def find_running( - self, tenant_id: str, owner_id: str, mapper: Callable = None - ): - conditions = { - "owner_id": owner_id, - "tenant_id": tenant_id, - } - result = self.container.query_items( - query=""" - SELECT * from c - WHERE (NOT IS_DEFINED(c.end_date) OR c.end_date = null) - {conditions_clause} - {visibility_condition} - OFFSET 0 LIMIT 1 - """.format( - visibility_condition=self.create_sql_condition_for_visibility( - True - ), - conditions_clause=self.create_sql_where_conditions(conditions), - ), - parameters=self.generate_params(conditions), - partition_key=tenant_id, - max_item_count=1, - ) - - function_mapper = self.get_mapper_or_dict(mapper) - return function_mapper(next(result)) - - def validate_data(self, data, event_context: EventContext): - start_date = data.get('start_date') - - if data.get('end_date') is not None: - if data['end_date'] <= start_date: - raise CustomError( - HTTPStatus.BAD_REQUEST, - description="You must end the time entry after it started", - ) - if 
data['end_date'] >= current_datetime_str(): - raise CustomError( - HTTPStatus.BAD_REQUEST, - description="You cannot end a time entry in the future", - ) - - collision = self.find_interception_with_date_range( - start_date=start_date, - end_date=data.get('end_date'), - owner_id=event_context.user_id, - tenant_id=event_context.tenant_id, - ignore_id=data.get('id'), - ) - if len(collision) > 0: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - description="There is another time entry in that date range", - ) - - -class TimeEntriesCosmosDBDao(APICosmosDBDao, TimeEntriesDao): - def __init__(self, repository): - CosmosDBDao.__init__(self, repository) - - def check_whether_current_user_owns_item(self, data): - if ( - data.owner_id is not None - and data.owner_id != self.current_user_id() - ): - raise CustomError( - HTTPStatus.FORBIDDEN, - "The current user is not the owner of this time entry", - ) - - def check_time_entry_is_not_stopped(self, data): - if data.end_date is not None: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - "The specified time entry is already stopped", - ) - - def check_time_entry_is_not_started(self, data): - if data.end_date is None: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - "The specified time entry is already running", - ) - - def build_custom_query(self, is_admin: bool, conditions: dict = None): - custom_query = [] - if "user_id" in conditions: - if is_admin: - conditions.pop("owner_id") - custom_query = ( - [] - if conditions.get("user_id") == "*" - else [ - create_custom_query_from_str( - conditions.get("user_id"), "c.owner_id" - ) - ] - ) - conditions.pop("user_id") - else: - abort( - HTTPStatus.FORBIDDEN, "You don't have enough permissions." - ) - return custom_query - - def get_all(self, conditions: dict = None, **kwargs) -> list: - event_ctx = self.create_event_context("read-many") - conditions.update({"owner_id": event_ctx.user_id}) - - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, - conditions=conditions, - ) - date_range = self.handle_date_filter_args(args=conditions) - limit = conditions.get("limit", None) - conditions.pop("limit", None) - return self.repository.find_all( - event_ctx, - conditions=conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - max_count=limit, - ) - - def get_last_projects_worked( - self, conditions: dict = None, **kwargs - ) -> list: - event_ctx = self.create_event_context("read-many") - conditions.update({"owner_id": event_ctx.user_id}) - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, - conditions=conditions, - ) - date_range = self.handle_date_filter_args(args=conditions) - - project_dao = projects_model.create_dao() - projects = project_dao.get_all() - projects_ids = [project.id for project in projects] - - activity_dao = activities_model.create_dao() - activities = activity_dao.get_all( - visible_only=False, - ) - - result = [] - for id_project in projects_ids: - conditions.update({"project_id": id_project}) - - limit = 2 - latest = self.repository.find_all_entries( - event_ctx, - conditions=conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - max_count=limit, - ) - - if len(latest) >= 1: - result.append(latest[0]) - - add_activity_name_to_time_entries(result, activities) - add_project_info_to_time_entries(result, projects) - - return result - - def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: - get_all_conditions = dict(conditions) - get_all_conditions.pop("length") - 
get_all_conditions.pop("start") - event_ctx = self.create_event_context("read-many") - get_all_conditions.update({"owner_id": event_ctx.user_id}) - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, - conditions=get_all_conditions, - ) - date_range = self.handle_date_filter_args(args=get_all_conditions) - records_total = self.repository.count( - event_ctx, - conditions=get_all_conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - ) - conditions.update({"owner_id": event_ctx.user_id}) - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, - conditions=conditions, - ) - date_range = self.handle_date_filter_args(args=conditions) - length = conditions.get("length", None) - conditions.pop("length", None) - start = conditions.get("start", None) - conditions.pop("start", None) - - time_entries = self.repository.find_all( - event_ctx, - conditions=conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - max_count=length, - offset=start, - ) - - return { - 'records_total': records_total, - 'data': time_entries, - } - - def get(self, id): - event_ctx = self.create_event_context("read") - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - - project_dao = projects_model.create_dao() - project = project_dao.get(time_entry.project_id) - setattr(time_entry, 'project_name', project.name) - return time_entry - - def create(self, data: dict): - event_ctx = self.create_event_context("create") - data['owner_id'] = event_ctx.user_id - return self.repository.create(data, event_ctx) - - def update(self, id, data: dict, description=None): - event_ctx = self.create_event_context("update", description) - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - - return self.repository.partial_update( - id, - data, - event_ctx, - ) - - def stop(self, id): - event_ctx = self.create_event_context("update", "Stop time entry") - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.check_time_entry_is_not_stopped(time_entry) - - return self.repository.partial_update( - id, - {'end_date': current_datetime_str()}, - event_ctx, - ) - - def restart(self, id): - event_ctx = self.create_event_context("update", "Restart time entry") - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.check_time_entry_is_not_started(time_entry) - - return self.repository.partial_update( - id, - {'end_date': None}, - event_ctx, - ) - - def delete(self, id): - event_ctx = self.create_event_context("delete") - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.repository.delete( - id, - event_ctx, - ) - - def find_running(self): - event_ctx = self.create_event_context("find_running") - time_entry = self.repository.find_running( - event_ctx.tenant_id, event_ctx.user_id - ) - return time_entry - - def get_worked_time(self, args: dict): - event_ctx = self.create_event_context( - "read", "Summary of worked time in the current month" - ) - - conditions = {"owner_id": event_ctx.user_id} - time_entries = self.repository.find_all_entries( - event_ctx, - conditions=conditions, - date_range=worked_time.date_range(), - ) - return worked_time.summary( - time_entries, time_offset=args.get('time_offset') - ) - - @staticmethod - def handle_date_filter_args(args: dict) -> dict: - if 
"start_date" and "end_date" in args: - start_date = str_to_datetime(args.pop('start_date')) - end_date = str_to_datetime(args.pop('end_date')) - else: - month = int(args.pop("month", get_current_month())) - year = int(args.pop("year", get_current_year())) - start_date, end_date = get_date_range_of_month(year, month) - - offset_in_minutes = int(args.pop('timezone_offset', 300)) - start_date = start_date + timedelta(minutes=offset_in_minutes) - end_date = end_date + timedelta(minutes=offset_in_minutes) - - return { - 'start_date': datetime_str(start_date), - 'end_date': datetime_str(end_date), - } - - -def create_dao() -> TimeEntriesDao: - repository = TimeEntryCosmosDBRepository() - - return TimeEntriesCosmosDBDao(repository) diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index 7d598183..9984862c 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -16,7 +16,7 @@ NullableString, remove_required_constraint, ) -from time_tracker_api.time_entries.time_entries_model import create_dao +from time_tracker_api.time_entries.time_entries_dao import create_dao faker = Faker() @@ -256,8 +256,13 @@ def post(self): return time_entries_dao.create(ns.payload), HTTPStatus.CREATED +# TODO: Once this endpoint is working as expected in prod, review and +# remove unnecessary filter args. As we are using same attributes_filter +# as the get_all endpoint and some of the args are unnecessary for this endpoint. + + @ns.route('/latest') -class TimeEntries(Resource): +class LatestTimeEntries(Resource): @ns.doc('list_latest_time_entries') @ns.expect(attributes_filter) @ns.marshal_list_with(time_entry) @@ -265,12 +270,14 @@ class TimeEntries(Resource): def get(self): """List the latest time entries""" conditions = attributes_filter.parse_args() - return time_entries_dao.get_last_projects_worked(conditions=conditions) + + return time_entries_dao.get_lastest_entries_by_project( + conditions=conditions + ) @ns.route('/') @ns.response(HTTPStatus.NOT_FOUND, 'This time entry does not exist') -@ns.response(HTTPStatus.UNPROCESSABLE_ENTITY, 'The id has an invalid format') @ns.param('id', 'The unique identifier of the time entry') class TimeEntry(Resource): @ns.doc('get_time_entry') diff --git a/time_tracker_api/time_entries/time_entries_repository.py b/time_tracker_api/time_entries/time_entries_repository.py new file mode 100644 index 00000000..0540cad7 --- /dev/null +++ b/time_tracker_api/time_entries/time_entries_repository.py @@ -0,0 +1,297 @@ +from commons.data_access_layer.cosmos_db import ( + # CosmosDBDao, + CosmosDBRepository, + CustomError, + # CosmosDBModel, +) + +from time_tracker_api.time_entries.time_entries_model import ( + container_definition, + TimeEntryCosmosDBModel, +) + +from utils.time import ( + # datetime_str, + # str_to_datetime, + # get_current_year, + # get_current_month, + # get_date_range_of_month, + current_datetime_str, +) + +from utils.extend_model import ( + add_project_info_to_time_entries, + add_activity_name_to_time_entries, + create_in_condition, + # create_custom_query_from_str, + add_user_email_to_time_entries, +) + +from flask_restplus import abort +from flask_restplus._http import HTTPStatus +from utils.azure_users import AzureConnection +from time_tracker_api.activities import activities_model +from commons.data_access_layer.database import EventContext +from typing import List, Callable +from time_tracker_api.projects import 
projects_model + + +class TimeEntryCosmosDBRepository(CosmosDBRepository): + def __init__(self): + CosmosDBRepository.__init__( + self, + container_id=container_definition['id'], + partition_key_attribute='tenant_id', + order_fields=['start_date DESC'], + mapper=TimeEntryCosmosDBModel, + ) + + @staticmethod + def create_sql_ignore_id_condition(id: str): + if id is None: + return '' + else: + return "AND c.id!=@ignore_id" + + @staticmethod + def create_sql_date_range_filter(date_range: dict) -> str: + if 'start_date' and 'end_date' in date_range: + return """ + ((c.start_date BETWEEN @start_date AND @end_date) OR + (c.end_date BETWEEN @start_date AND @end_date)) + """ + else: + return '' + + def find_all_entries( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + time_entries = CosmosDBRepository.find_all( + self, + event_context=event_context, + conditions=conditions, + custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + max_count=kwargs.get("max_count", None), + offset=kwargs.get("offset", 0), + ) + return time_entries + + def count( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + counter = CosmosDBRepository.count( + self, + event_context=event_context, + conditions=conditions, + custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + ) + return counter + + def find_all( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + time_entries = CosmosDBRepository.find_all( + self, + event_context=event_context, + conditions=conditions, + custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + max_count=kwargs.get("max_count", None), + offset=kwargs.get("offset", 0), + ) + + if time_entries: + custom_conditions = create_in_condition(time_entries, "project_id") + custom_conditions_activity = create_in_condition( + time_entries, "activity_id" + ) + + project_dao = projects_model.create_dao() + projects = project_dao.get_all( + custom_sql_conditions=[custom_conditions], + visible_only=False, + max_count=kwargs.get("max_count", None), + ) + + add_project_info_to_time_entries(time_entries, projects) + + activity_dao = activities_model.create_dao() + activities = activity_dao.get_all( + custom_sql_conditions=[custom_conditions_activity], + visible_only=False, + 
max_count=kwargs.get("max_count", None), + ) + add_activity_name_to_time_entries(time_entries, activities) + + users = AzureConnection().users() + add_user_email_to_time_entries(time_entries, users) + elif not time_entries and len(conditions) > 1: + abort(HTTPStatus.NOT_FOUND, "Time entry not found") + return time_entries + + def on_create(self, new_item_data: dict, event_context: EventContext): + CosmosDBRepository.on_create(self, new_item_data, event_context) + + if new_item_data.get("start_date") is None: + new_item_data['start_date'] = current_datetime_str() + + self.validate_data(new_item_data, event_context) + + def on_update(self, updated_item_data: dict, event_context: EventContext): + CosmosDBRepository.on_update(self, updated_item_data, event_context) + self.validate_data(updated_item_data, event_context) + self.replace_empty_value_per_none(updated_item_data) + + def find_interception_with_date_range( + self, + start_date, + end_date, + owner_id, + tenant_id, + ignore_id=None, + visible_only=True, + mapper: Callable = None, + ): + conditions = { + "owner_id": owner_id, + "tenant_id": tenant_id, + } + params = [ + {"name": "@start_date", "value": start_date}, + {"name": "@end_date", "value": end_date or current_datetime_str()}, + {"name": "@ignore_id", "value": ignore_id}, + ] + params.extend(self.generate_params(conditions)) + result = self.container.query_items( + query=""" + SELECT * FROM c + WHERE ((c.start_date BETWEEN @start_date AND @end_date) + OR (c.end_date BETWEEN @start_date AND @end_date)) + AND c.start_date!= @end_date + AND c.end_date!= @start_date + {conditions_clause} + {ignore_id_condition} + {visibility_condition} + {order_clause} + """.format( + ignore_id_condition=self.create_sql_ignore_id_condition( + ignore_id + ), + visibility_condition=self.create_sql_condition_for_visibility( + visible_only + ), + conditions_clause=self.create_sql_where_conditions(conditions), + order_clause=self.create_sql_order_clause(), + ), + parameters=params, + partition_key=tenant_id, + ) + + function_mapper = self.get_mapper_or_dict(mapper) + return list(map(function_mapper, result)) + + def find_running( + self, tenant_id: str, owner_id: str, mapper: Callable = None + ): + conditions = { + "owner_id": owner_id, + "tenant_id": tenant_id, + } + result = self.container.query_items( + query=""" + SELECT * from c + WHERE (NOT IS_DEFINED(c.end_date) OR c.end_date = null) + {conditions_clause} + {visibility_condition} + OFFSET 0 LIMIT 1 + """.format( + visibility_condition=self.create_sql_condition_for_visibility( + True + ), + conditions_clause=self.create_sql_where_conditions(conditions), + ), + parameters=self.generate_params(conditions), + partition_key=tenant_id, + max_item_count=1, + ) + + function_mapper = self.get_mapper_or_dict(mapper) + return function_mapper(next(result)) + + def validate_data(self, data, event_context: EventContext): + start_date = data.get('start_date') + + if data.get('end_date') is not None: + if data['end_date'] <= start_date: + raise CustomError( + HTTPStatus.BAD_REQUEST, + description="You must end the time entry after it started", + ) + if data['end_date'] >= current_datetime_str(): + raise CustomError( + HTTPStatus.BAD_REQUEST, + description="You cannot end a time entry in the future", + ) + + collision = self.find_interception_with_date_range( + start_date=start_date, + end_date=data.get('end_date'), + owner_id=event_context.user_id, + tenant_id=event_context.tenant_id, + ignore_id=data.get('id'), + ) + if len(collision) > 0: + raise CustomError( + 
HTTPStatus.UNPROCESSABLE_ENTITY, + description="There is another time entry in that date range", + ) From fec9f23ee90fd4878a7178dc5ba4117627bf658d Mon Sep 17 00:00:00 2001 From: PaulRC-ioet Date: Fri, 13 Nov 2020 10:43:38 -0500 Subject: [PATCH 4/6] fix: #215 Remove unnecessary comments --- time_tracker_api/time_entries/time_entries_dao.py | 3 --- time_tracker_api/time_entries/time_entries_repository.py | 8 -------- 2 files changed, 11 deletions(-) diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py index e9a418a3..a8705752 100644 --- a/time_tracker_api/time_entries/time_entries_dao.py +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -1,14 +1,11 @@ import abc from commons.data_access_layer.cosmos_db import ( CosmosDBDao, - # CosmosDBRepository, CustomError, - # CosmosDBModel, ) from utils.extend_model import ( add_project_info_to_time_entries, add_activity_name_to_time_entries, - # create_in_condition, create_custom_query_from_str, add_user_email_to_time_entries, ) diff --git a/time_tracker_api/time_entries/time_entries_repository.py b/time_tracker_api/time_entries/time_entries_repository.py index 0540cad7..b909b80d 100644 --- a/time_tracker_api/time_entries/time_entries_repository.py +++ b/time_tracker_api/time_entries/time_entries_repository.py @@ -1,8 +1,6 @@ from commons.data_access_layer.cosmos_db import ( - # CosmosDBDao, CosmosDBRepository, CustomError, - # CosmosDBModel, ) from time_tracker_api.time_entries.time_entries_model import ( @@ -11,11 +9,6 @@ ) from utils.time import ( - # datetime_str, - # str_to_datetime, - # get_current_year, - # get_current_month, - # get_date_range_of_month, current_datetime_str, ) @@ -23,7 +16,6 @@ add_project_info_to_time_entries, add_activity_name_to_time_entries, create_in_condition, - # create_custom_query_from_str, add_user_email_to_time_entries, ) From 32b5111c3bbd0c2b38019803995b5c1ec7e61ae4 Mon Sep 17 00:00:00 2001 From: PaulRC-ioet Date: Fri, 13 Nov 2020 15:51:01 -0500 Subject: [PATCH 5/6] fix: #215 remove atribute filters in last time entry --- time_tracker_api/time_entries/time_entries_dao.py | 1 + .../time_entries/time_entries_namespace.py | 12 ++---------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py index a8705752..6ce3e415 100644 --- a/time_tracker_api/time_entries/time_entries_dao.py +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -28,6 +28,7 @@ TimeEntryCosmosDBRepository, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao +from time_tracker_api.security import current_user_id class TimeEntriesDao(CRUDDao): diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index 9984862c..fa969a46 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -256,28 +256,20 @@ def post(self): return time_entries_dao.create(ns.payload), HTTPStatus.CREATED -# TODO: Once this endpoint is working as expected in prod, review and -# remove unnecessary filter args. As we are using same attributes_filter -# as the get_all endpoint and some of the args are unnecessary for this endpoint. 
- - @ns.route('/latest') class LatestTimeEntries(Resource): @ns.doc('list_latest_time_entries') - @ns.expect(attributes_filter) @ns.marshal_list_with(time_entry) @ns.response(HTTPStatus.NOT_FOUND, 'Time entry not found') def get(self): """List the latest time entries""" - conditions = attributes_filter.parse_args() - return time_entries_dao.get_lastest_entries_by_project( - conditions=conditions - ) + return time_entries_dao.get_lastest_entries_by_project(conditions={}) @ns.route('/') @ns.response(HTTPStatus.NOT_FOUND, 'This time entry does not exist') +@ns.response(HTTPStatus.UNPROCESSABLE_ENTITY, 'The id has an invalid format') @ns.param('id', 'The unique identifier of the time entry') class TimeEntry(Resource): @ns.doc('get_time_entry') From c31cd1b453929c55bd48c6e425f96dbea653a97f Mon Sep 17 00:00:00 2001 From: PaulRC-ioet Date: Mon, 16 Nov 2020 10:33:05 -0500 Subject: [PATCH 6/6] fix: #215 change message and remove unnecessary import --- time_tracker_api/time_entries/time_entries_dao.py | 1 - time_tracker_api/time_entries/time_entries_namespace.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py index 6ce3e415..07b01e4b 100644 --- a/time_tracker_api/time_entries/time_entries_dao.py +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -7,7 +7,6 @@ add_project_info_to_time_entries, add_activity_name_to_time_entries, create_custom_query_from_str, - add_user_email_to_time_entries, ) from utils.time import ( datetime_str, diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index fa969a46..f2c115fc 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -260,7 +260,7 @@ def post(self): class LatestTimeEntries(Resource): @ns.doc('list_latest_time_entries') @ns.marshal_list_with(time_entry) - @ns.response(HTTPStatus.NOT_FOUND, 'Time entry not found') + @ns.response(HTTPStatus.NOT_FOUND, 'No time entries found') def get(self): """List the latest time entries"""
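The series above adds a GET /time-entries/latest endpoint that returns, for the authenticated user, the most recent time entry for each project, enriched with project and activity information by the DAO. As a rough illustration of how a client might call it, here is a minimal sketch using the requests library; the base URL, port, and Bearer-token header are placeholder assumptions for a local development setup, not values taken from the patches.

import requests

BASE_URL = "http://localhost:5000"   # assumption: local dev server
TOKEN = "<access-token>"             # assumption: API expects a Bearer token

# Call the /time-entries/latest endpoint introduced in this patch series.
response = requests.get(
    f"{BASE_URL}/time-entries/latest",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
response.raise_for_status()

# The DAO returns at most one entry per project, marshalled with the
# time_entry model; the field names printed here are illustrative.
for entry in response.json():
    print(entry.get("project_id"), entry.get("start_date"), entry.get("end_date"))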