diff --git a/.github/ISSUE_TEMPLATE/back-end.md b/.github/ISSUE_TEMPLATE/back-end.md new file mode 100644 index 00000000..e4aaa071 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/back-end.md @@ -0,0 +1,29 @@ +--- +name: Back end +about: new tasks of Back-end +title: BACK-END +labels: mvp, nice-to-have, tech-debt, enhancement +assignees: '' + +--- + +**Purpose** +A clear and concise description of this new task or feature + +**Expected behavior** +A clear and concise description of what you want to happen. + +**Optional diagrams** +Optional designs of diagrams. + +**Criteria of acceptance** +List the parameters that the task must meet to be approved. +- parameter one +- parameter two +- ... + +**Scope** +Defined scope of this task, for example business logic; the recommendation is to create a meeting with more than one person to define this scope. + +**Note** +Additional information, for example links to documentation relevant to this task. diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..ea197258 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: BUG +labels: low, medium, high +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 
22] + +**Additional context** +Add any other context about the problem here. diff --git a/CHANGELOG.md b/CHANGELOG.md index 956d2902..d408f7e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.24.4 (2020-11-14) +### Fix +* Delete ui-task template ([`0be1370`](https://github.com/ioet/time-tracker-backend/commit/0be1370d81f4dcec739ed2bf8f3eff25640af36b)) +* Issue templates ([`55b73bb`](https://github.com/ioet/time-tracker-backend/commit/55b73bb9cc546306f2c81785581fd593d71172fe)) +* Issue templates ([`76728f4`](https://github.com/ioet/time-tracker-backend/commit/76728f491820aa95d1f45ddb2565bd05ed34648f)) + ## v0.24.3 (2020-11-10) ### Fix * Allow limits overlap in time entries #217 ([`ce99603`](https://github.com/ioet/time-tracker-backend/commit/ce996032bf4cf20c129b84ad293db4c031571c95)) diff --git a/tests/conftest.py b/tests/conftest.py index 5cb5c18d..3c55d3e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,7 @@ from time_tracker_api import create_app from time_tracker_api.database import init_sql from time_tracker_api.security import get_or_generate_dev_secret_key -from time_tracker_api.time_entries.time_entries_model import ( +from time_tracker_api.time_entries.time_entries_repository import ( TimeEntryCosmosDBRepository, ) @@ -84,7 +84,11 @@ def cosmos_db_model(): return { 'id': 'test', 'partition_key': PartitionKey(path='/tenant_id'), - 'unique_key_policy': {'uniqueKeys': [{'paths': ['/email']},]}, + 'unique_key_policy': { + 'uniqueKeys': [ + {'paths': ['/email']}, + ] + }, } diff --git a/tests/time_tracker_api/time_entries/time_entries_model_test.py b/tests/time_tracker_api/time_entries/time_entries_model_test.py index 002e8ccf..3883113c 100644 --- a/tests/time_tracker_api/time_entries/time_entries_model_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_model_test.py @@ -3,9 +3,11 @@ from commons.data_access_layer.database import EventContext from time_tracker_api.time_entries.time_entries_model import ( - 
TimeEntryCosmosDBRepository, TimeEntryCosmosDBModel, ) +from time_tracker_api.time_entries.time_entries_repository import ( + TimeEntryCosmosDBRepository, +) def create_time_entry( @@ -174,16 +176,20 @@ def test_find_interception_should_ignore_id_of_existing_item( ) try: - colliding_result = time_entry_repository.find_interception_with_date_range( - start_date, end_date, owner_id, tenant_id + colliding_result = ( + time_entry_repository.find_interception_with_date_range( + start_date, end_date, owner_id, tenant_id + ) ) - non_colliding_result = time_entry_repository.find_interception_with_date_range( - start_date, - end_date, - owner_id, - tenant_id, - ignore_id=existing_item.id, + non_colliding_result = ( + time_entry_repository.find_interception_with_date_range( + start_date, + end_date, + owner_id, + tenant_id, + ignore_id=existing_item.id, + ) ) assert colliding_result is not None diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index 0d5c19ef..429f594a 100644 --- a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -39,12 +39,11 @@ def test_create_time_entry_with_invalid_date_range_should_raise_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -65,12 +64,11 @@ def test_create_time_entry_with_invalid_date_range_should_raise_bad_request( def test_create_time_entry_with_end_date_in_future_should_raise_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + 
time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -92,12 +90,11 @@ def test_create_time_entry_with_end_date_in_future_should_raise_bad_request( def test_create_time_entry_should_succeed_with_valid_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_create_mock = mocker.patch.object( time_entries_dao.repository, 'create', return_value=fake_time_entry ) @@ -114,12 +111,11 @@ def test_create_time_entry_should_succeed_with_valid_request( def test_create_time_entry_with_missing_req_field_should_return_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_create_mock = mocker.patch.object( time_entries_dao.repository, 'create', return_value=fake_time_entry ) @@ -139,6 +135,25 @@ def test_create_time_entry_with_missing_req_field_should_return_bad_request( def test_list_all_time_entries( + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, +): + dao_get_all_mock = mocker.patch.object( + time_entries_dao, 'get_all', return_value=[] + ) + + response = client.get( + "/time-entries", headers=valid_header, follow_redirects=True + ) + + assert HTTPStatus.OK == response.status_code + assert [] == json.loads(response.data) + dao_get_all_mock.assert_called_once() + + +def test_list_last_time_entries( client: FlaskClient, mocker: MockFixture, valid_header: dict ): from time_tracker_api.time_entries.time_entries_namespace import ( @@ -146,11 
+161,11 @@ def test_list_all_time_entries( ) dao_get_all_mock = mocker.patch.object( - time_entries_dao, 'get_all', return_value=[] + time_entries_dao, 'get_lastest_entries_by_project', return_value=[] ) response = client.get( - "/time-entries", headers=valid_header, follow_redirects=True + "/time-entries/latest", headers=valid_header, follow_redirects=True ) assert HTTPStatus.OK == response.status_code @@ -161,10 +176,25 @@ def test_list_all_time_entries( def test_get_time_entry_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, + dao_get_all_mock = mocker.patch.object( + time_entries_dao, 'get_lastest_entries_by_project', return_value=[] ) + response = client.get( + "/time-entries/latest", headers=valid_header, follow_redirects=True + ) + + assert HTTPStatus.OK == response.status_code + assert [] == json.loads(response.data) + dao_get_all_mock.assert_called_once() + + +def test_get_time_entry_should_succeed_with_valid_id( + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, +): dao_get_mock = mocker.patch.object( time_entries_dao, 'get', return_value={} ) @@ -195,11 +225,8 @@ def test_get_time_entry_raise_http_exception( valid_id: str, http_exception: HTTPException, http_status: tuple, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.find = Mock(side_effect=http_exception) response = client.get( @@ -213,12 +240,12 @@ def test_get_time_entry_raise_http_exception( def test_update_time_entry_calls_partial_update_with_incoming_payload( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - 
) - time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -241,12 +268,11 @@ def test_update_time_entry_calls_partial_update_with_incoming_payload( def test_update_time_entry_should_reject_bad_request( - client: FlaskClient, mocker: MockFixture, valid_header: dict + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - invalid_time_entry_data = valid_time_entry_input.copy() invalid_time_entry_data.update( {"project_id": fake.pyint(min_value=1, max_value=100)} @@ -268,11 +294,12 @@ def test_update_time_entry_should_reject_bad_request( def test_update_time_entry_raise_not_found( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import NotFound time_entries_dao.repository.partial_update = Mock(side_effect=NotFound) @@ -297,12 +324,12 @@ def test_update_time_entry_raise_not_found( def test_delete_time_entry_calls_delete( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.delete = Mock(return_value=None) time_entries_dao.repository.find = Mock() time_entries_dao.check_whether_current_user_owns_item = Mock() @@ -333,11 +360,8 @@ def test_delete_time_entry_raise_http_exception( valid_id: str, http_exception: HTTPException, http_status: tuple, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - 
time_entries_dao.repository.delete = Mock(side_effect=http_exception) time_entries_dao.repository.find = Mock() time_entries_dao.check_whether_current_user_owns_item = Mock() @@ -355,12 +379,12 @@ def test_delete_time_entry_raise_http_exception( def test_stop_time_entry_calls_partial_update( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -382,11 +406,12 @@ def test_stop_time_entry_calls_partial_update( def test_stop_time_entry_raise_unprocessable_entity( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import UnprocessableEntity time_entries_dao.repository.partial_update = Mock( @@ -411,12 +436,12 @@ def test_stop_time_entry_raise_unprocessable_entity( def test_restart_time_entry_calls_partial_update( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - time_entries_dao.repository.partial_update = Mock(return_value={}) time_entries_dao.repository.find = Mock(return_value={}) @@ -438,11 +463,12 @@ def test_restart_time_entry_calls_partial_update( def test_restart_time_entry_raise_unprocessable_entity( - client: FlaskClient, mocker: MockFixture, valid_header: dict, valid_id: str + client: FlaskClient, + mocker: MockFixture, + valid_header: 
dict, + valid_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) from werkzeug.exceptions import UnprocessableEntity time_entries_dao.repository.partial_update = Mock( @@ -473,11 +499,8 @@ def test_get_running_should_call_find_running( valid_header: dict, tenant_id: str, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_update_mock = mocker.patch.object( time_entries_dao.repository, 'find_running', @@ -499,11 +522,8 @@ def test_get_running_should_return_not_found_if_StopIteration( valid_header: dict, tenant_id: str, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_update_mock = mocker.patch.object( time_entries_dao.repository, 'find_running', side_effect=StopIteration ) @@ -525,11 +545,8 @@ def test_create_with_invalid_uuid_format_should_return_bad_request( mocker: MockFixture, valid_header: dict, invalid_uuid: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -556,11 +573,8 @@ def test_create_with_valid_uuid_format_should_return_created( mocker: MockFixture, valid_header: dict, valid_uuid: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - repository_container_create_item_mock = mocker.patch.object( time_entries_dao.repository.container, 'create_item', @@ -594,7 +608,10 @@ def test_create_with_valid_uuid_format_should_return_created( ], ) def test_get_all_passes_date_range_built_from_params_to_find_all( - client: FlaskClient, valid_header: dict, url: str, time_entries_dao + client: FlaskClient, + valid_header: dict, + url: str, + time_entries_dao, 
): time_entries_dao.repository.find_all = Mock(return_value=[]) @@ -704,11 +721,8 @@ def test_summary_is_called_with_date_range_from_worked_time_module( mocker: MockFixture, valid_header: dict, owner_id: str, + time_entries_dao, ): - from time_tracker_api.time_entries.time_entries_namespace import ( - time_entries_dao, - ) - worked_time.date_range = Mock(return_value=worked_time.date_range()) repository_find_all_mock = mocker.patch.object( time_entries_dao.repository, 'find_all_entries', return_value=[] @@ -729,14 +743,16 @@ def test_summary_is_called_with_date_range_from_worked_time_module( def test_paginated_fails_with_no_params( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get('/time-entries/paginated', headers=valid_header) assert HTTPStatus.BAD_REQUEST == response.status_code def test_paginated_succeeds_with_valid_params( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get( '/time-entries/paginated?start_date=2020-09-10T00:00:00-05:00&end_date=2020-09-10T23:59:59-05:00&timezone_offset=300&start=0&length=5', @@ -746,7 +762,8 @@ def test_paginated_succeeds_with_valid_params( def test_paginated_response_contains_expected_props( - client: FlaskClient, valid_header: dict, + client: FlaskClient, + valid_header: dict, ): response = client.get( '/time-entries/paginated?start_date=2020-09-10T00:00:00-05:00&end_date=2020-09-10T23:59:59-05:00&timezone_offset=300&start=0&length=5', diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py new file mode 100644 index 00000000..07b01e4b --- /dev/null +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -0,0 +1,310 @@ +import abc +from commons.data_access_layer.cosmos_db import ( + CosmosDBDao, + CustomError, +) +from utils.extend_model import ( + add_project_info_to_time_entries, + add_activity_name_to_time_entries, + 
create_custom_query_from_str, +) +from utils.time import ( + datetime_str, + str_to_datetime, + get_current_year, + get_current_month, + get_date_range_of_month, + current_datetime_str, +) +from flask_restplus import abort +from flask_restplus._http import HTTPStatus +from time_tracker_api.activities import activities_model + +from time_tracker_api.projects import projects_model +from utils import worked_time +from datetime import timedelta +from time_tracker_api.time_entries.time_entries_repository import ( + TimeEntryCosmosDBRepository, +) +from time_tracker_api.database import CRUDDao, APICosmosDBDao +from time_tracker_api.security import current_user_id + + +class TimeEntriesDao(CRUDDao): + @staticmethod + def current_user_id(): + return current_user_id() + + @abc.abstractmethod + def find_running(self): + pass + + @abc.abstractmethod + def stop(self, id: str): + pass + + @abc.abstractmethod + def restart(self, id: str): + pass + + +class TimeEntriesCosmosDBDao(APICosmosDBDao, TimeEntriesDao): + def __init__(self, repository): + CosmosDBDao.__init__(self, repository) + + def check_whether_current_user_owns_item(self, data): + if ( + data.owner_id is not None + and data.owner_id != self.current_user_id() + ): + raise CustomError( + HTTPStatus.FORBIDDEN, + "The current user is not the owner of this time entry", + ) + + def check_time_entry_is_not_stopped(self, data): + if data.end_date is not None: + raise CustomError( + HTTPStatus.UNPROCESSABLE_ENTITY, + "The specified time entry is already stopped", + ) + + def check_time_entry_is_not_started(self, data): + if data.end_date is None: + raise CustomError( + HTTPStatus.UNPROCESSABLE_ENTITY, + "The specified time entry is already running", + ) + + def build_custom_query(self, is_admin: bool, conditions: dict = None): + custom_query = [] + if "user_id" in conditions: + if is_admin: + conditions.pop("owner_id") + custom_query = ( + [] + if conditions.get("user_id") == "*" + else [ + create_custom_query_from_str( + 
conditions.get("user_id"), "c.owner_id" + ) + ] + ) + conditions.pop("user_id") + else: + abort( + HTTPStatus.FORBIDDEN, "You don't have enough permissions." + ) + return custom_query + + def get_all(self, conditions: dict = None, **kwargs) -> list: + event_ctx = self.create_event_context("read-many") + conditions.update({"owner_id": event_ctx.user_id}) + + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + limit = conditions.get("limit", None) + conditions.pop("limit", None) + return self.repository.find_all( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=limit, + ) + + def get_lastest_entries_by_project( + self, conditions: dict = None, **kwargs + ) -> list: + event_ctx = self.create_event_context("read-many") + conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + + project_dao = projects_model.create_dao() + projects = project_dao.get_all() + projects_ids = [project.id for project in projects] + + activity_dao = activities_model.create_dao() + activities = activity_dao.get_all( + visible_only=False, + ) + + result = [] + for id_project in projects_ids: + conditions.update({"project_id": id_project}) + + limit = 1 + latest = self.repository.find_all_entries( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=limit, + ) + + if len(latest) > 0: + result.append(latest[0]) + + add_activity_name_to_time_entries(result, activities) + add_project_info_to_time_entries(result, projects) + + return result + + def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: + get_all_conditions = dict(conditions) + get_all_conditions.pop("length") + 
get_all_conditions.pop("start") + event_ctx = self.create_event_context("read-many") + get_all_conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=get_all_conditions, + ) + date_range = self.handle_date_filter_args(args=get_all_conditions) + records_total = self.repository.count( + event_ctx, + conditions=get_all_conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + ) + conditions.update({"owner_id": event_ctx.user_id}) + custom_query = self.build_custom_query( + is_admin=event_ctx.is_admin, + conditions=conditions, + ) + date_range = self.handle_date_filter_args(args=conditions) + length = conditions.get("length", None) + conditions.pop("length", None) + start = conditions.get("start", None) + conditions.pop("start", None) + + time_entries = self.repository.find_all( + event_ctx, + conditions=conditions, + custom_sql_conditions=custom_query, + date_range=date_range, + max_count=length, + offset=start, + ) + + return { + 'records_total': records_total, + 'data': time_entries, + } + + def get(self, id): + event_ctx = self.create_event_context("read") + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + + project_dao = projects_model.create_dao() + project = project_dao.get(time_entry.project_id) + setattr(time_entry, 'project_name', project.name) + return time_entry + + def create(self, data: dict): + event_ctx = self.create_event_context("create") + data['owner_id'] = event_ctx.user_id + return self.repository.create(data, event_ctx) + + def update(self, id, data: dict, description=None): + event_ctx = self.create_event_context("update", description) + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + + return self.repository.partial_update( + id, + data, + event_ctx, + ) + + def stop(self, id): + event_ctx = self.create_event_context("update", 
"Stop time entry") + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.check_time_entry_is_not_stopped(time_entry) + + return self.repository.partial_update( + id, + {'end_date': current_datetime_str()}, + event_ctx, + ) + + def restart(self, id): + event_ctx = self.create_event_context("update", "Restart time entry") + + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.check_time_entry_is_not_started(time_entry) + + return self.repository.partial_update( + id, + {'end_date': None}, + event_ctx, + ) + + def delete(self, id): + event_ctx = self.create_event_context("delete") + time_entry = self.repository.find(id, event_ctx) + self.check_whether_current_user_owns_item(time_entry) + self.repository.delete( + id, + event_ctx, + ) + + def find_running(self): + event_ctx = self.create_event_context("find_running") + time_entry = self.repository.find_running( + event_ctx.tenant_id, event_ctx.user_id + ) + return time_entry + + def get_worked_time(self, args: dict): + event_ctx = self.create_event_context( + "read", "Summary of worked time in the current month" + ) + + conditions = {"owner_id": event_ctx.user_id} + time_entries = self.repository.find_all_entries( + event_ctx, + conditions=conditions, + date_range=worked_time.date_range(), + ) + return worked_time.summary( + time_entries, time_offset=args.get('time_offset') + ) + + @staticmethod + def handle_date_filter_args(args: dict) -> dict: + if "start_date" and "end_date" in args: + start_date = str_to_datetime(args.pop('start_date')) + end_date = str_to_datetime(args.pop('end_date')) + else: + month = int(args.pop("month", get_current_month())) + year = int(args.pop("year", get_current_year())) + start_date, end_date = get_date_range_of_month(year, month) + + offset_in_minutes = int(args.pop('timezone_offset', 300)) + start_date = start_date + timedelta(minutes=offset_in_minutes) + end_date 
= end_date + timedelta(minutes=offset_in_minutes) + + return { + 'start_date': datetime_str(start_date), + 'end_date': datetime_str(end_date), + } + + +def create_dao() -> TimeEntriesDao: + repository = TimeEntryCosmosDBRepository() + + return TimeEntriesCosmosDBDao(repository) diff --git a/time_tracker_api/time_entries/time_entries_model.py b/time_tracker_api/time_entries/time_entries_model.py index 5abb71d1..68cb725e 100644 --- a/time_tracker_api/time_entries/time_entries_model.py +++ b/time_tracker_api/time_entries/time_entries_model.py @@ -40,25 +40,6 @@ from time_tracker_api.database import CRUDDao, APICosmosDBDao from time_tracker_api.security import current_user_id - -class TimeEntriesDao(CRUDDao): - @staticmethod - def current_user_id(): - return current_user_id() - - @abc.abstractmethod - def find_running(self): - pass - - @abc.abstractmethod - def stop(self, id: str): - pass - - @abc.abstractmethod - def restart(self, id: str): - pass - - container_definition = { 'id': 'time_entry', 'partition_key': PartitionKey(path='/tenant_id'), @@ -111,469 +92,3 @@ def __str___(self): return ( "Time Entry started in \"%s\"" % self.start_date ) # pragma: no cover - - -class TimeEntryCosmosDBRepository(CosmosDBRepository): - def __init__(self): - CosmosDBRepository.__init__( - self, - container_id=container_definition['id'], - partition_key_attribute='tenant_id', - order_fields=['start_date DESC'], - mapper=TimeEntryCosmosDBModel, - ) - - @staticmethod - def create_sql_ignore_id_condition(id: str): - if id is None: - return '' - else: - return "AND c.id!=@ignore_id" - - @staticmethod - def create_sql_date_range_filter(date_range: dict) -> str: - if 'start_date' and 'end_date' in date_range: - return """ - ((c.start_date BETWEEN @start_date AND @end_date) OR - (c.end_date BETWEEN @start_date AND @end_date)) - """ - else: - return '' - - def find_all_entries( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - 
date_range: dict = None, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - time_entries = CosmosDBRepository.find_all( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - ) - return time_entries - - def count( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - date_range: dict = None, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - counter = CosmosDBRepository.count( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - ) - return counter - - def find_all( - self, - event_context: EventContext, - conditions: dict = None, - custom_sql_conditions: List[str] = None, - date_range: dict = None, - **kwargs, - ): - conditions = conditions if conditions else {} - custom_sql_conditions = ( - custom_sql_conditions if custom_sql_conditions else [] - ) - date_range = date_range if date_range else {} - - custom_sql_conditions.append( - self.create_sql_date_range_filter(date_range) - ) - - custom_params = self.generate_params(date_range) - time_entries = CosmosDBRepository.find_all( - self, - event_context=event_context, - conditions=conditions, - custom_sql_conditions=custom_sql_conditions, - custom_params=custom_params, - max_count=kwargs.get("max_count", None), - 
offset=kwargs.get("offset", 0), - ) - - if time_entries: - custom_conditions = create_in_condition(time_entries, "project_id") - custom_conditions_activity = create_in_condition( - time_entries, "activity_id" - ) - - project_dao = projects_model.create_dao() - projects = project_dao.get_all( - custom_sql_conditions=[custom_conditions], - visible_only=False, - max_count=kwargs.get("max_count", None), - ) - - add_project_info_to_time_entries(time_entries, projects) - - activity_dao = activities_model.create_dao() - activities = activity_dao.get_all( - custom_sql_conditions=[custom_conditions_activity], - visible_only=False, - max_count=kwargs.get("max_count", None), - ) - add_activity_name_to_time_entries(time_entries, activities) - - users = AzureConnection().users() - add_user_email_to_time_entries(time_entries, users) - elif not time_entries and len(conditions) > 1: - abort(HTTPStatus.NOT_FOUND, "Time entry not found") - return time_entries - - def on_create(self, new_item_data: dict, event_context: EventContext): - CosmosDBRepository.on_create(self, new_item_data, event_context) - - if new_item_data.get("start_date") is None: - new_item_data['start_date'] = current_datetime_str() - - self.validate_data(new_item_data, event_context) - - def on_update(self, updated_item_data: dict, event_context: EventContext): - CosmosDBRepository.on_update(self, updated_item_data, event_context) - self.validate_data(updated_item_data, event_context) - self.replace_empty_value_per_none(updated_item_data) - - def find_interception_with_date_range( - self, - start_date, - end_date, - owner_id, - tenant_id, - ignore_id=None, - visible_only=True, - mapper: Callable = None, - ): - conditions = { - "owner_id": owner_id, - "tenant_id": tenant_id, - } - params = [ - {"name": "@start_date", "value": start_date}, - {"name": "@end_date", "value": end_date or current_datetime_str()}, - {"name": "@ignore_id", "value": ignore_id}, - ] - params.extend(self.generate_params(conditions)) - result = 
self.container.query_items( - query=""" - SELECT * FROM c - WHERE ((c.start_date BETWEEN @start_date AND @end_date) - OR (c.end_date BETWEEN @start_date AND @end_date)) - AND c.start_date!= @end_date - AND c.end_date!= @start_date - {conditions_clause} - {ignore_id_condition} - {visibility_condition} - {order_clause} - """.format( - ignore_id_condition=self.create_sql_ignore_id_condition( - ignore_id - ), - visibility_condition=self.create_sql_condition_for_visibility( - visible_only - ), - conditions_clause=self.create_sql_where_conditions(conditions), - order_clause=self.create_sql_order_clause(), - ), - parameters=params, - partition_key=tenant_id, - ) - - function_mapper = self.get_mapper_or_dict(mapper) - return list(map(function_mapper, result)) - - def find_running( - self, tenant_id: str, owner_id: str, mapper: Callable = None - ): - conditions = { - "owner_id": owner_id, - "tenant_id": tenant_id, - } - result = self.container.query_items( - query=""" - SELECT * from c - WHERE (NOT IS_DEFINED(c.end_date) OR c.end_date = null) - {conditions_clause} - {visibility_condition} - OFFSET 0 LIMIT 1 - """.format( - visibility_condition=self.create_sql_condition_for_visibility( - True - ), - conditions_clause=self.create_sql_where_conditions(conditions), - ), - parameters=self.generate_params(conditions), - partition_key=tenant_id, - max_item_count=1, - ) - - function_mapper = self.get_mapper_or_dict(mapper) - return function_mapper(next(result)) - - def validate_data(self, data, event_context: EventContext): - start_date = data.get('start_date') - - if data.get('end_date') is not None: - if data['end_date'] <= start_date: - raise CustomError( - HTTPStatus.BAD_REQUEST, - description="You must end the time entry after it started", - ) - if data['end_date'] >= current_datetime_str(): - raise CustomError( - HTTPStatus.BAD_REQUEST, - description="You cannot end a time entry in the future", - ) - - collision = self.find_interception_with_date_range( - 
start_date=start_date, - end_date=data.get('end_date'), - owner_id=event_context.user_id, - tenant_id=event_context.tenant_id, - ignore_id=data.get('id'), - ) - if len(collision) > 0: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - description="There is another time entry in that date range", - ) - - -class TimeEntriesCosmosDBDao(APICosmosDBDao, TimeEntriesDao): - def __init__(self, repository): - CosmosDBDao.__init__(self, repository) - - def check_whether_current_user_owns_item(self, data): - if ( - data.owner_id is not None - and data.owner_id != self.current_user_id() - ): - raise CustomError( - HTTPStatus.FORBIDDEN, - "The current user is not the owner of this time entry", - ) - - def check_time_entry_is_not_stopped(self, data): - if data.end_date is not None: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - "The specified time entry is already stopped", - ) - - def check_time_entry_is_not_started(self, data): - if data.end_date is None: - raise CustomError( - HTTPStatus.UNPROCESSABLE_ENTITY, - "The specified time entry is already running", - ) - - def build_custom_query(self, is_admin: bool, conditions: dict = None): - custom_query = [] - if "user_id" in conditions: - if is_admin: - conditions.pop("owner_id") - custom_query = ( - [] - if conditions.get("user_id") == "*" - else [ - create_custom_query_from_str( - conditions.get("user_id"), "c.owner_id" - ) - ] - ) - conditions.pop("user_id") - else: - abort( - HTTPStatus.FORBIDDEN, "You don't have enough permissions." 
- ) - return custom_query - - def get_all(self, conditions: dict = None, **kwargs) -> list: - event_ctx = self.create_event_context("read-many") - conditions.update({"owner_id": event_ctx.user_id}) - - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=conditions, - ) - date_range = self.handle_date_filter_args(args=conditions) - limit = conditions.get("limit", None) - conditions.pop("limit", None) - return self.repository.find_all( - event_ctx, - conditions=conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - max_count=limit, - ) - - def get_all_paginated(self, conditions: dict = None, **kwargs) -> list: - get_all_conditions = dict(conditions) - get_all_conditions.pop("length") - get_all_conditions.pop("start") - event_ctx = self.create_event_context("read-many") - get_all_conditions.update({"owner_id": event_ctx.user_id}) - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=get_all_conditions, - ) - date_range = self.handle_date_filter_args(args=get_all_conditions) - records_total = self.repository.count( - event_ctx, - conditions=get_all_conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - ) - conditions.update({"owner_id": event_ctx.user_id}) - custom_query = self.build_custom_query( - is_admin=event_ctx.is_admin, conditions=conditions, - ) - date_range = self.handle_date_filter_args(args=conditions) - length = conditions.get("length", None) - conditions.pop("length", None) - start = conditions.get("start", None) - conditions.pop("start", None) - - time_entries = self.repository.find_all( - event_ctx, - conditions=conditions, - custom_sql_conditions=custom_query, - date_range=date_range, - max_count=length, - offset=start, - ) - - return { - 'records_total': records_total, - 'data': time_entries, - } - - def get(self, id): - event_ctx = self.create_event_context("read") - - time_entry = self.repository.find(id, event_ctx) - 
self.check_whether_current_user_owns_item(time_entry) - - project_dao = projects_model.create_dao() - project = project_dao.get(time_entry.project_id) - setattr(time_entry, 'project_name', project.name) - return time_entry - - def create(self, data: dict): - event_ctx = self.create_event_context("create") - data['owner_id'] = event_ctx.user_id - return self.repository.create(data, event_ctx) - - def update(self, id, data: dict, description=None): - event_ctx = self.create_event_context("update", description) - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - - return self.repository.partial_update(id, data, event_ctx,) - - def stop(self, id): - event_ctx = self.create_event_context("update", "Stop time entry") - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.check_time_entry_is_not_stopped(time_entry) - - return self.repository.partial_update( - id, {'end_date': current_datetime_str()}, event_ctx, - ) - - def restart(self, id): - event_ctx = self.create_event_context("update", "Restart time entry") - - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.check_time_entry_is_not_started(time_entry) - - return self.repository.partial_update( - id, {'end_date': None}, event_ctx, - ) - - def delete(self, id): - event_ctx = self.create_event_context("delete") - time_entry = self.repository.find(id, event_ctx) - self.check_whether_current_user_owns_item(time_entry) - self.repository.delete( - id, event_ctx, - ) - - def find_running(self): - event_ctx = self.create_event_context("find_running") - time_entry = self.repository.find_running( - event_ctx.tenant_id, event_ctx.user_id - ) - return time_entry - - def get_worked_time(self, args: dict): - event_ctx = self.create_event_context( - "read", "Summary of worked time in the current month" - ) - - conditions = {"owner_id": 
event_ctx.user_id} - time_entries = self.repository.find_all_entries( - event_ctx, - conditions=conditions, - date_range=worked_time.date_range(), - ) - return worked_time.summary( - time_entries, time_offset=args.get('time_offset') - ) - - @staticmethod - def handle_date_filter_args(args: dict) -> dict: - if "start_date" and "end_date" in args: - start_date = str_to_datetime(args.pop('start_date')) - end_date = str_to_datetime(args.pop('end_date')) - else: - month = int(args.pop("month", get_current_month())) - year = int(args.pop("year", get_current_year())) - start_date, end_date = get_date_range_of_month(year, month) - - offset_in_minutes = int(args.pop('timezone_offset', 300)) - start_date = start_date + timedelta(minutes=offset_in_minutes) - end_date = end_date + timedelta(minutes=offset_in_minutes) - - return { - 'start_date': datetime_str(start_date), - 'end_date': datetime_str(end_date), - } - - -def create_dao() -> TimeEntriesDao: - repository = TimeEntryCosmosDBRepository() - - return TimeEntriesCosmosDBDao(repository) diff --git a/time_tracker_api/time_entries/time_entries_namespace.py b/time_tracker_api/time_entries/time_entries_namespace.py index da614e03..f2c115fc 100644 --- a/time_tracker_api/time_entries/time_entries_namespace.py +++ b/time_tracker_api/time_entries/time_entries_namespace.py @@ -16,7 +16,7 @@ NullableString, remove_required_constraint, ) -from time_tracker_api.time_entries.time_entries_model import create_dao +from time_tracker_api.time_entries.time_entries_dao import create_dao faker = Faker() @@ -256,6 +256,17 @@ def post(self): return time_entries_dao.create(ns.payload), HTTPStatus.CREATED +@ns.route('/latest') +class LatestTimeEntries(Resource): + @ns.doc('list_latest_time_entries') + @ns.marshal_list_with(time_entry) + @ns.response(HTTPStatus.NOT_FOUND, 'No time entries found') + def get(self): + """List the latest time entries""" + + return time_entries_dao.get_lastest_entries_by_project(conditions={}) + + @ns.route('/') 
@ns.response(HTTPStatus.NOT_FOUND, 'This time entry does not exist') @ns.response(HTTPStatus.UNPROCESSABLE_ENTITY, 'The id has an invalid format') diff --git a/time_tracker_api/time_entries/time_entries_repository.py b/time_tracker_api/time_entries/time_entries_repository.py new file mode 100644 index 00000000..b909b80d --- /dev/null +++ b/time_tracker_api/time_entries/time_entries_repository.py @@ -0,0 +1,289 @@ +from commons.data_access_layer.cosmos_db import ( + CosmosDBRepository, + CustomError, +) + +from time_tracker_api.time_entries.time_entries_model import ( + container_definition, + TimeEntryCosmosDBModel, +) + +from utils.time import ( + current_datetime_str, +) + +from utils.extend_model import ( + add_project_info_to_time_entries, + add_activity_name_to_time_entries, + create_in_condition, + add_user_email_to_time_entries, +) + +from flask_restplus import abort +from flask_restplus._http import HTTPStatus +from utils.azure_users import AzureConnection +from time_tracker_api.activities import activities_model +from commons.data_access_layer.database import EventContext +from typing import List, Callable +from time_tracker_api.projects import projects_model + + +class TimeEntryCosmosDBRepository(CosmosDBRepository): + def __init__(self): + CosmosDBRepository.__init__( + self, + container_id=container_definition['id'], + partition_key_attribute='tenant_id', + order_fields=['start_date DESC'], + mapper=TimeEntryCosmosDBModel, + ) + + @staticmethod + def create_sql_ignore_id_condition(id: str): + if id is None: + return '' + else: + return "AND c.id!=@ignore_id" + + @staticmethod + def create_sql_date_range_filter(date_range: dict) -> str: + if 'start_date' and 'end_date' in date_range: + return """ + ((c.start_date BETWEEN @start_date AND @end_date) OR + (c.end_date BETWEEN @start_date AND @end_date)) + """ + else: + return '' + + def find_all_entries( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = 
None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + time_entries = CosmosDBRepository.find_all( + self, + event_context=event_context, + conditions=conditions, + custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + max_count=kwargs.get("max_count", None), + offset=kwargs.get("offset", 0), + ) + return time_entries + + def count( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + counter = CosmosDBRepository.count( + self, + event_context=event_context, + conditions=conditions, + custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + ) + return counter + + def find_all( + self, + event_context: EventContext, + conditions: dict = None, + custom_sql_conditions: List[str] = None, + date_range: dict = None, + **kwargs, + ): + conditions = conditions if conditions else {} + custom_sql_conditions = ( + custom_sql_conditions if custom_sql_conditions else [] + ) + date_range = date_range if date_range else {} + + custom_sql_conditions.append( + self.create_sql_date_range_filter(date_range) + ) + + custom_params = self.generate_params(date_range) + time_entries = CosmosDBRepository.find_all( + self, + event_context=event_context, + conditions=conditions, + 
custom_sql_conditions=custom_sql_conditions, + custom_params=custom_params, + max_count=kwargs.get("max_count", None), + offset=kwargs.get("offset", 0), + ) + + if time_entries: + custom_conditions = create_in_condition(time_entries, "project_id") + custom_conditions_activity = create_in_condition( + time_entries, "activity_id" + ) + + project_dao = projects_model.create_dao() + projects = project_dao.get_all( + custom_sql_conditions=[custom_conditions], + visible_only=False, + max_count=kwargs.get("max_count", None), + ) + + add_project_info_to_time_entries(time_entries, projects) + + activity_dao = activities_model.create_dao() + activities = activity_dao.get_all( + custom_sql_conditions=[custom_conditions_activity], + visible_only=False, + max_count=kwargs.get("max_count", None), + ) + add_activity_name_to_time_entries(time_entries, activities) + + users = AzureConnection().users() + add_user_email_to_time_entries(time_entries, users) + elif not time_entries and len(conditions) > 1: + abort(HTTPStatus.NOT_FOUND, "Time entry not found") + return time_entries + + def on_create(self, new_item_data: dict, event_context: EventContext): + CosmosDBRepository.on_create(self, new_item_data, event_context) + + if new_item_data.get("start_date") is None: + new_item_data['start_date'] = current_datetime_str() + + self.validate_data(new_item_data, event_context) + + def on_update(self, updated_item_data: dict, event_context: EventContext): + CosmosDBRepository.on_update(self, updated_item_data, event_context) + self.validate_data(updated_item_data, event_context) + self.replace_empty_value_per_none(updated_item_data) + + def find_interception_with_date_range( + self, + start_date, + end_date, + owner_id, + tenant_id, + ignore_id=None, + visible_only=True, + mapper: Callable = None, + ): + conditions = { + "owner_id": owner_id, + "tenant_id": tenant_id, + } + params = [ + {"name": "@start_date", "value": start_date}, + {"name": "@end_date", "value": end_date or 
current_datetime_str()}, + {"name": "@ignore_id", "value": ignore_id}, + ] + params.extend(self.generate_params(conditions)) + result = self.container.query_items( + query=""" + SELECT * FROM c + WHERE ((c.start_date BETWEEN @start_date AND @end_date) + OR (c.end_date BETWEEN @start_date AND @end_date)) + AND c.start_date!= @end_date + AND c.end_date!= @start_date + {conditions_clause} + {ignore_id_condition} + {visibility_condition} + {order_clause} + """.format( + ignore_id_condition=self.create_sql_ignore_id_condition( + ignore_id + ), + visibility_condition=self.create_sql_condition_for_visibility( + visible_only + ), + conditions_clause=self.create_sql_where_conditions(conditions), + order_clause=self.create_sql_order_clause(), + ), + parameters=params, + partition_key=tenant_id, + ) + + function_mapper = self.get_mapper_or_dict(mapper) + return list(map(function_mapper, result)) + + def find_running( + self, tenant_id: str, owner_id: str, mapper: Callable = None + ): + conditions = { + "owner_id": owner_id, + "tenant_id": tenant_id, + } + result = self.container.query_items( + query=""" + SELECT * from c + WHERE (NOT IS_DEFINED(c.end_date) OR c.end_date = null) + {conditions_clause} + {visibility_condition} + OFFSET 0 LIMIT 1 + """.format( + visibility_condition=self.create_sql_condition_for_visibility( + True + ), + conditions_clause=self.create_sql_where_conditions(conditions), + ), + parameters=self.generate_params(conditions), + partition_key=tenant_id, + max_item_count=1, + ) + + function_mapper = self.get_mapper_or_dict(mapper) + return function_mapper(next(result)) + + def validate_data(self, data, event_context: EventContext): + start_date = data.get('start_date') + + if data.get('end_date') is not None: + if data['end_date'] <= start_date: + raise CustomError( + HTTPStatus.BAD_REQUEST, + description="You must end the time entry after it started", + ) + if data['end_date'] >= current_datetime_str(): + raise CustomError( + HTTPStatus.BAD_REQUEST, + 
description="You cannot end a time entry in the future", + ) + + collision = self.find_interception_with_date_range( + start_date=start_date, + end_date=data.get('end_date'), + owner_id=event_context.user_id, + tenant_id=event_context.tenant_id, + ignore_id=data.get('id'), + ) + if len(collision) > 0: + raise CustomError( + HTTPStatus.UNPROCESSABLE_ENTITY, + description="There is another time entry in that date range", + ) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index c6d6a56b..1f4f7fb6 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.24.3' +__version__ = '0.24.4'