diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 36bff27a..1c700563 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -14,6 +14,9 @@ jobs: runs-on: ubuntu-latest strategy: max-parallel: 5 + env: + ENVIRONMENT: ${{ secrets.ENVIRONMENT }} + TEST_DB_CONNECTION: ${{ secrets.TEST_DB_CONNECTION }} steps: - uses: actions/checkout@v2 - name: Set up Python 3.10.0 diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml new file mode 100644 index 00000000..323b8615 --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -0,0 +1,59 @@ +name: Time Tracker V1 CI - ON PR +
+on: + pull_request: + branches: [master] + +jobs: + time-tracker-ci-v1-on-pr: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ 
steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} + run: | + pytest -v + - name: Test the build of the app + run: | + docker build . diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml new file mode 100644 index 00000000..1c4c1b18 --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -0,0 +1,66 @@ +name: Time Tracker V1 CI - ON PUSH +
+on: + push: + branches: [master] + +jobs: + time-tracker-ci-v1-on-push: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: 
Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} + run: | + pytest tests + - name: Login to docker registry + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + - name: Build and push image + run: | + docker build . 
-t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} diff --git a/CHANGELOG.md b/CHANGELOG.md index b6b74353..0141b02d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,34 @@ +## v0.46.1 (2022-01-20) +### Fix +* TT-507 Error in time-entries list for different time zone ([#367](https://github.com/ioet/time-tracker-backend/issues/367)) ([`628da5c`](https://github.com/ioet/time-tracker-backend/commit/628da5c165c434bfc2a47ffc00222710cdc379b3)) + +## v0.46.0 (2021-12-04) +### Feature +* TT-384 add read file from blob storage 12.1 ([#366](https://github.com/ioet/time-tracker-backend/issues/366)) ([`fd39f66`](https://github.com/ioet/time-tracker-backend/commit/fd39f660dbd895fcc17d6767ca453bcc2b91ab7b)) + +## v0.45.0 (2021-12-04) +### Feature +* TT-414 get latest projects ([#363](https://github.com/ioet/time-tracker-backend/issues/363)) ([`aedf3d2`](https://github.com/ioet/time-tracker-backend/commit/aedf3d24d1cae9f40dcfb61196c619c15a1ac35c)) +* TT-418 crud customer v2 ([#361](https://github.com/ioet/time-tracker-backend/issues/361)) ([`1db51d6`](https://github.com/ioet/time-tracker-backend/commit/1db51d68824a429730fedd1cbf58ee334ee00fa0)) +* TT-429 created enums for response messages ([#362](https://github.com/ioet/time-tracker-backend/issues/362)) ([`fd0bc98`](https://github.com/ioet/time-tracker-backend/commit/fd0bc986fcc074bd5f6d6e9b7b602951375f2aee)) +* TT-404 GET Time Entries ([#341](https://github.com/ioet/time-tracker-backend/issues/341)) ([`c8a3134`](https://github.com/ioet/time-tracker-backend/commit/c8a31341b120792f46442815fad2d463262302ab)) +* TT-417-crud-v2-projects ([#360](https://github.com/ioet/time-tracker-backend/issues/360)) ([`10ec2bb`](https://github.com/ioet/time-tracker-backend/commit/10ec2bb9e2b5f67358c00b549a376b7f610041de)) +* TT-402 put v2 time entries ([#347](https://github.com/ioet/time-tracker-backend/issues/347)) 
([`48f6411`](https://github.com/ioet/time-tracker-backend/commit/48f641170a968c7f12bc60f7882b0f4eda6cede2)) +* TT-418 CRUD customer v2 ([#359](https://github.com/ioet/time-tracker-backend/issues/359)) ([`d6c4c4d`](https://github.com/ioet/time-tracker-backend/commit/d6c4c4d67e72db867f197af8c7f8147839d6c178)) +* TT-403 delete v2 time entries ([#346](https://github.com/ioet/time-tracker-backend/issues/346)) ([`60a0dc7`](https://github.com/ioet/time-tracker-backend/commit/60a0dc7015f98b24a3429b1ceabf31e722741649)) +* TT-401-Post-v2-time-entries ([#344](https://github.com/ioet/time-tracker-backend/issues/344)) ([`5f107f3`](https://github.com/ioet/time-tracker-backend/commit/5f107f33cb640f7fa8e498db2157efb2d11f401d)) +* TT-399 Config use makefile to executing tests ([#350](https://github.com/ioet/time-tracker-backend/issues/350)) ([`32ee36f`](https://github.com/ioet/time-tracker-backend/commit/32ee36f39e81866c2f0767cf243c61afde6841c9)) + +### Fix +* TT-401 change in activity database instance and refactor test ([#355](https://github.com/ioet/time-tracker-backend/issues/355)) ([`b81319f`](https://github.com/ioet/time-tracker-backend/commit/b81319fe12bff57816dac1d0354000bfc6674c1c)) + +### Documentation +* TT-419 update readme V2 ([#357](https://github.com/ioet/time-tracker-backend/issues/357)) ([`6dd8505`](https://github.com/ioet/time-tracker-backend/commit/6dd85055b666888c7a22ffa1635b2e53903e7942)) +* TT-399 Readme update how to use makefile ([#354](https://github.com/ioet/time-tracker-backend/issues/354)) ([`10cc426`](https://github.com/ioet/time-tracker-backend/commit/10cc4269e4e60c6eff77bf1cf02cdf0d31dac86f)) + ## v0.44.0 (2021-11-15) ### Feature * TT-357 Create V2 Activities Azure DAO ([#334](https://github.com/ioet/time-tracker-backend/issues/334)) ([`3a99add`](https://github.com/ioet/time-tracker-backend/commit/3a99add39a3130c540d86b02c5a69dbda8536e8e)) diff --git a/V2/Makefile b/V2/Makefile index 45080238..cf02904b 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ 
-1,8 +1,37 @@ +.PHONY: help +help: + @echo "---------------HELP-----------------" + @echo "- make install --> Install the dependencies" + @echo "- make test --> Run all tests" + @echo "- make test specific_test= --> Run specific test" + @echo "- make start-local --> Run local database" + @echo "- make ci --> Install the dependencies and run all tests" + @echo "------------------------------------" + +.PHONY: install install: - @echo "Installing Time Tracker" + @echo "=========================================Installing dependencies Time Tracker=========================================" npm install pip install --upgrade pip pip install -r requirements.txt @echo "Completed! " + +.PHONY: test +test: export ENVIRONMENT = test +test: + @echo "=========================================Lint with flake8=========================================" + flake8 . --show-source --statistics + @echo "Completed flake8!" + @echo "=========================================Test with pytest=========================================" + @if [ "$(specific_test)" ]; then \ + python -m pytest -vv -s -k $(specific_test);\ + else \ + python -m pytest -v;\ + fi + @echo "Completed test!" + start-local: - docker compose up \ No newline at end of file + docker compose up + +.PHONY: ci +ci: install test \ No newline at end of file diff --git a/V2/README.md b/V2/README.md index e84c0268..64726b30 100644 --- a/V2/README.md +++ b/V2/README.md @@ -1,3 +1,108 @@ -# Azure Functions +# **Time-tracker-api V2 Architecture** +Architecture +The application follows a DDD approach with a hexagonal clean architecture. BIG WORDS!, what does it mean? it means the following: -Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. +We have a directory for each domain entitiy (i.e. 
time entries, technologies, activities, etc) +Inside each entity directory we have other 3 directories (application, domain and infrastructure) +I'll leave this drawing to understand how these three folders work and what logic should be included in these directories + +![ddd.png](https://raw.githubusercontent.com/eguezgustavo/time_tracker_app_skeleton/master/ddd.png) +More information [Here](https://github.com/eguezgustavo/time_tracker_app_skeleton) + +## **Stack Technologies** + - [Serverless](https://serverless.com/framework/docs/providers/azure/guide/intro/) + - Python + - Pytest + - Docker Compose + +Recommended link [tdd_dojo](https://github.com/eguezgustavo/tdd_dojo) + +## **Setup environment** + +### **Requeriments** + +- Install python 3.6 or 3.7 (recommendation to install python [pyenv](https://github.com/pyenv/pyenv)) +- Install node (recommendation to install node [nvm](https://www.digitalocean.com/community/tutorials/how-to-install-node-js-on-ubuntu-20-04-es#:~:text=de%20Node.js.-,Opci%C3%B3n%203%3A%20Instalar%20Node%20usando%20el%20administrador%20de%20versiones%20de%20Node,-Otra%20forma%20de)) + +### **Add variables** +In the root directory /time-tracker-backend create a file .env with these values + +``` +export MS_AUTHORITY=XXX +export MS_CLIENT_ID=XXX +export MS_SCOPE=XXX +export MS_SECRET=XXX +export MS_ENDPOINT=XXX +export DATABASE_ACCOUNT_URI=XXX +export DATABASE_MASTER_KEY=XXX +export DATABASE_NAME=XXX +export FLASK_APP=XXX +export FLASK_ENV=XXX +export AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXX +export USERID=XXX +export FLASK_DEBUG=True +export PYTHONPATH=XXX +export DB_CONNECTION=XXX +export ENVIRONMENT=XXX +``` + +In the directory /V2 create a file .env with these values +``` +DB_USER=XXX +DB_PASS=XXX +DB_NAME=XXX +``` + +### **Install dependencies** +In the Directory /V2 +``` +make install +``` + +## **Start Project** +In the directory /V2 +``` +npm run offline +docker compose up or make start-local +``` + + +## **Makefile to run a 
local CI** + + Execute the next command to show makefile help: + + ```shell + $ make help + ``` + + - To install the dependencies type the command ```make install``` + - To test the project type the command ```make test``` + - To run the local database type the command ```make start-local``` + + ## **How to contribute to the project** + Clone the repository and from the master branch create a new branch for each new task. + ### **Branch names format** + For example if your task in Jira is **TT-48 implement semantic versioning** your branch name is: + ``` + TT-48-implement-semantic-versioning + ``` + ### **Commit messages format** + + + Below there are some common examples you can use for your commit messages [semantic version](https://semver.org/) : + + - **feat**: A new feature. + - **fix**: A bug fix. + - **perf**: A code change that improves performance. + - **build**: Changes that affect the build system or external dependencies (example scopes: npm, ts configuration). + - **ci**: Changes to our CI or CD configuration files and scripts (example scopes: Azure devops, github actions). + - **docs**: Documentation only changes. + - **refactor**: A code change that neither fixes a bug nor adds a feature. + It is important to mention that this key is not related to css styles. + - **test**: Adding missing tests or correcting existing tests. 
+ + ### Example + fix: TT-48 implement semantic versioning + + Prefix to use in the space fix: + `(fix: |feat: |perf: |build: |ci: |docs: |refactor: |style: |test: )` \ No newline at end of file diff --git a/V2/create_activity/function.json b/V2/create_activity/function.json deleted file mode 100644 index ed3454a9..00000000 --- a/V2/create_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/", - "authLevel": "anonymous", - "methods": [ - "POST" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "create_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/delete_activity/function.json b/V2/delete_activity/function.json deleted file mode 100644 index d51170fd..00000000 --- a/V2/delete_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "DELETE" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "delete_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/get_activities/function.json b/V2/get_activities/function.json deleted file mode 100644 index ee1efe53..00000000 --- a/V2/get_activities/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id:?}", - "authLevel": "anonymous", - "methods": [ - "GET" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "get_activities", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git 
a/V2/serverless.yml b/V2/serverless.yml index 0eb3f42f..66fa83af 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -1,6 +1,6 @@ service: azure-time-tracker -frameworkVersion: "2" +frameworkVersion: '2' provider: name: azure @@ -23,20 +23,25 @@ plugins: package: patterns: - - "!env/**" - - "!.env/**" - - "!local.settings.json" - - "!.vscode/**" - - "!__pycache__/**" - - "!node_modules/**" - - "!.python_packages/**" - - "!.funcignore" - - "!package.json" - - "!package-lock.json" - - "!.gitignore" - - "!.git/**" + - '!env/**' + - '!.env/**' + - '!local.settings.json' + - '!.vscode/**' + - '!__pycache__/**' + - '!node_modules/**' + - '!.python_packages/**' + - '!.funcignore' + - '!package.json' + - '!package-lock.json' + - '!.gitignore' + - '!.git/**' + +#region start Functions functions: + +#region Start Functions Activities + get_activities: handler: time_tracker/activities/interface.get_activities events: @@ -63,9 +68,9 @@ functions: - http: true x-azure-settings: methods: - - PUT + - PUT route: activities/{id} - authLevel: anonymous + authLevel: anonymous create_activity: handler: time_tracker/activities/interface.create_activity @@ -76,3 +81,160 @@ functions: - POST route: activities/ authLevel: anonymous + +#endregion End Functions Activities + +#region Start Functions Time-Entries + + create_time_entry: + handler: time_tracker/time_entries/interface.create_time_entry + events: + - http: true + x-azure-settings: + methods: + - POST + route: time-entries/ + authLevel: anonymous + + get_time_entries: + handler: time_tracker/time_entries/interface.get_time_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/{id:?} + authLevel: anonymous + + delete_time_entry: + handler: time_tracker/time_entries/interface.delete_time_entry + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: time-entries/{id} + authLevel: anonymous + + update_time_entry: + handler: 
time_tracker/time_entries/interface.update_time_entry + events: + - http: true + x-azure-settings: + methods: + - PUT + route: time-entries/{id} + authLevel: anonymous + + get_latest_time_entry: + handler: time_tracker/time_entries/interface.get_latest_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/latest/ + authLevel: anonymous + +#endregion End Functions Time-Entries + +#region Start Functions Customers + + create_customer: + handler: time_tracker/customers/interface.create_customer + events: + - http: true + x-azure-settings: + methods: + - POST + route: customers/ + authLevel: anonymous + + get_customers: + handler: time_tracker/customers/interface.get_customers + events: + - http: true + x-azure-settings: + methods: + - GET + route: customers/{id:?} + authLevel: anonymous + + update_customer: + handler: time_tracker/customers/interface.update_customer + events: + - http: true + x-azure-settings: + methods: + - PUT + route: customers/{id} + authLevel: anonymous + + delete_customer: + handler: time_tracker/customers/interface.delete_customer + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: customers/{id} + authLevel: anonymous + +#endregion End Functions Customers + +#region Start Functions Projects + + get_projects: + handler: time_tracker/projects/interface.get_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/{id:?} + authLevel: anonymous + + delete_project: + handler: time_tracker/projects/interface.delete_project + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: projects/{id} + authLevel: anonymous + + update_project: + handler: time_tracker/projects/interface.update_project + events: + - http: true + x-azure-settings: + methods: + - PUT + route: projects/{id} + authLevel: anonymous + + create_project: + handler: time_tracker/projects/interface.create_project + events: + - http: true + x-azure-settings: + methods: + - 
POST + route: projects/ + + authLevel: anonymous + + get_latest_projects: + handler: time_tracker/projects/interface.get_latest_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/latest + authLevel: anonymous + +#endregion End Functions Projects + +#endregion End Functions diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 9b2618a8..7c0de311 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,37 +1,22 @@ -import pytest import json from faker import Faker import azure.functions as func import time_tracker.activities._application._activities as azure_activities -import time_tracker.activities._infrastructure as infrastructure -from time_tracker._infrastructure import DB -from time_tracker.activities import _domain ACTIVITY_URL = '/api/activities/' -@pytest.fixture(name='insert_activity') -def _insert_activity() -> dict: - def _new_activity(activity: _domain.Activity, database: DB): - dao = infrastructure.ActivitiesSQLDao(database) - new_activity = dao.create(activity) - return new_activity.__dict__ - return _new_activity - - def test__activity_azure_endpoint__returns_all_activities( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], fake_database), - insert_activity(existent_activities[1], fake_database) + insert_activity(existent_activities[0], test_db).__dict__, + insert_activity(existent_activities[1], test_db).__dict__ ] - azure_activities._get_activities.DATABASE = fake_database req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) response = azure_activities._get_activities.get_activities(req) activities_json_data = 
response.get_body().decode("utf-8") @@ -41,13 +26,11 @@ def test__activity_azure_endpoint__returns_all_activities( def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._get_activities.DATABASE = fake_database req = func.HttpRequest( method='GET', body=None, @@ -63,13 +46,11 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._delete_activity.DATABASE = fake_database req = func.HttpRequest( method='DELETE', body=None, @@ -86,13 +67,11 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._update_activity.DATABASE = fake_database activity_body = {"description": Faker().sentence()} req = func.HttpRequest( method='PUT', @@ -109,10 
+88,7 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act assert activitiy_json_data == json.dumps(inserted_activity) -def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_fake_database, - ): - azure_activities._create_activity.DATABASE = create_fake_database +def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes(): activity_body = { 'id': None, 'name': Faker().user_name(), diff --git a/V2/tests/api/azure/customer_azure_endpoints_test.py b/V2/tests/api/azure/customer_azure_endpoints_test.py new file mode 100644 index 00000000..f1f35d4f --- /dev/null +++ b/V2/tests/api/azure/customer_azure_endpoints_test.py @@ -0,0 +1,214 @@ +from http import HTTPStatus +import json +from faker import Faker + +import azure.functions as func + +import time_tracker.customers._application._customers as azure_customers + +CUSTOMER_URL = "/api/customers/" + + +def test__create_customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + customer_body = customer_factory().__dict__ + + body = json.dumps(customer_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + customer_json_data = json.loads(response.get_body()) + customer_body['id'] = customer_json_data['id'] + + assert response.status_code == HTTPStatus.CREATED + assert customer_json_data == customer_body + + +def test__create_customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): + customer_to_insert = { + "id": None, + "name": Faker().user_name(), + "deleted": False, + "status": 1 + } + + body = json.dumps(customer_to_insert).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + + assert 
response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customer_azure_endpoint__returns_a_customer_with_true_deleted__when_its_id_is_found( + test_db, customer_factory, insert_customer +): + customer_preinsert = customer_factory() + inserted_customer = insert_customer(customer_preinsert, test_db).__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + customer_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data['deleted'] is True + + +def test__delete_customer_azure_endpoint__returns_not_found__when_its_id_is_not_found(): + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b'Not found' + + +def test__update_customer_azure_endpoint__returns_an_updated_customer__when_customer_has_all_necesary_attributes( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + customer_json_data = json.loads(response.get_body()) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == inserted_customer + + +def 
test__update_customer_azure_endpoint__returns_update_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + existent_customer = customer_factory().__dict__ + + body = json.dumps(existent_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.CONFLICT + assert response.get_body() == b'This customer does not exist or is duplicated' + + +def test__update_customer_azure_endpoint__returns_invalid_format__when_customer_doesnt_have_all_necesary_attributes( + customer_factory, insert_customer, test_db +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer.pop("name") + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customers_azure_endpoint__returns_a_status_code_400__when_customer_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=CUSTOMER_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__customers_azure_endpoint__returns_all_customers( + test_db, customer_factory, insert_customer +): + customer_to_insert = customer_factory() + + inserted_customer = insert_customer(customer_to_insert, test_db).__dict__ + + req = func.HttpRequest(method='GET', body=None, 
url=CUSTOMER_URL) + response = azure_customers._get_customers.get_customers(req) + customers_json_data = response.get_body().decode("utf-8") + customer_list = json.loads(customers_json_data) + + assert response.status_code == HTTPStatus.OK + assert customers_json_data <= json.dumps(inserted_customer) + assert customer_list.pop() == inserted_customer + + +def test__customer_azure_endpoint__returns_a_customer__when_customer_matches_its_id( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._get_customers.get_customers(req) + customer_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == json.dumps(inserted_customer) + + +def test__customer_azure_endpoint__returns_invalid_id__when_customer_not_matches_its_id(): + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": "Invalid ID"}, + ) + + response = azure_customers._get_customers.get_customers(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'The id has an invalid format' diff --git a/V2/tests/api/azure/project_azure_endpoints_test.py b/V2/tests/api/azure/project_azure_endpoints_test.py new file mode 100644 index 00000000..b48a13dc --- /dev/null +++ b/V2/tests/api/azure/project_azure_endpoints_test.py @@ -0,0 +1,271 @@ +import json +from http import HTTPStatus + +from faker import Faker +import azure.functions as func + +from time_tracker.projects._application import _projects as azure_projects + +PROJECT_URL = '/api/projects/' + + +def test__project_azure_endpoint__returns_all_projects( + insert_project +): + inserted_projects = [ + insert_project().__dict__, + insert_project().__dict__ + 
] + + req = func.HttpRequest(method='GET', body=None, url=PROJECT_URL) + response = azure_projects._get_projects.get_projects(req) + projects_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == json.dumps(inserted_projects) + + +def test__project_azure_endpoint__returns_a_project__when_project_matches_its_id( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._get_projects.get_projects(req) + activitiy_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="GET", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._get_projects.get_projects(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__returns_a_project_with_inactive_status__when_a_project_matching_its_id_is_found( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._delete_project.delete_project(req) + project_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert project_json_data['status'] == 0 + assert project_json_data['deleted'] is True + + +def test__delete_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + 
route_params={"id": "invalid id"}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__delete_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_delete( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_project_azure_endpoint__returns_a_project__when_found_a_project_to_update( + insert_project +): + inserted_project = insert_project().__dict__ + + project_body = {"description": Faker().sentence()} + req = func.HttpRequest( + method='PUT', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._update_project.update_project(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_project.update(project_body) + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__update_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_update( +): + project_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._update_project.update_project(req) + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_receive_an_incorrect_body( +): + project_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + 
body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Incorrect body" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="PUT", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__creates_a_project__when_project_has_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted = insert_customer(customer_factory(), test_db) + project_body = project_factory(inserted.id).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + project_json_data = json.loads(response.get_body()) + project_body['id'] = project_json_data['id'] + + assert response.status_code == HTTPStatus.CREATED + assert project_json_data == project_body + + +def test__project_azure_endpoint__returns_a_status_code_400__when_project_does_not_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted_customer = insert_customer(customer_factory(), test_db) + project_body = project_factory(customer_id=inserted_customer.id).__dict__ + project_body.pop('name') + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == json.dumps(['The name key is missing in the 
input data']).encode() + + +def test__project_azure_endpoint__returns_a_status_code_500__when_project_receive_incorrect_type_data( + project_factory, insert_customer, customer_factory, test_db +): + insert_customer(customer_factory(), test_db) + project_body = project_factory(technologies=Faker().pylist(2, True, str)).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR + assert response.get_body() == b"could not be created" + + +def test__get_latest_projects_azure_endpoint__returns_a_list_of_latest_projects__when_an_owner_id_match( + insert_time_entry +): + inserted_time_entry = insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert inserted_time_entry["project_id"] == projects_json_data[0]["id"] + + +def test__get_latest_projects_azure_endpoint__returns_an_empty_list__when_an_owner_id_not_match( + insert_time_entry +): + insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == [] diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py new file mode 100644 index 00000000..13e3e875 --- /dev/null +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -0,0 +1,274 @@ +import pytest +import 
json +from faker import Faker +from http import HTTPStatus + +import azure.functions as func + +import time_tracker.time_entries._application._time_entries as azure_time_entries +from time_tracker._infrastructure import DB +from time_tracker.time_entries import _domain as domain_time_entries +from time_tracker.time_entries import _infrastructure as infrastructure_time_entries +from time_tracker.utils.enums import ResponseEnums + + +TIME_ENTRY_URL = "/api/time-entries/" + + +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry() -> domain_time_entries.TimeEntry: + def _new_time_entry(time_entry: domain_time_entries.TimeEntry, database: DB): + dao = infrastructure_time_entries.TimeEntriesSQLDao(database) + new_time_entry = dao.create(time_entry) + return new_time_entry + return _new_time_entry + + +def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_all_attributes( + test_db, time_entry_factory, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entry_body = time_entry_factory(activity_id=inserted_activity.id).__dict__ + + body = json.dumps(time_entry_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=TIME_ENTRY_URL, + ) + + response = azure_time_entries._create_time_entry.create_time_entry(req) + time_entry_json_data = json.loads(response.get_body()) + time_entry_body['id'] = time_entry_json_data['id'] + + assert response.status_code == HTTPStatus.CREATED + assert time_entry_json_data == time_entry_body + + +def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_deleted__when_its_id_is_found( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], 
project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db) + + req = func.HttpRequest( + method='DELETE', + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entry.id}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data['deleted'] is True + + +def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__time_entry_azure_endpoint__returns_all_time_entries( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest(method="GET", body=None, url=TIME_ENTRY_URL) + + response = azure_time_entries.get_time_entries(req) + time_entries_json_data = response.get_body().decode("utf-8") + time_entry_list = json.loads(time_entries_json_data) + + assert response.status_code == HTTPStatus.OK + assert time_entry_list.pop() == inserted_time_entries + + +def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matches_its_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project +): + inserted_project = insert_project() + inserted_activity = 
insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entries["id"]}, + ) + + response = azure_time_entries.get_time_entries(req) + time_entry_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data == json.dumps(inserted_time_entries) + + +def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries.get_time_entries(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + 
params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert time_entry_json_data == [inserted_time_entry] + + +def test__get_latest_entries_azure_endpoint__returns_no_time_entries_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + +def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an_time_entry_to_update( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method='PUT', + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entry["id"]}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_time_entry.update(time_entry_body) + + assert response.status_code == 200 + assert activitiy_json_data == json.dumps(inserted_time_entry) + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_format_id(): + time_entry_body = {"description": 
Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().sentence()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid Format ID' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_404__when_not_found_an_time_entry_to_update(): + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_body(): + + time_entry_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Incorrect time entry body' + + +def test__get_latest_entries_azure_endpoint__returns_not_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body().decode("utf-8") == ResponseEnums.NOT_FOUND.value diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index d1c4928f..c11fc951 100644 --- 
a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,2 +1,5 @@ # flake8: noqa -from fixtures import _activity_factory, _create_fake_dao, _create_fake_database \ No newline at end of file +from fixtures import _activity_factory, _test_db, _insert_activity +from fixtures import _time_entry_factory, _insert_time_entry +from fixtures import _customer_factory, _insert_customer +from fixtures import _project_factory, _insert_project diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index d9539035..82391ebf 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -1,17 +1,26 @@ import pytest +from faker import Faker -import time_tracker.activities._domain as domain -import time_tracker.activities._infrastructure as infrastructure +import time_tracker.activities._domain as activities_domain +import time_tracker.time_entries._domain as time_entries_domain +import time_tracker.time_entries._infrastructure as time_entries_infrastructure +import time_tracker.customers._domain as customers_domain +import time_tracker.activities._infrastructure as activities_infrastructure +import time_tracker.customers._infrastructure as customers_infrastructure +import time_tracker.projects._domain as projects_domain +import time_tracker.projects._infrastructure as projects_infrastructure from time_tracker._infrastructure import DB -from faker import Faker @pytest.fixture(name='activity_factory') -def _activity_factory() -> domain.Activity: +def _activity_factory() -> activities_domain.Activity: def _make_activity( - name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, ): - activity = domain.Activity( + activity = activities_domain.Activity( id=None, name=name, description=description, @@ -19,17 +28,141 @@ def _make_activity( status=status ) return activity + return _make_activity 
-@pytest.fixture(name='create_fake_dao') -def _create_fake_dao() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - dao = infrastructure.ActivitiesSQLDao(db_fake) - return dao +@pytest.fixture(name='test_db') +def _test_db() -> DB: + db_fake = DB() + db_fake.get_session().execute("pragma foreign_keys=ON") + return db_fake -@pytest.fixture(name='create_fake_database') -def _create_fake_database() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - return db_fake +@pytest.fixture(name='time_entry_factory') +def _time_entry_factory() -> time_entries_domain.TimeEntry: + def _make_time_entry( + id=Faker().random_int(), + start_date=str(Faker().date_time()), + owner_id=Faker().random_int(), + description=Faker().sentence(), + activity_id=Faker().random_int(), + uri=Faker().domain_name(), + technologies=str(Faker().pylist()), + end_date=str(Faker().date_time()), + deleted=False, + timezone_offset="300", + project_id=Faker().random_int(), + ): + time_entry = time_entries_domain.TimeEntry( + id=id, + start_date=start_date, + owner_id=owner_id, + description=description, + activity_id=activity_id, + uri=uri, + technologies=technologies, + end_date=end_date, + deleted=deleted, + timezone_offset=timezone_offset, + project_id=project_id, + ) + return time_entry + return _make_time_entry + + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> dict: + def _new_activity(activity: activities_domain.Activity, database: DB): + dao = activities_infrastructure.ActivitiesSQLDao(database) + new_activity = dao.create(activity) + return new_activity + return _new_activity + + +@pytest.fixture(name='customer_factory') +def _customer_factory() -> customers_domain.Customer: + def _make_customer( + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, + ): + customer = customers_domain.Customer( + id=None, + name=name, + description=description, + deleted=deleted, + status=status + ) + return 
customer + + return _make_customer + + +@pytest.fixture(name='project_factory') +def _project_factory() -> projects_domain.Project: + def _make_project( + id=Faker().pyint(), + name=Faker().name(), + description=Faker().sentence(), + project_type_id=Faker().pyint(), + customer_id=Faker().pyint(), + status=Faker().pyint(), + deleted=False, + technologies=str(Faker().pylist()), + customer=None + ): + project = projects_domain.Project( + id=id, + name=name, + description=description, + project_type_id=project_type_id, + customer_id=customer_id, + status=status, + deleted=deleted, + technologies=technologies, + customer=customer + ) + return project + return _make_project + + +@pytest.fixture(name='insert_customer') +def _insert_customer() -> customers_domain.Customer: + def _new_customer(customer: customers_domain.Customer, database: DB): + dao = customers_infrastructure.CustomersSQLDao(database) + new_customer = dao.create(customer) + return new_customer + return _new_customer + + +@pytest.fixture(name='insert_project') +def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> projects_domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(id=None, customer_id=inserted_customer.id, deleted=False) + dao = projects_infrastructure.ProjectsSQLDao(test_db) + inserted_project = dao.create(project_to_insert) + return inserted_project + return _new_project + + +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry( + test_db, insert_project, activity_factory, insert_activity, time_entry_factory +) -> time_entries_domain.TimeEntry: + + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db) + + def _new_time_entry(owner_id: int = Faker().pyint()): + dao = time_entries_infrastructure.TimeEntriesSQLDao(test_db) + time_entries_to_insert = time_entry_factory( + activity_id=inserted_activity.id, 
project_id=inserted_project.id, owner_id=owner_id + ) + + inserted_time_entries = dao.create(time_entries_to_insert) + return inserted_time_entries + return _new_time_entry diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_dao_test.py similarity index 86% rename from V2/tests/integration/daos/activities_sql_dao_test.py rename to V2/tests/integration/daos/activities_dao_test.py index 25f62500..637a7799 100644 --- a/V2/tests/integration/daos/activities_sql_dao_test.py +++ b/V2/tests/integration/daos/activities_dao_test.py @@ -7,18 +7,17 @@ from time_tracker._infrastructure import DB -@pytest.fixture(name='insert_activity') -def _insert_activity() -> domain.Activity: - def _new_activity(activity: domain.Activity, dao: domain.ActivitiesDao): - new_activity = dao.create(activity) - return new_activity - return _new_activity +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ActivitiesDao: + db_fake = DB() + dao = infrastructure.ActivitiesSQLDao(db_fake) + return dao @pytest.fixture(name='clean_database', autouse=True) def _clean_database(): yield - db_fake = DB('sqlite:///:memory:') + db_fake = DB() dao = infrastructure.ActivitiesSQLDao(db_fake) query = dao.activity.delete() dao.db.get_session().execute(query) @@ -41,7 +40,7 @@ def test_update__returns_an_update_activity__when_an_activity_matching_its_id_is ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) expected_description = Faker().sentence() updated_activity = dao.update(inserted_activity.id, None, expected_description, None, None) @@ -68,8 +67,8 @@ def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_acti dao = create_fake_dao existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], dao), - 
insert_activity(existent_activities[1], dao) + insert_activity(existent_activities[0], dao.db), + insert_activity(existent_activities[1], dao.db) ] activities = dao.get_all() @@ -83,7 +82,7 @@ def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matche ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.get_by_id(inserted_activity.id) @@ -117,7 +116,7 @@ def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matc ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.delete(inserted_activity.id) diff --git a/V2/tests/integration/daos/customers_dao_test.py b/V2/tests/integration/daos/customers_dao_test.py new file mode 100644 index 00000000..496aaf47 --- /dev/null +++ b/V2/tests/integration/daos/customers_dao_test.py @@ -0,0 +1,139 @@ +import typing + +import pytest +from faker import Faker + +import time_tracker.customers._domain as domain +import time_tracker.customers._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.CustomersDao: + def _create_fake_dao(db_fake: DB) -> domain.CustomersDao: + dao = infrastructure.CustomersSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.CustomersSQLDao(db_fake) + query = dao.customer.delete() + dao.db.get_session().execute(query) + + +def test__create_customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao +): + dao = create_fake_dao(test_db) + + customer_to_insert = customer_factory() + + 
inserted_customer = dao.create(customer_to_insert) + + assert isinstance(inserted_customer, domain.Customer) + assert inserted_customer == customer_to_insert + + +def test__get_all__returns_a_list_of_customer_dto_objects__when_one_or_more_customers_are_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + customer_to_insert = customer_factory() + inserted_customer = [dao.create(customer_to_insert)] + + customers = dao.get_all() + + assert isinstance(customers, typing.List) + assert customers == inserted_customer + + +def test_get_by_id__returns_a_customer_dto__when_found_one_customer_that_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = dao.get_by_id(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer == inserted_customer + + +def test__get_by_id__returns_none__when_no_customer_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + customer = dao.get_by_id(existent_customer.id) + + assert customer is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_customers_with_sql_database( + test_db, create_fake_dao +): + customers = create_fake_dao(test_db).get_all() + + assert isinstance(customers, typing.List) + assert customers == [] + + +def test_delete__returns_a_customer_with_inactive_status__when_a_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = 
dao.delete(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer.status == 1 + assert customer.deleted is True + + +def test_delete__returns_none__when_no_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + results = dao.delete(existent_customer.id) + + assert results is None + + +def test__update_customer_dao__returns_an_updated_customer_dto__when_updates_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + updated_customer = dao.update(inserted_customer["id"], domain.Customer(**inserted_customer)) + + assert isinstance(updated_customer, domain.Customer) + assert updated_customer.description == inserted_customer["description"] + assert updated_customer.__dict__ == inserted_customer + + +def test__update_customer_dao__returns_none__when_an_incorrect_id_is_passed( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + updated_customer = dao.update(Faker().pyint(), existent_customer) + + assert updated_customer is None diff --git a/V2/tests/integration/daos/projects_dao_test.py b/V2/tests/integration/daos/projects_dao_test.py new file mode 100644 index 00000000..01f5a1a3 --- /dev/null +++ b/V2/tests/integration/daos/projects_dao_test.py @@ -0,0 +1,153 @@ +import pytest +import typing +from faker import Faker + +from time_tracker.projects import _domain as domain +from time_tracker.projects import _infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def 
_create_fake_dao() -> domain.ProjectsDao: + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + return dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + query = dao.project.delete() + dao.db.get_session().execute(query) + + +def test__create_project__returns_a_project_dto__when_saves_correctly_with_sql_database( + create_fake_dao, project_factory, insert_customer, customer_factory +): + dao = create_fake_dao + inserted_customer = insert_customer(customer_factory(), dao.db) + project_to_insert = project_factory(customer_id=inserted_customer.id) + + inserted_project = dao.create(project_to_insert) + + expected_project = project_to_insert.__dict__ + expected_project.update({"customer": inserted_customer.__dict__}) + + assert isinstance(inserted_project, domain.Project) + assert inserted_project == project_to_insert + + +def test_update__returns_an_update_project__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + expected_description = Faker().sentence() + updated_project = dao.update(inserted_project.id, {"description": expected_description}) + + assert isinstance(updated_project, domain.Project) + assert updated_project.id == inserted_project.id + assert updated_project.description == expected_description + + +def test_update__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.update(project_to_insert.id, {"description": Faker().sentence()}) + + assert results is None + + +def test__get_all__returns_a_list_of_project_dto_objects__when_one_or_more_projects_are_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_projects = [ + insert_project(), + 
insert_project() + ] + + projects = dao.get_all() + + assert isinstance(projects, typing.List) + assert projects == inserted_projects + + +def test_get_by_id__returns_an_project_dto__when_found_one_project_that_matches_its_id_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.get_by_id(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project == inserted_project + + +def test__get_by_id__returns_none__when_no_project_matches_its_id_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + project = dao.get_by_id(project_to_insert.id) + + assert project is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_projects_with_sql_database( + create_fake_dao +): + projects = create_fake_dao.get_all() + + assert isinstance(projects, typing.List) + assert projects == [] + + +def test_delete__returns_an_project_with_inactive_status__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.delete(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project.status == 0 + assert project.deleted is True + + +def test_delete__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.delete(project_to_insert.id) + + assert results is None + + +def test_get_latest_projects__returns_a_list_of_project_dto_objects__when_find_projects_in_the_latest_time_entries( + create_fake_dao, insert_time_entry +): + dao = create_fake_dao + owner_id = Faker().pyint() + inserted_time_entries = insert_time_entry(owner_id) + 
latest_projects = dao.get_latest(owner_id) + + assert isinstance(latest_projects, typing.List) + assert latest_projects[0].id == inserted_time_entries.project_id diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py new file mode 100644 index 00000000..3c17f7e9 --- /dev/null +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -0,0 +1,186 @@ +import pytest +import typing + +from faker import Faker + +import time_tracker.time_entries._domain as domain +import time_tracker.time_entries._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.TimeEntriesDao: + def _create_fake_dao(db_fake: DB) -> domain.TimeEntriesDao: + dao = infrastructure.TimeEntriesSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.TimeEntriesSQLDao(db_fake) + query = dao.time_entry.delete() + dao.db.get_session().execute(query) + + +def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_database( + test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory, insert_project +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + inserted_project = insert_project() + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + + inserted_time_entry = dao.create(time_entry_to_insert) + + assert isinstance(inserted_time_entry, domain.TimeEntry) + assert inserted_time_entry == time_entry_to_insert + + +def test__time_entry__returns_None__when_not_saves_correctly( + time_entry_factory, create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + time_entry_to_insert = time_entry_factory(activity_id=1203) + + inserted_time_entry = dao.create(time_entry_to_insert) + + 
assert inserted_time_entry is None + + +def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_matching_its_id_is_found( + create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert) + + result = dao.delete(inserted_time_entry.id) + + assert result.deleted is True + + +def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( + create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + + result = dao.delete(Faker().pyint()) + + assert result is None + + +def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + result = dao.get_latest_entries(int(inserted_time_entry["owner_id"])) + + assert result == [inserted_time_entry] + + +def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + time_entry_id = 
inserted_time_entry["id"] + inserted_time_entry.update({"description": "description updated"}) + + time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entry) + + assert time_entry.id == time_entry_id + assert time_entry.description == inserted_time_entry.get("description") + + +def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + time_entry = dao.update(0, inserted_time_entry) + + assert time_entry is None + + +def test__get_all__returns_a_list_of_time_entries_dto_objects__when_one_or_more_time_entries_are_found_in_sql_database( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project +): + + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entries = [dao.create(time_entries_to_insert)] + + time_entry = dao.get_all() + + assert isinstance(time_entry, typing.List) + assert time_entry == inserted_time_entries + + +def test__get_all__returns_an_empty_list__when_doesnt_found_any_time_entries_in_sql_database( + test_db, create_fake_dao, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + time_entry = dao.get_all() + assert time_entry == [] + + +def test__get_by_id__returns_a_time_entry_dto__when_found_one_time_entry_that_match_id_with_sql_database( + test_db, create_fake_dao, 
time_entry_factory, insert_activity, activity_factory, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entries = dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(time_entries_to_insert.id) + + assert isinstance(time_entry, domain.TimeEntry) + assert time_entry.id == inserted_time_entries.id + assert time_entry == inserted_time_entries + + +def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(Faker().pyint()) + + assert time_entry is None + + +def test_get_latest_entries__returns_none__when_an_owner_id_is_not_found( + create_fake_dao, test_db, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + result = dao.get_latest_entries(Faker().pyint()) + + assert result is None diff --git a/V2/tests/unit/services/customer_service_test.py b/V2/tests/unit/services/customer_service_test.py new file mode 100644 index 00000000..776d18ee --- /dev/null +++ b/V2/tests/unit/services/customer_service_test.py @@ -0,0 +1,70 @@ +from faker import Faker + +from time_tracker.customers._domain import CustomerService + + +def test__create_customer__uses_the_customer_dao__to_create_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + new_customer = 
customer_service.create(customer_factory()) + + assert customer_dao.create.called + assert expected_customer == new_customer + + +def test__delete_customer__uses_the_customer_dao__to_delete_customer_selected( + mocker, +): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_customer) + ) + + customer_service = CustomerService(customer_dao) + deleted_customer = customer_service.delete(Faker().pyint()) + + assert customer_dao.delete.called + assert expected_customer == deleted_customer + + +def test__get_all__uses_the_customer_dao__to_retrieve_customers(mocker): + expected_customers = mocker.Mock() + customer_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + customer_service = CustomerService(customer_dao) + + actual_customers = customer_service.get_all() + + assert customer_dao.get_all.called + assert expected_customers == actual_customers + + +def test__get_by_id__uses_the_customer_dao__to_retrieve_one_customer(mocker): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + actual_customer = customer_service.get_by_id(Faker().pyint()) + + assert customer_dao.get_by_id.called + assert expected_customer == actual_customer + + +def test__update_customer__uses_the_customer_dao__to_update_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + updated_customer = customer_service.update(Faker().pyint(), customer_factory()) + + assert customer_dao.update.called + assert expected_customer == updated_customer diff --git a/V2/tests/unit/services/project_service_test.py b/V2/tests/unit/services/project_service_test.py new file mode 100644 index 00000000..913bd40f --- /dev/null +++ 
b/V2/tests/unit/services/project_service_test.py @@ -0,0 +1,89 @@ +from faker import Faker + +from time_tracker.projects._domain import ProjectService + + +def test__get_all__uses_the_project_dao__to_retrieve_projects(mocker): + expected_projects = mocker.Mock() + project_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + project_service = ProjectService(project_dao) + + actual_projects = project_service.get_all() + + assert project_dao.get_all.called + assert expected_projects == actual_projects + + +def test__get_by_id__uses_the_project_dao__to_retrieve_one_project(mocker): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + actual_project = project_service.get_by_id(Faker().pyint()) + + assert project_dao.get_by_id.called + assert expected_project == actual_project + + +def test__delete_project__uses_the_project_dao__to_change_project_status( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_project) + ) + + project_service = ProjectService(project_dao) + deleted_project = project_service.delete(Faker().pyint()) + + assert project_dao.delete.called + assert expected_project == deleted_project + + +def test__update_project__uses_the_project_dao__to_update_one_project( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + updated_project = project_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert project_dao.update.called + assert expected_project == updated_project + + +def test__create_project__uses_the_project_dao__to_create_an_project(mocker, project_factory): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + project_service 
= ProjectService(project_dao) + + actual_project = project_service.create(project_factory()) + + assert project_dao.create.called + assert expected_project == actual_project + + +def test__get_latest_projects__uses_the_project_dao__to_get_last_projects( + mocker, +): + expected_latest_projects = mocker.Mock() + project_dao = mocker.Mock( + get_latest=mocker.Mock(return_value=expected_latest_projects) + ) + + project_service = ProjectService(project_dao) + latest_projects = project_service.get_latest(Faker().pyint()) + + assert expected_latest_projects == latest_projects + assert project_dao.get_latest.called diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py new file mode 100644 index 00000000..d1596471 --- /dev/null +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -0,0 +1,89 @@ +from faker import Faker + +from time_tracker.time_entries._domain import TimeEntryService + + +def test__create_time_entries__uses_the_time_entry_dao__to_create_an_time_entry(mocker, time_entry_factory): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.create(time_entry_factory()) + + assert time_entry_dao.create.called + assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry__uses_the_time_entry_dao__to_delete_time_entry_selected( + mocker, +): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + deleted_time_entry = time_entry_service.delete(Faker().pyint()) + + assert time_entry_dao.delete.called + assert expected_time_entry == deleted_time_entry + + +def test__update_time_entry__uses_the_time_entry_dao__to_update_one_time_entry( + mocker, +): + expected_time_entry = 
mocker.Mock() + time_entry_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + updated_time_entry = time_entry_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert time_entry_dao.update.called + assert expected_time_entry == updated_time_entry + + +def test__get_all__uses_the_time_entry_dao__to_retrieve_time_entries(mocker): + expected_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + time_activity_service = TimeEntryService(time_entry_dao) + + actual_activities = time_activity_service.get_all() + + assert time_entry_dao.get_all.called + assert expected_time_entries == actual_activities + + +def test__get_by_id__uses_the_time_entry_dao__to_retrieve_one_time_entry(mocker): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.get_by_id(Faker().uuid4()) + + assert time_entry_dao.get_by_id.called + assert expected_time_entry == actual_time_entry + + +def test__get_latest_entries__uses_the_time_entry_dao__to_get_last_entries( + mocker, +): + expected_latest_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + latest_time_entries = time_entry_service.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert expected_latest_time_entries == latest_time_entries + assert time_entry_dao.get_latest_entries.called diff --git a/V2/tests/unit/use_cases/customers_use_case_test.py b/V2/tests/unit/use_cases/customers_use_case_test.py new file mode 100644 index 00000000..63e03081 --- /dev/null +++ b/V2/tests/unit/use_cases/customers_use_case_test.py @@ -0,0 +1,77 @@ +from pytest_mock 
import MockFixture +from faker import Faker + +from time_tracker.customers._domain import _use_cases + + +def test__create_customer_function__uses_the_customer_service__to_create_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.CreateCustomerUseCase(customer_service) + new_customer = customer_use_case.create_customer(customer_factory()) + + assert customer_service.create.called + assert expected_customer == new_customer + + +def test__delete_customer_function__uses_the_customer_service__to_delete_customer_selected( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock(delete=mocker.Mock(return_value=expected_customer)) + + customer_use_case = _use_cases.DeleteCustomerUseCase(customer_service) + deleted_customer = customer_use_case.delete_customer(Faker().pyint()) + + assert customer_service.delete.called + assert expected_customer == deleted_customer + + +def test__get_list_customers_function__uses_the_customer_service__to_retrieve_customers( + mocker: MockFixture, +): + expected_customers = mocker.Mock() + customer_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + + customers_use_case = _use_cases.GetAllCustomerUseCase(customer_service) + actual_customers = customers_use_case.get_all_customer() + + assert customer_service.get_all.called + assert expected_customers == actual_customers + + +def test__get_customer_by_id_function__uses_the_customer_service__to_retrieve_customer( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.GetByIdCustomerUseCase(customer_service) + actual_customer = customer_use_case.get_customer_by_id(Faker().pyint()) + + assert customer_service.get_by_id.called + 
assert expected_customer == actual_customer + + +def test__update_customer_function__uses_the_customer_service__to_update_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.UpdateCustomerUseCase(customer_service) + updated_customer = customer_use_case.update_customer(Faker().pyint(), customer_factory()) + + assert customer_service.update.called + assert expected_customer == updated_customer diff --git a/V2/tests/unit/use_cases/projects_use_case_test.py b/V2/tests/unit/use_cases/projects_use_case_test.py new file mode 100644 index 00000000..9f5d5f5c --- /dev/null +++ b/V2/tests/unit/use_cases/projects_use_case_test.py @@ -0,0 +1,93 @@ +from pytest_mock import MockFixture +from faker import Faker + +from time_tracker.projects._domain import _use_cases + + +def test__create_project_function__uses_the_projects_service__to_create_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.CreateProjectUseCase(project_service) + actual_project = project_use_case.create_project(project_factory()) + + assert project_service.create.called + assert expected_project == actual_project + + +def test__delete_project_function__uses_the_project_service__to_delete_project_selected( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock(delete=mocker.Mock(return_value=expected_project)) + + project_use_case = _use_cases.DeleteProjectUseCase(project_service) + deleted_project = project_use_case.delete_project(Faker().pyint()) + + assert project_service.delete.called + assert expected_project == deleted_project + + +def test__get_list_projects_function__uses_the_project_service__to_retrieve_projects( + mocker: MockFixture, +): + 
expected_projects = mocker.Mock() + project_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + + projects_use_case = _use_cases.GetProjectsUseCase(project_service) + actual_projects = projects_use_case.get_projects() + + assert project_service.get_all.called + assert expected_projects == actual_projects + + +def test__get_project_by_id_function__uses_the_project_service__to_retrieve_project( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.GetProjectUseCase(project_service) + actual_project = project_use_case.get_project_by_id(Faker().pyint()) + + assert project_service.get_by_id.called + assert expected_project == actual_project + + +def test__update_project_function__uses_the_projects_service__to_update_an_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_to_update = project_factory() + + project_use_case = _use_cases.UpdateProjectUseCase(project_service) + updated_project = project_use_case.update_project( + Faker().pyint(), project_to_update.__dict__ + ) + + assert project_service.update.called + assert expected_project == updated_project + + +def test__get_latest_projects_function__uses_the_project_service__to_get_latest_project( + mocker: MockFixture, +): + expected_latest_projects = mocker.Mock() + project_service = mocker.Mock(get_latest=mocker.Mock(return_value=expected_latest_projects)) + + project_use_case = _use_cases.GetLatestProjectsUseCase(project_service) + latest_projects = project_use_case.get_latest(Faker().pyint()) + + assert project_service.get_latest.called + assert expected_latest_projects == latest_projects diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py new file 
mode 100644 index 00000000..f96666cb --- /dev/null +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -0,0 +1,90 @@ +from pytest_mock import MockFixture +from faker import Faker + +from time_tracker.time_entries._domain import _use_cases + +fake = Faker() + + +def test__create_time_entry_function__uses_the_time_entries_service__to_create_time_entry( + mocker: MockFixture, time_entry_factory +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_use_case = _use_cases.CreateTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.create_time_entry(time_entry_factory()) + + assert time_entry_service.create.called + assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry_function__uses_the_time_entry_service__to_delete_time_entry_selected( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(delete=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.DeleteTimeEntryUseCase(time_entry_service) + deleted_time_entry = time_entry_use_case.delete_time_entry(Faker().pyint()) + + assert time_entry_service.delete.called + assert expected_time_entry == deleted_time_entry + + +def test__update_time_entries_function__uses_the_time_entry_service__to_update_an_time_entry( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(update=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.UpdateTimeEntryUseCase(time_entry_service) + updated_time_entry = time_entry_use_case.update_time_entry(Faker().uuid4(), Faker().pydict()) + + assert time_entry_service.update.called + assert expected_time_entry == updated_time_entry + + +def test__get_all_time_entries_function__using_the_use_case_get_time_entries__to_get_all_time_entries( + mocker: MockFixture, +): + expected_time_entries = 
mocker.Mock() + time_entry_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + + time_entries_use_case = _use_cases.GetTimeEntriesUseCase(time_entry_service) + actual_time_entries = time_entries_use_case.get_time_entries() + + assert time_entry_service.get_all.called + assert expected_time_entries == actual_time_entries + + +def test__get_time_entry_by_id_function__uses_the_time_entry_service__to_retrieve_time_entry( + mocker: MockFixture, +): + expected_time_entries = mocker.Mock() + time_entry_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entries) + ) + + time_entry_use_case = _use_cases.GetTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.get_time_entry_by_id(fake.uuid4()) + + assert time_entry_service.get_by_id.called + assert expected_time_entries == actual_time_entry + + +def test__get_latest_entries_function__uses_the_time_entry_service__to_get_last_entries( + mocker: MockFixture, +): + expected_latest_time_entries = mocker.Mock() + time_entry_service = mocker.Mock(get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries)) + + time_entry_use_case = _use_cases.GetLastestTimeEntryUseCase(time_entry_service) + latest_time_entries = time_entry_use_case.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert time_entry_service.get_latest_entries.called + assert expected_latest_time_entries == latest_time_entries diff --git a/V2/time_tracker/_infrastructure/_config.py b/V2/time_tracker/_infrastructure/_config.py index 7f8c8fa7..cf4f19bf 100644 --- a/V2/time_tracker/_infrastructure/_config.py +++ b/V2/time_tracker/_infrastructure/_config.py @@ -1,20 +1,17 @@ import typing import os -CONNECTION_STRING = 'postgresql://root:root@localhost:5433/timetracker' - class Config(typing.NamedTuple): DB_CONNECTION_STRING: str - DB_USER: str - DB_PASS: str - DB_NAME: str def load_config(): + if os.environ.get("ENVIRONMENT") == "development": + connection: str = 
os.environ.get("DB_CONNECTION") + else: + connection: str = os.environ.get("TEST_DB_CONNECTION") + return Config( - CONNECTION_STRING if os.environ.get("DB_CONNECTION_STRING") is None else os.environ.get("DB_CONNECTION_STRING"), - os.environ.get("DB_USER"), - os.environ.get("DB_PASS"), - os.environ.get("DB_NAME") + connection ) diff --git a/V2/time_tracker/_infrastructure/_db.py b/V2/time_tracker/_infrastructure/_db.py index 8fe5cef1..6f3a9f9a 100644 --- a/V2/time_tracker/_infrastructure/_db.py +++ b/V2/time_tracker/_infrastructure/_db.py @@ -14,7 +14,7 @@ def __init__(self, conn_string: str = conn_string): self.engine = sqlalchemy.create_engine(conn_string) def get_session(self): + self.metadata.create_all(self.engine) if self.connection is None: - self.metadata.create_all(self.engine) self.connection = self.engine.connect() return self.connection diff --git a/V2/time_tracker/activities/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py index 94f3701d..8d5b912d 100644 --- a/V2/time_tracker/activities/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -8,11 +8,10 @@ from ... 
import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def create_activity(req: func.HttpRequest) -> func.HttpResponse: - activity_dao = _infrastructure.ActivitiesSQLDao(DATABASE) + database = DB() + activity_dao = _infrastructure.ActivitiesSQLDao(database) activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) diff --git a/V2/time_tracker/activities/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py index 14ada8ab..746b1073 100644 --- a/V2/time_tracker/activities/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -7,8 +7,6 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def delete_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -29,8 +27,9 @@ def delete_activity(req: func.HttpRequest) -> func.HttpResponse: def _delete(activity_id: int) -> str: + database = DB() activity_use_case = _domain._use_cases.DeleteActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.delete_activity(activity_id) return json.dumps(activity.__dict__) if activity else b'Not found' diff --git a/V2/time_tracker/activities/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py index d92503dd..dd6053b0 100644 --- a/V2/time_tracker/activities/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -7,10 +7,9 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def get_activities(req: func.HttpRequest) -> func.HttpResponse: + database = DB() logging.info( 'Python HTTP trigger function processed a request to get an activity.' 
) @@ -19,11 +18,11 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: try: if activity_id: - response = _get_by_id(int(activity_id)) + response = _get_by_id(int(activity_id), database) if response == b'Not Found': status_code = 404 else: - response = _get_all() + response = _get_all(database) return func.HttpResponse( body=response, status_code=status_code, mimetype="application/json" @@ -34,18 +33,18 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: ) -def _get_by_id(activity_id: int) -> str: +def _get_by_id(activity_id: int, database: DB) -> str: activity_use_case = _domain._use_cases.GetActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.get_activity_by_id(activity_id) return json.dumps(activity.__dict__) if activity else b'Not Found' -def _get_all() -> str: +def _get_all(database: DB) -> str: activities_use_case = _domain._use_cases.GetActivitiesUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) return json.dumps( [ diff --git a/V2/time_tracker/activities/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py index 0933fd72..4717042c 100644 --- a/V2/time_tracker/activities/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -8,8 +8,6 @@ from ... 
import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def update_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -37,8 +35,9 @@ def update_activity(req: func.HttpRequest) -> func.HttpResponse: def _update(activity_id: int, activity_data: dict) -> str: + database = DB() activity_use_case = _domain._use_cases.UpdateActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.update_activity( activity_id, activity_data.get("name"), diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py index 1e7220c5..35c209db 100644 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._activities_sql_dao import ActivitiesSQLDao +from ._activities_dao import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py similarity index 100% rename from V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py rename to V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py diff --git a/V2/time_tracker/customers/_application/__init__.py b/V2/time_tracker/customers/_application/__init__.py new file mode 100644 index 00000000..d9ba1676 --- /dev/null +++ b/V2/time_tracker/customers/_application/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._customers import create_customer +from ._customers import get_customers +from ._customers import delete_customer +from ._customers import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/__init__.py b/V2/time_tracker/customers/_application/_customers/__init__.py new file mode 
100644 index 00000000..b07840ce --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._create_customer import create_customer +from ._get_customers import get_customers +from ._delete_customer import delete_customer +from ._update_customer import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/_create_customer.py b/V2/time_tracker/customers/_application/_customers/_create_customer.py new file mode 100644 index 00000000..48e39dc3 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_create_customer.py @@ -0,0 +1,56 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def create_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.CreateCustomerUseCase(customer_service) + customer_data = req.get_json() + + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: + raise ValueError + + customer_to_create = _domain.Customer( + id=None, + deleted=None, + status=None, + name=str(customer_data["name"]).strip(), + description=str(customer_data["description"]), + ) + created_customer = use_case.create_customer(customer_to_create) + + if created_customer: + body = json.dumps(created_customer.__dict__) + status_code = HTTPStatus.CREATED + else: + body = b'This customer already exists' + status_code = HTTPStatus.CONFLICT + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=HTTPStatus.BAD_REQUEST, + 
mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if (field.name not in customer_data) and (field.type != typing.Optional[field.type])] diff --git a/V2/time_tracker/customers/_application/_customers/_delete_customer.py b/V2/time_tracker/customers/_application/_customers/_delete_customer.py new file mode 100644 index 00000000..41fc3464 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_delete_customer.py @@ -0,0 +1,39 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATATYPE = "application/json" + + +def delete_customer(req: func.HttpRequest) -> func.HttpResponse: + customer_dao = _infrastructure.CustomersSQLDao(DB()) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.DeleteCustomerUseCase(customer_service) + + try: + customer_id = int(req.route_params.get("id")) + deleted_customer = use_case.delete_customer(customer_id) + if not deleted_customer: + return func.HttpResponse( + body="Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype=DATATYPE + ) + + return func.HttpResponse( + body=json.dumps(deleted_customer.__dict__, default=str), + status_code=HTTPStatus.OK, + mimetype=DATATYPE, + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype=DATATYPE + ) diff --git a/V2/time_tracker/customers/_application/_customers/_get_customers.py b/V2/time_tracker/customers/_application/_customers/_get_customers.py new file mode 100644 index 00000000..8cb9635f --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_get_customers.py @@ -0,0 +1,55 @@ +from http import HTTPStatus +import json + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB + + +def get_customers(req: func.HttpRequest) -> func.HttpResponse: + customer_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + try: + if customer_id: + response = _get_by_id(int(customer_id)) + if response == b'This customer does not exist': + status_code = HTTPStatus.NOT_FOUND + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"The id has an invalid format", status_code=HTTPStatus.BAD_REQUEST, mimetype="application/json" + ) + + +def _get_by_id(customer_id: int) -> str: + customer_use_case = _domain._use_cases.GetByIdCustomerUseCase( + _create_customer_service(DB()) + ) + customer = customer_use_case.get_customer_by_id(customer_id) + + return json.dumps(customer.__dict__) if customer else b'This customer does not exist' + + +def _get_all() -> str: + customer_sql = _domain._use_cases.GetAllCustomerUseCase( + _create_customer_service(DB()) + ) + return json.dumps( + [ + customer.__dict__ + for customer in customer_sql.get_all_customer() + ] + ) + + +def _create_customer_service(db: DB) -> _domain.CustomerService: + customer_sql = _infrastructure.CustomersSQLDao(db) + return _domain.CustomerService(customer_sql) diff --git a/V2/time_tracker/customers/_application/_customers/_update_customer.py b/V2/time_tracker/customers/_application/_customers/_update_customer.py new file mode 100644 index 00000000..93524c65 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_update_customer.py @@ -0,0 +1,53 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB + + +def update_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_id = int(req.route_params.get('id')) + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.UpdateCustomerUseCase(customer_service) + + customer_data = req.get_json() + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: + raise ValueError + + customer_to_update = _domain.Customer( + **{field.name: customer_data.get(field.name) for field in dataclasses.fields(_domain.Customer)} + ) + updated_customer = use_case.update_customer(customer_id, customer_to_update) + + if updated_customer: + body = json.dumps(updated_customer.__dict__) + status_code = HTTPStatus.OK + else: + body = b'This customer does not exist or is duplicated' + status_code = HTTPStatus.CONFLICT + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if field.name not in customer_data] diff --git a/V2/time_tracker/customers/_domain/__init__.py b/V2/time_tracker/customers/_domain/__init__.py new file mode 100644 index 00000000..a2e8014b --- /dev/null +++ b/V2/time_tracker/customers/_domain/__init__.py @@ -0,0 +1,11 @@ +# flake8: noqa +from ._entities import Customer +from ._persistence_contracts import CustomersDao +from ._services import CustomerService +from ._use_cases import ( + CreateCustomerUseCase, + UpdateCustomerUseCase, + GetAllCustomerUseCase, + GetByIdCustomerUseCase, + DeleteCustomerUseCase +) \ No newline at end of 
file diff --git a/V2/time_tracker/customers/_domain/_entities/__init__.py b/V2/time_tracker/customers/_domain/_entities/__init__.py new file mode 100644 index 00000000..2a23e12c --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import Customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_entities/_customer.py b/V2/time_tracker/customers/_domain/_entities/_customer.py new file mode 100644 index 00000000..fedc0835 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/_customer.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +import typing + + +@dataclass(frozen=True) +class Customer: + id: typing.Optional[int] + name: str + description: str + deleted: typing.Optional[bool] + status: typing.Optional[int] diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..8b1b02fd --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customers_dao import CustomersDao \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py new file mode 100644 index 00000000..186d5c86 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py @@ -0,0 +1,26 @@ +import abc +import typing + +from time_tracker.customers._domain import Customer + + +class CustomersDao(abc.ABC): + @abc.abstractmethod + def create(self, data: Customer) -> Customer: + pass + + @abc.abstractmethod + def update(self, id: int, data: Customer) -> Customer: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Customer: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[Customer]: + pass + + 
@abc.abstractmethod + def delete(self, id: int) -> Customer: + pass diff --git a/V2/time_tracker/customers/_domain/_services/__init__.py b/V2/time_tracker/customers/_domain/_services/__init__.py new file mode 100644 index 00000000..84ed66cf --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import CustomerService \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_services/_customer.py b/V2/time_tracker/customers/_domain/_services/_customer.py new file mode 100644 index 00000000..082a7b08 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/_customer.py @@ -0,0 +1,24 @@ +import typing + +from time_tracker.customers._domain import Customer, CustomersDao + + +class CustomerService: + + def __init__(self, customer_dao: CustomersDao): + self.customer_dao = customer_dao + + def create(self, data: Customer) -> Customer: + return self.customer_dao.create(data) + + def update(self, id: int, data: Customer) -> Customer: + return self.customer_dao.update(id, data) + + def get_by_id(self, id: int) -> Customer: + return self.customer_dao.get_by_id(id) + + def get_all(self) -> typing.List[Customer]: + return self.customer_dao.get_all() + + def delete(self, id: int) -> Customer: + return self.customer_dao.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/__init__.py b/V2/time_tracker/customers/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..4dcb8239 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._create_customer_use_case import CreateCustomerUseCase +from ._update_customer_use_case import UpdateCustomerUseCase +from ._get_by_id_customer_use_case import GetByIdCustomerUseCase +from ._get_all_customer_use_case import GetAllCustomerUseCase +from ._delete_customer_use_case import DeleteCustomerUseCase diff --git 
a/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py new file mode 100644 index 00000000..8aeefa2b --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class CreateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def create_customer(self, data: Customer) -> Customer: + return self.customer_service.create(data) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py new file mode 100644 index 00000000..0477a1f2 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class DeleteCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def delete_customer(self, id: int) -> Customer: + return self.customer_service.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py new file mode 100644 index 00000000..d3780449 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py @@ -0,0 +1,12 @@ +import typing + +from time_tracker.customers._domain import Customer, CustomerService + + +class GetAllCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_all_customer(self) -> typing.List[Customer]: + return self.customer_service.get_all() diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py 
b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py new file mode 100644 index 00000000..2372029a --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class GetByIdCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_customer_by_id(self, id: int) -> Customer: + return self.customer_service.get_by_id(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py new file mode 100644 index 00000000..318ced28 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class UpdateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def update_customer(self, id: int, data: Customer) -> Customer: + return self.customer_service.update(id, data) diff --git a/V2/time_tracker/customers/_infrastructure/__init__.py b/V2/time_tracker/customers/_infrastructure/__init__.py new file mode 100644 index 00000000..220e8f60 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..c3c24a98 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer_dao import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py 
b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py new file mode 100644 index 00000000..f3b15122 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py @@ -0,0 +1,80 @@ +import dataclasses +import typing + +import sqlalchemy as sq + +import time_tracker.customers._domain as domain +from time_tracker._infrastructure import _db + + +class CustomersSQLDao(domain.CustomersDao): + + def __init__(self, database: _db.DB): + self.customer_key = [field.name for field in dataclasses.fields(domain.Customer)] + self.db = database + self.customer = sq.Table( + 'customer', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String, unique=True, nullable=False), + sq.Column('description', sq.String), + sq.Column('deleted', sq.Boolean), + sq.Column('status', sq.Integer), + extend_existing=True, + ) + + def get_by_id(self, id: int) -> domain.Customer: + query = sq.sql.select(self.customer).where( + sq.sql.and_(self.customer.c.id == id, self.customer.c.deleted.is_(False)) + ) + customer = self.db.get_session().execute(query).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def get_all(self) -> typing.List[domain.Customer]: + query = sq.sql.select(self.customer).where(self.customer.c.deleted.is_(False)) + result = self.db.get_session().execute(query) + return [ + self.__create_customer_dto(dict(customer)) + for customer in result + ] + + def create(self, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop('id', None) + new_customer['deleted'] = False + new_customer['status'] = 1 + + query = self.customer.insert().values(new_customer).return_defaults() + customer = self.db.get_session().execute(query) + new_customer.update({"id": customer.inserted_primary_key[0]}) + return self.__create_customer_dto(new_customer) + except sq.exc.IntegrityError: + return None + + def 
__create_customer_dto(self, customer: dict) -> domain.Customer: + customer = {key: customer.get(key) for key in self.customer_key} + return domain.Customer(**customer) + + def delete(self, customer_id: int) -> domain.Customer: + query = ( + self.customer.update() + .where(self.customer.c.id == customer_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_customer = sq.sql.select(self.customer).where(self.customer.c.id == customer_id) + customer = self.db.get_session().execute(query_deleted_customer).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def update(self, id: int, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop("id") + + customer_validated = {key: value for (key, value) in new_customer.items() if value is not None} + query = self.customer.update().where(self.customer.c.id == id).values(customer_validated) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError: + return None diff --git a/V2/time_tracker/customers/interface.py b/V2/time_tracker/customers/interface.py new file mode 100644 index 00000000..9aef2091 --- /dev/null +++ b/V2/time_tracker/customers/interface.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._application import create_customer +from ._application import get_customers +from ._application import delete_customer +from ._application import update_customer diff --git a/V2/time_tracker/projects/_application/__init__.py b/V2/time_tracker/projects/_application/__init__.py new file mode 100644 index 00000000..96a6f985 --- /dev/null +++ b/V2/time_tracker/projects/_application/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._projects import create_project +from ._projects import delete_project +from ._projects import get_projects +from ._projects import update_project +from ._projects import get_latest_projects \ No newline at end of file diff --git 
a/V2/time_tracker/projects/_application/_projects/__init__.py b/V2/time_tracker/projects/_application/_projects/__init__.py new file mode 100644 index 00000000..b7500f9b --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._create_project import create_project +from ._delete_project import delete_project +from ._get_projects import get_projects +from ._update_project import update_project +from ._get_latest_projects import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/_create_project.py b/V2/time_tracker/projects/_application/_projects/_create_project.py new file mode 100644 index 00000000..1397284a --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_create_project.py @@ -0,0 +1,58 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def create_project(req: func.HttpRequest) -> func.HttpResponse: + + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.CreateProjectUseCase(project_service) + + project_data = req.get_json() + + validation_errors = _validate_project(project_data) + if validation_errors: + status_code = HTTPStatus.BAD_REQUEST + response = json.dumps(validation_errors) + else: + project_to_create = _domain.Project( + id=None, + name=project_data["name"], + description=project_data["description"], + project_type_id=project_data["project_type_id"], + customer_id=project_data["customer_id"], + status=project_data["status"], + deleted=False, + technologies=project_data["technologies"], + customer=None + ) + + created_project = use_case.create_project(project_to_create) + + status_code, response = [ + HTTPStatus.INTERNAL_SERVER_ERROR, b"could 
not be created" + ] if not created_project else [HTTPStatus.CREATED, json.dumps(created_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json" + ) + + +def _validate_project(project_data: dict) -> typing.List[str]: + project_fields = [field.name for field in dataclasses.fields(_domain.Project) + if field.type != typing.Optional[field.type]] + missing_keys = [field for field in project_fields if field not in project_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/projects/_application/_projects/_delete_project.py b/V2/time_tracker/projects/_application/_projects/_delete_project.py new file mode 100644 index 00000000..5274b79f --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_delete_project.py @@ -0,0 +1,35 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB as database + + +def delete_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.DeleteProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + deleted_project = use_case.delete_project(project_id) + + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not deleted_project else [HTTPStatus.OK, json.dumps(deleted_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) diff --git a/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py new file mode 100644 index 00000000..0aa9badc --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py @@ -0,0 +1,26 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_latest_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.GetLatestProjectsUseCase(project_service) + + owner_id = req.params.get('owner_id') + response = [ + project.__dict__ + for project in use_case.get_latest(owner_id) + ] + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) diff --git a/V2/time_tracker/projects/_application/_projects/_get_projects.py b/V2/time_tracker/projects/_application/_projects/_get_projects.py new file mode 100644 index 00000000..c15efa1c --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_projects.py @@ -0,0 +1,56 @@ +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + + project_id = req.route_params.get("id") + + try: + if project_id: + response = _get_by_id(int(project_id), project_service) + if not response: + return func.HttpResponse( + body=b"Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype="application/json" + ) + else: + response = _get_all(project_service) + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _get_by_id(project_id: int, project_service: _domain.ProjectService) -> str: + use_case = _domain._use_cases.GetProjectUseCase(project_service) + project = use_case.get_project_by_id(project_id) + + return project.__dict__ if project else None + + +def _get_all(project_service: _domain.ProjectService) -> typing.List: + use_case = _domain._use_cases.GetProjectsUseCase(project_service) + return [ + project.__dict__ + for project in use_case.get_projects() + ] diff --git a/V2/time_tracker/projects/_application/_projects/_update_project.py b/V2/time_tracker/projects/_application/_projects/_update_project.py new file mode 100644 index 00000000..b2cc1e57 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_update_project.py @@ -0,0 +1,53 @@ +import dataclasses +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB as database + + +def update_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.UpdateProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + project_data = req.get_json() + + if not _validate_project(project_data): + status_code = HTTPStatus.BAD_REQUEST + response = b"Incorrect body" + + else: + updated_project = use_case.update_project(project_id, project_data) + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not updated_project else [HTTPStatus.OK, json.dumps(updated_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + except Exception as error: + return func.HttpResponse( + body=str(error).encode(), + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + + +def _validate_project(project_data: dict) -> bool: + project_keys = [field.name for field in dataclasses.fields(_domain.Project)] + return all(key in project_keys for key in project_data.keys()) diff --git a/V2/time_tracker/projects/_domain/__init__.py b/V2/time_tracker/projects/_domain/__init__.py new file mode 100644 index 00000000..6cdbe548 --- /dev/null +++ b/V2/time_tracker/projects/_domain/__init__.py @@ -0,0 +1,12 @@ +# flake8: noqa +from ._entities import Project +from ._persistence_contracts import ProjectsDao +from ._services import ProjectService +from ._use_cases import ( + CreateProjectUseCase, + DeleteProjectUseCase, + GetProjectsUseCase, + GetProjectUseCase, + UpdateProjectUseCase, + GetLatestProjectsUseCase +) \ No newline at end of file diff --git 
a/V2/time_tracker/projects/_domain/_entities/__init__.py b/V2/time_tracker/projects/_domain/_entities/__init__.py new file mode 100644 index 00000000..693c3a41 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import Project \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/_project.py b/V2/time_tracker/projects/_domain/_entities/_project.py new file mode 100644 index 00000000..75361db8 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/_project.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from typing import List, Optional + + +@dataclass(frozen=True) +class Project: + id: Optional[int] + name: str + description: str + project_type_id: int + customer_id: int + status: int + deleted: Optional[bool] + technologies: List[str] + + customer: Optional[dict] diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..b17214a7 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsDao \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py new file mode 100644 index 00000000..ef0bb10f --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py @@ -0,0 +1,30 @@ +import abc +import typing + +from .. 
import Project + + +class ProjectsDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: Project) -> Project: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[Project]: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Project: + pass + + @abc.abstractmethod + def update(self, id: int, project_data: dict) -> Project: + pass + + @abc.abstractmethod + def delete(self, id: int) -> Project: + pass + + @abc.abstractmethod + def get_latest(self, owner_id: int) -> typing.List[Project]: + pass diff --git a/V2/time_tracker/projects/_domain/_services/__init__.py b/V2/time_tracker/projects/_domain/_services/__init__.py new file mode 100644 index 00000000..5eb9532b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import ProjectService \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_services/_project.py b/V2/time_tracker/projects/_domain/_services/_project.py new file mode 100644 index 00000000..70dfe9c0 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/_project.py @@ -0,0 +1,27 @@ +import typing + +from .. 
import Project, ProjectsDao + + +class ProjectService: + + def __init__(self, project_dao: ProjectsDao): + self.project_dao = project_dao + + def create(self, project_data: Project) -> Project: + return self.project_dao.create(project_data) + + def get_all(self) -> typing.List[Project]: + return self.project_dao.get_all() + + def get_by_id(self, id: int) -> Project: + return self.project_dao.get_by_id(id) + + def update(self, id: int, project_data: dict) -> Project: + return self.project_dao.update(id, project_data) + + def delete(self, id: int) -> Project: + return self.project_dao.delete(id) + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_dao.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/__init__.py b/V2/time_tracker/projects/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..f2a7dfce --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._create_project_use_case import CreateProjectUseCase +from ._delete_project_use_case import DeleteProjectUseCase +from ._get_project_by_id_use_case import GetProjectUseCase +from ._get_projects_use_case import GetProjectsUseCase +from ._update_project_use_case import UpdateProjectUseCase +from ._get_latest_projects_use_case import GetLatestProjectsUseCase diff --git a/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py new file mode 100644 index 00000000..60b50687 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py @@ -0,0 +1,10 @@ +from .. 
import Project, ProjectService + + +class CreateProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def create_project(self, project_data: Project) -> Project: + return self.project_service.create(project_data) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py new file mode 100644 index 00000000..9dd91d4b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py @@ -0,0 +1,10 @@ +from .. import Project, ProjectService + + +class DeleteProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def delete_project(self, id: int) -> Project: + return self.project_service.delete(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py new file mode 100644 index 00000000..b26d484c --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetLatestProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_service.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py new file mode 100644 index 00000000..94573496 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py @@ -0,0 +1,9 @@ +from .. 
import ProjectService, Project + + +class GetProjectUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_project_by_id(self, id: int) -> Project: + return self.project_service.get_by_id(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py new file mode 100644 index 00000000..ccf0b3a4 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_projects(self) -> typing.List[Project]: + return self.project_service.get_all() diff --git a/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py new file mode 100644 index 00000000..628d7437 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py @@ -0,0 +1,9 @@ +from .. 
import ProjectService, Project + + +class UpdateProjectUseCase: + def __init__(self, projects_service: ProjectService): + self.projects_service = projects_service + + def update_project(self, id: int, project_data: dict) -> Project: + return self.projects_service.update(id, project_data) diff --git a/V2/time_tracker/projects/_infrastructure/__init__.py b/V2/time_tracker/projects/_infrastructure/__init__.py new file mode 100644 index 00000000..b940cba3 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..b73fcf44 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py new file mode 100644 index 00000000..63e65972 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py @@ -0,0 +1,104 @@ +import typing +import dataclasses + +import sqlalchemy as sq + +from ... 
import _domain as domain +from time_tracker._infrastructure import _db +from time_tracker.time_entries._infrastructure._data_persistence import TimeEntriesSQLDao +from time_tracker.customers._infrastructure._data_persistence import CustomersSQLDao + + +class ProjectsSQLDao(domain.ProjectsDao): + + def __init__(self, database: _db.DB): + self.project_key = [field.name for field in dataclasses.fields(domain.Project)] + self.db = database + self.project = sq.Table( + 'project', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String), + sq.Column('description', sq.String), + sq.Column('project_type_id', sq.Integer), + sq.Column('customer_id', sq.Integer, sq.ForeignKey('customer.id')), + sq.Column('status', sq.SmallInteger), + sq.Column('deleted', sq.BOOLEAN), + sq.Column( + 'technologies', + sq.ARRAY(sq.String).with_variant(sq.String, "sqlite") + ), + extend_existing=True, + ) + + def create(self, project_data: domain.Project) -> domain.Project: + try: + validated_project = {key: value for (key, value) in project_data.__dict__.items() if value is not None} + + query = self.project.insert().values(validated_project).return_defaults() + + project = self.db.get_session().execute(query) + return self.get_by_id(project.inserted_primary_key[0]) + + except sq.exc.SQLAlchemyError: + return None + + def get_by_id(self, id: int) -> domain.Project: + query = sq.sql.select(self.project).where(self.project.c.id == id) + project = self.db.get_session().execute(query).one_or_none() + if project: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + + return self.__create_project_dto(project) if project else None + + def get_all(self) -> typing.List[domain.Project]: + query = sq.sql.select(self.project) + result = self.db.get_session().execute(query).all() + projects = [] + + 
for project in result: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + projects.append(project) + + return [ + self.__create_project_dto(project) + for project in projects + ] + + def delete(self, id: int) -> domain.Project: + query = ( + self.project.update() + .where(self.project.c.id == id) + .values({"deleted": True, "status": 0}) + ) + self.db.get_session().execute(query) + return self.get_by_id(id) + + def update(self, id: int, project_data: dict) -> domain.Project: + try: + query = self.project.update().where(self.project.c.id == id).values(project_data) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError as error: + raise Exception(error.orig) + + def get_latest(self, owner_id: int) -> typing.List[domain.Project]: + time_entries_dao = TimeEntriesSQLDao(self.db) + latest_time_entries = time_entries_dao.get_latest_entries(owner_id) + latest_projects = [] + + if latest_time_entries: + filter_project = typing.Counter(time_entry['project_id'] for time_entry in latest_time_entries) + latest_projects = [self.get_by_id(project_id) for project_id in filter_project] + + return latest_projects + + def __create_project_dto(self, project: dict) -> domain.Project: + project = {key: project.get(key) for key in self.project_key} + return domain.Project(**project) diff --git a/V2/time_tracker/projects/interface.py b/V2/time_tracker/projects/interface.py new file mode 100644 index 00000000..a0312258 --- /dev/null +++ b/V2/time_tracker/projects/interface.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._application import create_project +from ._application import delete_project +from ._application import get_projects +from ._application import update_project +from ._application import get_latest_projects \ No newline at end of file diff --git 
a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py new file mode 100644 index 00000000..4d003056 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._time_entries import create_time_entry +from ._time_entries import delete_time_entry +from ._time_entries import update_time_entry +from ._time_entries import get_time_entries +from ._time_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py new file mode 100644 index 00000000..9b48eb2a --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._create_time_entry import create_time_entry +from ._delete_time_entry import delete_time_entry +from ._get_latest_entries import get_latest_entries +from ._update_time_entry import update_time_entry +from ._get_time_entries import get_time_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py new file mode 100644 index 00000000..95149c55 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py @@ -0,0 +1,63 @@ +import dataclasses +import json +import typing + +import azure.functions as func + +from ... import _domain +from ...
import _infrastructure +from time_tracker._infrastructure import DB + + +def create_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.CreateTimeEntryUseCase(time_entry_service) + + time_entry_data = req.get_json() + + validation_errors = _validate_time_entry(time_entry_data) + if validation_errors: + return func.HttpResponse( + body=json.dumps(validation_errors), status_code=400, mimetype="application/json" + ) + + time_entry_to_create = _domain.TimeEntry( + id=None, + start_date=time_entry_data["start_date"], + owner_id=time_entry_data["owner_id"], + description=time_entry_data["description"], + activity_id=time_entry_data["activity_id"], + uri=time_entry_data["uri"], + technologies=time_entry_data["technologies"], + end_date=time_entry_data["end_date"], + deleted=False, + timezone_offset=time_entry_data["timezone_offset"], + project_id=time_entry_data["project_id"] + ) + + created_time_entry = use_case.create_time_entry(time_entry_to_create) + + if not created_time_entry: + return func.HttpResponse( + body=json.dumps({'error': 'time_entry could not be created'}), + status_code=500, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(created_time_entry.__dict__), + status_code=201, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> typing.List[str]: + time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry) + if field.type != typing.Optional[field.type]] + missing_keys = [field for field in time_entry_fields if field not in time_entry_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py 
b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py new file mode 100644 index 00000000..bbf76eab --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py @@ -0,0 +1,36 @@ +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def delete_time_entry(req: func.HttpRequest) -> func.HttpResponse: + time_entry_dao = _infrastructure.TimeEntriesSQLDao(DB()) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.DeleteTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + deleted_time_entry = use_case.delete_time_entry(time_entry_id) + if not deleted_time_entry: + return func.HttpResponse( + body="Not found", + status_code=404, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(deleted_time_entry.__dict__, default=str), + status_code=200, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py new file mode 100644 index 00000000..b813fb4f --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py @@ -0,0 +1,49 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB +from time_tracker.utils.enums import ResponseEnums + + +def get_latest_entries(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.GetLastestTimeEntryUseCase(time_entry_service) + + try: + owner_id = req.params.get("owner_id") + limit = req.params.get("limit") + + if not owner_id: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + time_entries = use_case.get_latest_entries(int(owner_id), int(limit) if limit and int(limit) > 0 else None) + + if not time_entries or len(time_entries) == 0: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + return func.HttpResponse( + body=json.dumps(time_entries, default=str), + status_code=HTTPStatus.OK, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + except ValueError: + return func.HttpResponse( + body=ResponseEnums.INVALID_ID.value, + status_code=HTTPStatus.BAD_REQUEST, + mimetype=ResponseEnums.MIME_TYPE.value, + ) diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py new file mode 100644 index 00000000..37574d32 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py @@ -0,0 +1,61 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from time_tracker.time_entries._infrastructure import TimeEntriesSQLDao +from time_tracker.time_entries._domain import TimeEntryService, _use_cases +from time_tracker._infrastructure import DB + + +NOT_FOUND = b'Not Found' +INVALID_FORMAT_ID = b'Invalid Format ID' + + +def 
get_time_entries(req: func.HttpRequest) -> func.HttpResponse: + + time_entry_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + if time_entry_id: + try: + response = _get_by_id(int(time_entry_id)) + if response == NOT_FOUND: + status_code = HTTPStatus.NOT_FOUND + except ValueError: + response = INVALID_FORMAT_ID + status_code = HTTPStatus.BAD_REQUEST + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _get_by_id(id: int) -> str: + database = DB() + time_entry_use_case = _use_cases.GetTimeEntryUseCase( + _create_time_entry_service(database) + ) + time_entry = time_entry_use_case.get_time_entry_by_id(id) + + return json.dumps(time_entry.__dict__) if time_entry else NOT_FOUND + + +def _get_all() -> str: + database = DB() + time_entries_use_case = _use_cases.GetTimeEntriesUseCase( + _create_time_entry_service(database) + ) + return json.dumps( + [ + time_entry.__dict__ + for time_entry in time_entries_use_case.get_time_entries() + ] + ) + + +def _create_time_entry_service(db: DB): + time_entry_sql = TimeEntriesSQLDao(db) + return TimeEntryService(time_entry_sql) diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py new file mode 100644 index 00000000..63366869 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py @@ -0,0 +1,46 @@ +import dataclasses +import json + +import azure.functions as func + +from ... import _domain +from ... 
import _infrastructure +from time_tracker._infrastructure import DB + + +def update_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.UpdateTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + time_entry_data = req.get_json() + + if not _validate_time_entry(time_entry_data): + status_code = 400 + response = b"Incorrect time entry body" + else: + updated_time_entry = use_case.update_time_entry(time_entry_id, time_entry_data) + status_code, response = [ + 404, b"Not found" + ] if not updated_time_entry else [200, json.dumps(updated_time_entry.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> bool: + time_entry_keys = [field.name for field in dataclasses.fields(_domain.TimeEntry)] + return all(key in time_entry_keys for key in time_entry_data.keys()) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py new file mode 100644 index 00000000..f0aec6d0 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -0,0 +1,12 @@ +# flake8: noqa +from ._entities import TimeEntry +from ._persistence_contracts import TimeEntriesDao +from ._services import TimeEntryService +from ._use_cases import ( + CreateTimeEntryUseCase, + DeleteTimeEntryUseCase, + GetLastestTimeEntryUseCase, + UpdateTimeEntryUseCase, + GetTimeEntriesUseCase, + GetTimeEntryUseCase, +) diff --git a/V2/time_tracker/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/time_entries/_domain/_entities/__init__.py new file mode 
100644 index 00000000..3245a461 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntry diff --git a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py new file mode 100644 index 00000000..08df5f8f --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from typing import List, Optional + + +@dataclass(frozen=True) +class TimeEntry: + id: Optional[int] + start_date: str + owner_id: int + description: str + activity_id: int + uri: str + technologies: List[str] + end_date: str + deleted: Optional[bool] + timezone_offset: str + project_id: int diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..3f17d5ee --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_dao import TimeEntriesDao diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py new file mode 100644 index 00000000..59b9d975 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -0,0 +1,30 @@ +import abc +import typing + +from time_tracker.time_entries._domain import TimeEntry + + +class TimeEntriesDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + pass + + @abc.abstractmethod + def delete(self, id: int) -> TimeEntry: + pass + + @abc.abstractmethod + def update(self, id: int, new_time_entry: dict) -> TimeEntry: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> TimeEntry: + pass + + @abc.abstractmethod + def 
get_all(self) -> typing.List[TimeEntry]: + pass + + @abc.abstractmethod + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/__init__.py b/V2/time_tracker/time_entries/_domain/_services/__init__.py new file mode 100644 index 00000000..1a06f65b --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntryService diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py new file mode 100644 index 00000000..0c2b8b9b --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -0,0 +1,26 @@ +import typing + +from time_tracker.time_entries._domain import TimeEntry, TimeEntriesDao + + +class TimeEntryService: + def __init__(self, time_entry_dao: TimeEntriesDao): + self.time_entry_dao = time_entry_dao + + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_dao.create(time_entry_data) + + def delete(self, id: int) -> TimeEntry: + return self.time_entry_dao.delete(id) + + def update(self, time_entry_id: int, new_time_entry: dict) -> TimeEntry: + return self.time_entry_dao.update(time_entry_id, new_time_entry) + + def get_by_id(self, id: int) -> TimeEntry: + return self.time_entry_dao.get_by_id(id) + + def get_all(self) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_all() + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..0dd05666 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -0,0 +1,8 @@ +# flake8: noqa +from ._create_time_entry_use_case 
import CreateTimeEntryUseCase +from ._delete_time_entry_use_case import DeleteTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase +from ._update_time_entry_use_case import UpdateTimeEntryUseCase +from ._get_time_entry_use_case import GetTimeEntriesUseCase +from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py new file mode 100644 index 00000000..f2258468 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class CreateTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def create_time_entry(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_service.create(time_entry_data) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py new file mode 100644 index 00000000..a195c303 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class DeleteTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def delete_time_entry(self, id: int) -> TimeEntry: + return self.time_entry_service.delete(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py new file mode 100644 index 00000000..c070f8ac --- /dev/null 
+++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService +import typing + + +class GetLastestTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_service.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py new file mode 100644 index 00000000..410233e1 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py @@ -0,0 +1,9 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entry_by_id(self, id: int) -> TimeEntry: + return self.time_entry_service.get_by_id(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py new file mode 100644 index 00000000..c7bd3f27 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py @@ -0,0 +1,11 @@ +import typing + +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntriesUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entries(self) -> typing.List[TimeEntry]: + return self.time_entry_service.get_all() diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py new 
file mode 100644 index 00000000..0e2cdf70 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class UpdateTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def update_time_entry( + self, time_entry_id: int, new_time_entry: dict + ) -> TimeEntry: + return self.time_entry_service.update(time_entry_id, new_time_entry) diff --git a/V2/time_tracker/time_entries/_infrastructure/__init__.py b/V2/time_tracker/time_entries/_infrastructure/__init__.py new file mode 100644 index 00000000..1c7a7d6d --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..76b56455 --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_sql_dao import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py new file mode 100644 index 00000000..59988205 --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -0,0 +1,106 @@ +import dataclasses +import typing + +import sqlalchemy +import sqlalchemy.sql as sql + +import time_tracker.time_entries._domain as domain +from time_tracker._infrastructure import _db + + +class TimeEntriesSQLDao(domain.TimeEntriesDao): + + def __init__(self, database: _db.DB): + self.time_entry_key = [field.name for field in dataclasses.fields(domain.TimeEntry)] + 
self.db = database + self.time_entry = sqlalchemy.Table( + 'time_entry', + self.db.metadata, + sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True, autoincrement=True), + sqlalchemy.Column('start_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('owner_id', sqlalchemy.Integer), + sqlalchemy.Column('description', sqlalchemy.String), + sqlalchemy.Column('activity_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('activity.id')), + sqlalchemy.Column('uri', sqlalchemy.String), + sqlalchemy.Column( + 'technologies', + sqlalchemy.ARRAY(sqlalchemy.String).with_variant(sqlalchemy.String, "sqlite") + ), + sqlalchemy.Column('end_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('deleted', sqlalchemy.Boolean), + sqlalchemy.Column('timezone_offset', sqlalchemy.String), + sqlalchemy.Column('project_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('project.id')), + extend_existing=True, + ) + + def get_by_id(self, time_entry_id: int) -> domain.TimeEntry: + query = sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def get_all(self) -> typing.List[domain.TimeEntry]: + query = sql.select(self.time_entry) + result = self.db.get_session().execute(query) + return [ + self.__create_time_entry_dto(dict(time_entry)) + for time_entry in result + ] + + def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: + try: + new_time_entry = time_entry_data.__dict__ + new_time_entry.pop('id', None) + + query = self.time_entry.insert().values(new_time_entry).return_defaults() + time_entry = self.db.get_session().execute(query) + new_time_entry.update({"id": time_entry.inserted_primary_key[0]}) + return self.__create_time_entry_dto(new_time_entry) + + except sqlalchemy.exc.SQLAlchemyError: + return None + + def update(self, 
time_entry_id: int, time_entry_data: dict) -> domain.TimeEntry: + try: + query = self.time_entry.update().where(self.time_entry.c.id == time_entry_id).values(time_entry_data) + self.db.get_session().execute(query) + query_updated_time_entry = ( + sqlalchemy.sql.select(self.time_entry) + .where(self.time_entry.c.id == time_entry_id) + ) + time_entry = self.db.get_session().execute(query_updated_time_entry).one_or_none() + + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + except sqlalchemy.exc.SQLAlchemyError: + return None + + def delete(self, time_entry_id: int) -> domain.TimeEntry: + query = ( + self.time_entry.update() + .where(self.time_entry.c.id == time_entry_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_time_entry = sqlalchemy.sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query_deleted_time_entry).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: + time_entry.update({ + "start_date": str(time_entry.get("start_date")), + "end_date": str(time_entry.get("end_date"))}) + time_entry = {key: time_entry.get(key) for key in self.time_entry_key} + return domain.TimeEntry(**time_entry) + + def get_latest_entries(self, owner_id: int, limit: int = 20) -> typing.List[domain.TimeEntry]: + query = ( + self.time_entry.select() + .where(sqlalchemy.and_( + self.time_entry.c.owner_id == owner_id, + self.time_entry.c.deleted.is_(False) + )) + .order_by(self.time_entry.c.start_date.desc()) + .limit(limit) + ) + time_entries_data = self.db.get_session().execute(query) + list_time_entries = [dict(entry) for entry in time_entries_data] + return list_time_entries if len(list_time_entries) > 0 else None diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py new file mode 
100644 index 00000000..1b6c1826 --- /dev/null +++ b/V2/time_tracker/time_entries/interface.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._application import create_time_entry +from ._application import delete_time_entry +from ._application import get_latest_entries +from ._application import update_time_entry +from ._application import get_time_entries +from ._application import get_latest_entries diff --git a/V2/time_tracker/utils/enums/__init__.py b/V2/time_tracker/utils/enums/__init__.py new file mode 100644 index 00000000..317ca876 --- /dev/null +++ b/V2/time_tracker/utils/enums/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from .response_enums import ResponseEnums \ No newline at end of file diff --git a/V2/time_tracker/utils/enums/response_enums.py b/V2/time_tracker/utils/enums/response_enums.py new file mode 100644 index 00000000..6509ad4c --- /dev/null +++ b/V2/time_tracker/utils/enums/response_enums.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class ResponseEnums(Enum): + INVALID_ID = "Invalid Format ID" + NOT_FOUND = "Not found" + NOT_CREATED = "could not be created" + INCORRECT_BODY = "Incorrect body" + + MIME_TYPE = "application/json" diff --git a/V2/update_activity/function.json b/V2/update_activity/function.json deleted file mode 100644 index 97c9fb49..00000000 --- a/V2/update_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "PUT" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "update_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/commons/data_access_layer/file.py b/commons/data_access_layer/file.py new file mode 100644 index 00000000..879970dc --- /dev/null +++ b/commons/data_access_layer/file.py @@ -0,0 +1,31 @@ +import os +from azure.storage.blob 
import BlobServiceClient +from utils.azure_users import AzureConnection + + +class FileStream(): + CONNECTION_STRING = AzureConnection().get_blob_storage_connection_string() + container_name: str + + def __init__(self, container_name: str): + """ + Initialize the FileStream object, which is used to get the file stream from Azure Blob Storage. + `container_name`: The name of the Azure Storage container. + """ + self.container_name = container_name + + def get_file_stream(self, file_name: str): + if self.CONNECTION_STRING is None: + print("No connection string") + return None + + try: + account = BlobServiceClient.from_connection_string( + self.CONNECTION_STRING) + value = account.get_blob_client(self.container_name, file_name) + file = value.download_blob().readall() + print("Connection string is valid") + return file + except Exception as e: + print(f'Error: {e}') + return None diff --git a/commons/data_access_layer/file_stream.py b/commons/data_access_layer/file_stream.py deleted file mode 100644 index a705c061..00000000 --- a/commons/data_access_layer/file_stream.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -from azure.storage.blob.blockblobservice import BlockBlobService - -ACCOUNT_KEY = os.environ.get('AZURE_STORAGE_ACCOUNT_KEY') - -class FileStream: - def __init__(self, account_name:str, container_name:str): - """ - Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. - `account_name`: The name of the Azure Storage account. - `container_name`: The name of the Azure Storage container. 
- """ - self.account_name = account_name - self.container_name = container_name - self.blob_service = BlockBlobService(account_name=self.account_name, account_key=ACCOUNT_KEY) - - def get_file_stream(self, filename:str): - import tempfile - try: - local_file = tempfile.NamedTemporaryFile() - self.blob_service.get_blob_to_stream(self.container_name, filename, stream=local_file) - - local_file.seek(0) - return local_file - except Exception as e: - print(e) - return None \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index b7a6d667..4580007e 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -22,4 +22,4 @@ pyfiglet==0.7 factory_boy==3.2.0 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index dd6df0df..48bf85a7 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -15,6 +15,8 @@ flake8==3.7.9 WSGIserver==1.3 Werkzeug==0.16.1 Jinja2==2.11.3 +markupsafe==2.0.1 +itsdangerous==2.0.1 #WSGI server gunicorn==20.0.4 @@ -47,4 +49,4 @@ pytz==2019.3 python-dateutil==2.8.1 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index a3119774..a8b4c137 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,15 +1,18 @@ import json +import pytest -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream -fs = FileStream("storageaccounteystr82c5","tt-common-files") +fs = FileStream("tt-common-files") 
+@pytest.mark.skip(reason='file not in the repository') def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") - - assert len(json.load(result)) == 15 + + assert len(json.loads(result)) == 15 + def test__get_file_stream__return_None__when_not_enter_file_name_or_incorrect_name(): result = fs.get_file_stream("") - - assert result == None \ No newline at end of file + + assert result == None diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 86e34691..17efe406 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -19,6 +19,7 @@ fake_activity = ({"id": fake.random_int(1, 9999)}).update(valid_activity_data) + def test__get_all_activities__return_response__when_send_activities_get_request( client: FlaskClient, valid_header: dict ): @@ -28,6 +29,7 @@ def test__get_all_activities__return_response__when_send_activities_get_request( assert HTTPStatus.OK == response.status_code + def test_create_activity_should_succeed_with_valid_request( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -64,6 +66,7 @@ def test_create_activity_should_reject_bad_request( assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -90,6 +93,7 @@ def test_list_all_active( max_count=ANY, ) + @pytest.mark.skip(reason="There is currently no way to test this. 
Getting the value of the azure blob storage") def test_list_all_active_activities( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -118,7 +122,7 @@ def test_list_all_active_activities( max_count=ANY, ) - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -141,6 +145,7 @@ def test_get_activity_should_succeed_with_valid_id( repository_find_mock.assert_called_once_with(str(valid_id), ANY) +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_return_not_found_with_invalid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 22bd8965..8a888ac8 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -17,10 +17,10 @@ ], ) def test_azure_connection_is_test_user( - get_mock, - field_name, - field_value, - is_test_user_expected_value, + get_mock, + field_name, + field_value, + is_test_user_expected_value, ): response_mock = Mock() response_mock.status_code = 200 @@ -58,7 +58,7 @@ def test_azure_connection_get_test_user_ids(get_mock): @patch('utils.azure_users.AzureConnection.get_test_user_ids') @patch('utils.azure_users.AzureConnection.users') def test_azure_connection_get_non_test_users( - users_mock, get_test_user_ids_mock + users_mock, get_test_user_ids_mock ): test_user = AzureUser('ID1', None, None, [], []) non_test_user = AzureUser('ID2', None, None, [], []) @@ -81,7 +81,7 @@ def test_azure_connection_get_group_id_by_group_name(get_mock): group_id = 'ID1' azure_connection = AzureConnection() assert ( - azure_connection.get_group_id_by_group_name('group_name') == group_id + azure_connection.get_group_id_by_group_name('group_name') == group_id ) @@ -91,7 +91,7 @@ 
def test_azure_connection_get_group_id_by_group_name(get_mock): @patch('requests.post') @mark.parametrize('expected_value', [True, False]) def test_is_user_in_group( - post_mock, get_group_id_by_group_name_mock, expected_value + post_mock, get_group_id_by_group_name_mock, expected_value ): response_expected = {'value': expected_value} response_mock = Mock() @@ -104,8 +104,8 @@ def test_is_user_in_group( azure_connection = AzureConnection() assert ( - azure_connection.is_user_in_group('user_id', payload_mock) - == response_expected + azure_connection.is_user_in_group('user_id', payload_mock) + == response_expected ) @@ -164,7 +164,7 @@ def test_get_groups_and_users(get_mock): ], ) def test_get_groups_by_user_id( - get_groups_and_users_mock, user_id, groups_expected_value + get_groups_and_users_mock, user_id, groups_expected_value ): get_groups_and_users_mock.return_value = [ ('test-group-1', ['user-id1', 'user-id2']), @@ -180,7 +180,7 @@ def test_get_groups_by_user_id( @patch('utils.azure_users.AzureConnection.get_token', Mock()) @patch('utils.azure_users.AzureConnection.get_groups_and_users') def test_get_groups_and_users_called_once_by_instance( - get_groups_and_users_mock, + get_groups_and_users_mock, ): get_groups_and_users_mock.return_value = [] user_id = 'user-id1' @@ -198,7 +198,7 @@ def test_get_groups_and_users_called_once_by_instance( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.post') def test_add_user_to_group( - post_mock, get_group_id_by_group_name_mock, get_user_mock + post_mock, get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -224,7 +224,7 @@ def test_add_user_to_group( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.delete') def test_remove_user_from_group( - delete_mock, get_group_id_by_group_name_mock, get_user_mock + delete_mock, 
get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -247,7 +247,7 @@ def test_remove_user_from_group( @patch('utils.azure_users.AzureConnection.get_groups_and_users') @patch('requests.get') def test_users_functions_should_returns_all_users( - get_mock, get_groups_and_users_mock + get_mock, get_groups_and_users_mock ): first_response = Response() first_response.status_code = 200 @@ -264,4 +264,4 @@ def test_users_functions_should_returns_all_users( users = AzureConnection().users() - assert len(users) == 0 + assert len(users) == 2 diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..0810521c 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -7,13 +7,15 @@ CosmosDBModel, CosmosDBDao, CosmosDBRepository, + CustomError, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + class ActivityDao(CRUDDao): pass @@ -118,16 +120,27 @@ def find_all_from_blob_storage( self, event_context: EventContext, mapper: Callable = None, + activity_id: str = None, file_name: str = "activity.json", - ): + ): tenant_id_value = self.find_partition_key_value(event_context) function_mapper = self.get_mapper_or_dict(mapper) if tenant_id_value is None: - return [] - - fs = FileStream("storageaccounteystr82c5","tt-common-files") + return [{"result": "error", "message": "tenant_id is None"}] + + fs = FileStream("tt-common-files") result = fs.get_file_stream(file_name) - return list(map(function_mapper, json.load(result))) if result is not 
None else [] + result_json = list(map(function_mapper, json.loads( + result))) if result is not None else [] + if activity_id is not None: + result_json = [ + activity + for activity in result_json + if activity.id == activity_id + ] + + return result_json + class ActivityCosmosDBDao(APICosmosDBDao, ActivityDao): def __init__(self, repository): @@ -143,7 +156,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,11 +175,25 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, **kwargs) -> list: event_ctx = self.create_event_context("read-many") - activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx + ) return activities + def get(self, id: str = None) -> list: + event_ctx = self.create_event_context("read-many") + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx, + activity_id=id + ) + + if len(activities) > 0: + return activities[0] + else: + raise CustomError(404, "It was not found") + def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') activity_payload['status'] = Status.ACTIVE.value diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index a262ca73..dd8e65f1 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.44.0' +__version__ = '0.46.1' diff --git a/utils/azure_users.py b/utils/azure_users.py index 45a1a0f3..84f590a4 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -13,7 +13,8 @@ class MSConfig: 'MS_SECRET', 'MS_SCOPE', 'MS_ENDPOINT', - 'USERID' + 'USERID', + 'AZURE_STORAGE_CONNECTION_STRING' ] check_variables_are_defined(ms_variables) @@ -24,6 +25,7 @@ class MSConfig: SCOPE = os.environ.get('MS_SCOPE') ENDPOINT = 
os.environ.get('MS_ENDPOINT') USERID = os.environ.get('USERID') + AZURE_STORAGE_CONNECTION_STRING = os.environ.get('AZURE_STORAGE_CONNECTION_STRING') class BearerAuth(requests.auth.AuthBase): @@ -68,6 +70,9 @@ def __init__(self, config=MSConfig): self.access_token = self.get_token() self.groups_and_users = None + def get_blob_storage_connection_string(self) -> str: + return self.config.AZURE_STORAGE_CONNECTION_STRING + def get_msal_client(self): client = msal.ConfidentialClientApplication( self.config.CLIENT_ID, @@ -103,6 +108,7 @@ def users(self) -> List[AzureUser]: role_fields_params=role_fields_params, ) + final_endpoint = endpoint exists_users = True users = [] valid_users = [] @@ -110,8 +116,8 @@ def users(self) -> List[AzureUser]: while exists_users: response = requests.get( - endpoint, auth=BearerAuth(self.access_token) - ) + final_endpoint, auth=BearerAuth(self.access_token) + ) json_response = response.json() assert 200 == response.status_code assert 'value' in json_response @@ -126,11 +132,12 @@ def users(self) -> List[AzureUser]: request_token = remaining_users_link.split( skip_token_attribute )[1] - endpoint = endpoint + skip_token_attribute + request_token - - for i in range(len(users)): - if users[i]['mail'] is None: - valid_users.append(users[i]) + final_endpoint = endpoint + skip_token_attribute + request_token + + for user in users: + user_emails = user['otherMails'] + if(len(user_emails) != 0 and user_emails[0].split('@')[1] == 'ioet.com'): + valid_users.append(user) return [self.to_azure_user(user) for user in valid_users] @@ -181,7 +188,15 @@ def add_user_to_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is None: + self.groups_and_users = [(group_name, [user_id])] + elif group_name not in [gn for (gn, ul) in self.groups_and_users]: + self.groups_and_users.append((group_name, [user_id])) + else: + for (cache_group_name, user_ids) in self.groups_and_users: + if 
group_name == cache_group_name: + if user_id not in user_ids: + user_ids.append(user_id) return self.get_user(user_id) def remove_user_from_group(self, user_id, group_name): @@ -195,7 +210,11 @@ def remove_user_from_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is not None: + for (cache_group_name, user_ids) in self.groups_and_users: + if group_name == cache_group_name: + if user_id in user_ids: + user_ids.remove(user_id) return self.get_user(user_id) def get_non_test_users(self) -> List[AzureUser]: @@ -265,7 +284,6 @@ def get_groups_and_users(self): result = list(map(parse_item, response.json()['value'])) users_id = self.config.USERID.split(",") result[0][1].extend(users_id) - return result def is_user_in_group(self, user_id, data: dict): diff --git a/utils/extend_model.py b/utils/extend_model.py index ce39d5b7..9040895f 100644 --- a/utils/extend_model.py +++ b/utils/extend_model.py @@ -96,7 +96,7 @@ def add_project_info_to_time_entries(time_entries, projects): setattr(time_entry, 'customer_name', project.customer_name) -def add_activity_name_to_time_entries(time_entries, activities): +def add_activity_name_to_time_entries_v1(time_entries, activities): for time_entry in time_entries: for activity in activities: if time_entry.activity_id == activity.id: @@ -107,6 +107,19 @@ def add_activity_name_to_time_entries(time_entries, activities): ) setattr(time_entry, 'activity_name', name) +def add_activity_name_to_time_entries(time_entries, activities): + for time_entry in time_entries: + result = [x for x in activities if time_entry.activity_id == x.id] + if result: + name = ( + result[0].name + " (archived)" + if result[0].is_deleted() + else result[0].name + ) + setattr(time_entry, 'activity_name', name) + else: + setattr(time_entry, 'activity_name', "activity") + def add_user_email_to_time_entries(time_entries, users): for time_entry in time_entries: diff --git a/utils/time.py 
b/utils/time.py index 37082d76..aa82dab3 100644 --- a/utils/time.py +++ b/utils/time.py @@ -60,10 +60,10 @@ def to_utc(date: datetime) -> datetime: localized = _tz.localize(date) return localized - from dateutil.parser import isoparse + from dateutil import parser - no_timezone_info = isoparse(value).tzinfo is None + no_timezone_info = parser.parse(value).tzinfo is None if no_timezone_info: - return to_utc(isoparse(value)) + return to_utc(parser.parse(value)) else: - return isoparse(value) + return parser.parse(value)