From 00ed5ae9afc1b3d5c9f60c25007132a7f737742d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Fri, 16 Jul 2021 16:23:41 -0500 Subject: [PATCH 01/74] fix: TT-288 Fix and improve tests on find_interception_with_date_range function (#308) --- .../time_entries/time_entries_model_test.py | 238 +++--------------- .../time_entries/time_entries_repository.py | 9 +- 2 files changed, 42 insertions(+), 205 deletions(-) diff --git a/tests/time_tracker_api/time_entries/time_entries_model_test.py b/tests/time_tracker_api/time_entries/time_entries_model_test.py index fd04166e..6ea0d88b 100644 --- a/tests/time_tracker_api/time_entries/time_entries_model_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_model_test.py @@ -11,221 +11,55 @@ ) -def create_time_entry( - start_date: str, - end_date: str, - owner_id: str, - tenant_id: str, - mocker, - event_context: EventContext, - time_entry_repository: TimeEntryCosmosDBRepository, -) -> TimeEntryCosmosDBModel: - data = { +def test_find_interception_with_date_range_should_return_true_if_there_are_collisions(): + owner_id = Faker().uuid4() + tenant_id = Faker().uuid4() + entry_start_date = "2020-10-01T05:00:00.000Z" + entry_end_date = "2020-10-01T10:00:00.000Z" + + collision_entry = { "project_id": Faker().uuid4(), "activity_id": Faker().uuid4(), "description": Faker().paragraph(nb_sentences=2), - "start_date": start_date, - "end_date": end_date, - "owner_id": owner_id, + "start_date": entry_start_date, + "end_date": entry_end_date, + "owner_id": Faker().uuid4(), "tenant_id": tenant_id, } + time_entry_repository = TimeEntryCosmosDBRepository() + query_items_mock = Mock(return_value=[collision_entry]) + time_entry_repository.container = Mock() + time_entry_repository.container.query_items = query_items_mock - mocker.patch( - 'time_tracker_api.time_entries.time_entries_repository.are_related_entry_entities_valid', - return_value={ - "is_valid": True, - "status_code": HTTPStatus.OK, - "message": "Related entry entities valid", - }, - ) - - created_item = time_entry_repository.create( - data, event_context, mapper=TimeEntryCosmosDBModel - ) - return created_item - - -@pytest.mark.parametrize( - 'start_date,end_date,start_date_,end_date_', - [ - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T07:00:00.000Z", - "2020-10-01T12:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T02:00:00.000Z", - "2020-10-01T07:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T02:00:00.000Z", - "2020-10-01T12:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T06:00:00.000Z", - "2020-10-01T07:00:00.000Z", - ), - ], -) -def test_find_interception_with_date_range_should_find( - start_date: str, - end_date: str, - start_date_: str, - end_date_: str, - owner_id: str, - tenant_id: str, - mocker, - time_entry_repository: TimeEntryCosmosDBRepository, - event_context: EventContext, -): - existing_item = create_time_entry( - start_date, - end_date, - owner_id, - tenant_id, - mocker, - event_context, - time_entry_repository, - ) - - try: - result = time_entry_repository.find_interception_with_date_range( - start_date_, end_date_, owner_id, tenant_id - ) - - assert result is not None - assert len(result) > 0 - assert any([existing_item.id == item.id for item in result]) - finally: - time_entry_repository.delete_permanently( - existing_item.id, event_context + exist_collision_entries = ( + time_entry_repository.find_interception_with_date_range( + start_date=entry_start_date, + end_date=entry_end_date, + owner_id=owner_id, + tenant_id=tenant_id, ) - - -@pytest.mark.parametrize( - 'start_date,end_date,start_date_,end_date_', - [ - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T15:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T12:00:00.000Z", - "2020-10-01T15:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T02:00:00.000Z", - "2020-10-01T05:00:00.000Z", - ), - ( - "2020-10-01T05:00:00.000Z", - "2020-10-01T10:00:00.000Z", - "2020-10-01T02:00:00.000Z", - "2020-10-01T04:00:00.000Z", - ), - ], -) -def test_find_interception_with_date_range_should_not_find( - start_date: str, - end_date: str, - start_date_: str, - end_date_: str, - owner_id: str, - tenant_id: str, - time_entry_repository: TimeEntryCosmosDBRepository, - event_context: EventContext, - mocker, -): - existing_item = create_time_entry( - start_date, - end_date, - owner_id, - tenant_id, - mocker, - event_context, - time_entry_repository, ) + assert exist_collision_entries is True - try: - result = time_entry_repository.find_interception_with_date_range( - start_date_, end_date_, owner_id, tenant_id - ) - - assert result == [] - assert len(result) == 0 - assert not any([existing_item.id == item.id for item in result]) - finally: - time_entry_repository.delete_permanently( - existing_item.id, event_context - ) - - -def test_find_interception_should_ignore_id_of_existing_item( - owner_id: str, - tenant_id: str, - time_entry_repository: TimeEntryCosmosDBRepository, - event_context: EventContext, - mocker, -): - start_date = "2020-10-01T05:00:00.000Z" - end_date = "2020-10-01T10:00:00.000Z" - - existing_item = create_time_entry( - start_date, - end_date, - owner_id, - tenant_id, - mocker, - event_context, - time_entry_repository, - ) - try: - colliding_result = ( - time_entry_repository.find_interception_with_date_range( - start_date, end_date, owner_id, tenant_id - ) - ) +def test_find_interception_with_date_range_should_return_false_if_there_are_not_collisions(): + entry_start_date = "2020-10-01T05:00:00.000Z" + entry_end_date = "2020-10-01T10:00:00.000Z" - non_colliding_result = ( - time_entry_repository.find_interception_with_date_range( - start_date, - end_date, - owner_id, - tenant_id, - ignore_id=existing_item.id, - ) - ) + time_entry_repository = TimeEntryCosmosDBRepository() + query_items_mock = Mock(return_value=[]) + time_entry_repository.container = Mock() + time_entry_repository.container.query_items = query_items_mock - assert colliding_result is not None - assert any([existing_item.id == item.id for item in colliding_result]) - assert non_colliding_result is not None - assert not any( - [existing_item.id == item.id for item in non_colliding_result] - ) - finally: - time_entry_repository.delete_permanently( - existing_item.id, event_context + exist_collision_entries = ( + time_entry_repository.find_interception_with_date_range( + start_date=entry_start_date, + end_date=entry_end_date, + owner_id=Faker().uuid4(), + tenant_id=Faker().uuid4(), ) + ) + assert exist_collision_entries is False def test_find_running_should_return_running_time_entry( diff --git a/time_tracker_api/time_entries/time_entries_repository.py b/time_tracker_api/time_entries/time_entries_repository.py index 5abed126..d773f2a9 100644 --- a/time_tracker_api/time_entries/time_entries_repository.py +++ b/time_tracker_api/time_entries/time_entries_repository.py @@ -265,7 +265,9 @@ def find_interception_with_date_range( ) function_mapper = self.get_mapper_or_dict(mapper) - return list(map(function_mapper, result)) + collision_entries = list(map(function_mapper, result)) + exist_collision_entries = len(collision_entries) > 0 + return exist_collision_entries def find_running( self, tenant_id: str, owner_id: str, mapper: Callable = None @@ -331,14 +333,15 @@ def validate_data(self, data, event_context: EventContext): description="You cannot end a time entry in the future", ) - collision = self.find_interception_with_date_range( + exist_collision_entries = self.find_interception_with_date_range( start_date=start_date, end_date=data.get('end_date'), owner_id=event_context.user_id, tenant_id=event_context.tenant_id, ignore_id=data.get('id'), ) - if len(collision) > 0: + + if exist_collision_entries: raise CustomError( HTTPStatus.UNPROCESSABLE_ENTITY, description="There is another time entry in that date range", From c8c22d3c6d95d006011b26bf70f4e8ba7242f8ab Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 16 Jul 2021 21:31:54 +0000 Subject: [PATCH 02/74] 0.36.4 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 87d384d6..35437293 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.36.4 (2021-07-16) +### Fix +* TT-288 Fix and improve tests on find_interception_with_date_range function ([#308](https://github.com/ioet/time-tracker-backend/issues/308)) ([`00ed5ae`](https://github.com/ioet/time-tracker-backend/commit/00ed5ae9afc1b3d5c9f60c25007132a7f737742d)) + ## v0.36.3 (2021-07-14) ### Fix * TT-274 fix error on archive a customer removes the project ([#307](https://github.com/ioet/time-tracker-backend/issues/307)) ([`4538307`](https://github.com/ioet/time-tracker-backend/commit/4538307407d8f482b1419db6889b6ecc3013950e)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 9faa3d8f..c1935a24 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.36.3' +__version__ = '0.36.4' From 50f8d468d77835a6f90b958d4642d338f36d5f37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Mon, 26 Jul 2021 16:12:52 -0500 Subject: [PATCH 03/74] feat: TT-293 Create Script to generate data in the Database (#310) * refactor: TT-293 refactor docker-compose to execute a valid entrypoint.sh * feat: TT-293 add CLI functionality in order to manage entities data in the database * feat: TT-293 add extra security layer to avoid use cli in a non-development environment --- cosmosdb-emulator/init_emulator.sh | 36 ----- cosmosdb-emulator/init_emulator_db.py | 64 -------- cosmosdb_emulator/cli.sh | 22 +++ .../entrypoint.sh | 10 +- cosmosdb_emulator/init_emulator_db.py | 63 ++++++++ .../time_tracker_cli/data_target/cosmos.py | 116 ++++++++++++++ .../data_target/data_target.py | 11 ++ .../time_tracker_cli/enums/entites.py | 12 ++ .../factories/activity_factory.py | 29 ++++ .../factories/customer_factory.py | 24 +++ .../factories/project_factory.py | 30 ++++ .../factories/project_type_factory.py | 28 ++++ .../factories/time_entry_factory.py | 38 +++++ cosmosdb_emulator/time_tracker_cli/main.py | 90 +++++++++++ .../time_tracker_cli/providers/common.py | 7 + .../time_tracker_cli/questions/common.py | 69 ++++++++ .../time_tracker_cli/questions/entries.py | 108 +++++++++++++ .../activity_management_strategy.py | 36 +++++ .../customer_management_strategy.py | 38 +++++ .../strategies/management_context.py | 46 ++++++ .../strategies/management_strategy.py | 34 ++++ .../strategies/project_management_strategy.py | 35 +++++ .../project_type_management_strategy.py | 37 +++++ .../time_entry_management_strategy.py | 103 ++++++++++++ .../time_tracker_cli/utils/activity.py | 25 +++ .../time_tracker_cli/utils/common.py | 31 ++++ .../time_tracker_cli/utils/customer.py | 20 +++ .../time_tracker_cli/utils/project.py | 36 +++++ .../time_tracker_cli/utils/project_type.py | 33 ++++ .../time_tracker_cli/utils/time_entry.py | 147 ++++++++++++++++++ .../time_tracker_cli/validators/max_amount.py | 21 +++ .../time_tracker_cli/validators/number.py | 21 +++ .../time_tracker_cli/validators/uuid.py | 15 ++ cosmosdb_emulator/verify_environment.sh | 13 ++ docker-compose.yml | 38 ++--- requirements/time_tracker_api/dev.txt | 5 + 36 files changed, 1362 insertions(+), 129 deletions(-) delete mode 100644 cosmosdb-emulator/init_emulator.sh delete mode 100644 cosmosdb-emulator/init_emulator_db.py create mode 100755 cosmosdb_emulator/cli.sh rename {cosmosdb-emulator => cosmosdb_emulator}/entrypoint.sh (58%) create mode 100644 cosmosdb_emulator/init_emulator_db.py create mode 100644 cosmosdb_emulator/time_tracker_cli/data_target/cosmos.py create mode 100644 cosmosdb_emulator/time_tracker_cli/data_target/data_target.py create mode 100644 cosmosdb_emulator/time_tracker_cli/enums/entites.py create mode 100644 cosmosdb_emulator/time_tracker_cli/factories/activity_factory.py create mode 100644 cosmosdb_emulator/time_tracker_cli/factories/customer_factory.py create mode 100644 cosmosdb_emulator/time_tracker_cli/factories/project_factory.py create mode 100644 cosmosdb_emulator/time_tracker_cli/factories/project_type_factory.py create mode 100644 cosmosdb_emulator/time_tracker_cli/factories/time_entry_factory.py create mode 100644 cosmosdb_emulator/time_tracker_cli/main.py create mode 100644 cosmosdb_emulator/time_tracker_cli/providers/common.py create mode 100644 cosmosdb_emulator/time_tracker_cli/questions/common.py create mode 100644 cosmosdb_emulator/time_tracker_cli/questions/entries.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/activity_management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/customer_management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/management_context.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/project_management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/project_type_management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/strategies/time_entry_management_strategy.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/activity.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/common.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/customer.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/project.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/project_type.py create mode 100644 cosmosdb_emulator/time_tracker_cli/utils/time_entry.py create mode 100644 cosmosdb_emulator/time_tracker_cli/validators/max_amount.py create mode 100644 cosmosdb_emulator/time_tracker_cli/validators/number.py create mode 100644 cosmosdb_emulator/time_tracker_cli/validators/uuid.py create mode 100644 cosmosdb_emulator/verify_environment.sh diff --git a/cosmosdb-emulator/init_emulator.sh b/cosmosdb-emulator/init_emulator.sh deleted file mode 100644 index 545ed6a3..00000000 --- a/cosmosdb-emulator/init_emulator.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh - -containerId=$(docker ps --all | grep 'Time-Tracker-Cosmos-Db' | awk '{print $1}') -if [ -z "$containerId" ]; then - ipaddr="`ifconfig | grep "inet " | grep -Fv 127.0.0.1 | awk '{print $2}' | head -n 1`" - containerId=$(docker create -p 8081:8081 -p 10251:10251 -p 10252:10252 -p 10253:10253 -p 10254:10254 -m 3g --cpus=2.0 --name=Time-Tracker-Cosmos-Db -e AZURE_COSMOS_EMULATOR_PARTITION_COUNT=10 -e AZURE_COSMOS_EMULATOR_ENABLE_DATA_PERSISTENCE=true -e AZURE_COSMOS_EMULATOR_IP_ADDRESS_OVERRIDE=$ipaddr -it mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator) - echo "##vso[task.setvariable variable=cosmosDbContainerId]$containerId"> /dev/tty -fi -docker start $containerId - -until curl -ksf "127.0.0.1:8081/_explorer/emulator.pem" -o 'cosmosdb-emulator/emulatorcert.crt'; do - echo "Waiting for Cosmosdb to start..." - sleep 10 -done - -echo "Container cosmosemulator started." - -echo "Checking SSL" -isInstalled=$( awk -v cmd='openssl x509 -noout -subject' '/BEGIN/{close(cmd)};{print | cmd}' < /etc/ssl/certs/ca-certificates.crt | grep host ) || : - -echo "ps" -echo "$isInstalled" - -if [ -z "$isInstalled" ]; then - echo "Importing SSL..." - cp cosmosdb-emulator/emulatorcert.crt /usr/local/share/ca-certificates/ - cp cosmosdb-emulator/emulatorcert.crt /usr/share/ca-certificates/ - update-ca-certificates --fresh - echo "Importing Containers..." - export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ - python3 ./cosmosdb-emulator/init_emulator_db.py - echo "Installation succeed!!" -fi - -echo "Starting Flask!!" -flask run \ No newline at end of file diff --git a/cosmosdb-emulator/init_emulator_db.py b/cosmosdb-emulator/init_emulator_db.py deleted file mode 100644 index 31868293..00000000 --- a/cosmosdb-emulator/init_emulator_db.py +++ /dev/null @@ -1,64 +0,0 @@ -from azure.cosmos import exceptions, CosmosClient, PartitionKey -import os, sys, json - -with open('/usr/src/app/cosmosdb-emulator/seed_database.json') as database_file: - seed_database=json.load(database_file) - -sys.path.append("/usr/src/app") - -DATABASE_ACCOUNT_URI = os.environ.get('DATABASE_ACCOUNT_URI') -DATABASE_MASTER_KEY = os.environ.get('DATABASE_MASTER_KEY') - -endpoint = DATABASE_ACCOUNT_URI -key = DATABASE_MASTER_KEY - -# -client = CosmosClient(endpoint, key) -# -database_name = 'time-tracker-db' -database = client.create_database_if_not_exists(id=database_name) -# - -print("Creating TimeTracker initial initial database schema...") - -try: - print('- Project') - from time_tracker_api.projects.projects_model import container_definition as project_definition - project_container=database.create_container_if_not_exists(**project_definition) - for project in seed_database['projects']: - project_container.create_item(body=project) - - print('- Project type') - from time_tracker_api.project_types.project_types_model import container_definition as project_type_definition - project_type_container=database.create_container_if_not_exists(**project_type_definition) - for project_type in seed_database['project_types']: - project_type_container.create_item(body=project_type) - - print('- Activity') - from time_tracker_api.activities.activities_model import container_definition as activity_definition - activity_container=database.create_container_if_not_exists(**activity_definition) - for activity in seed_database['activities']: - activity_container.create_item(body=activity) - - print('- Customer') - from time_tracker_api.customers.customers_model import container_definition as customer_definition - customer_container=database.create_container_if_not_exists(**customer_definition) - for customer in seed_database['customers']: - customer_container.create_item(body=customer) - - print('- Time entry') - from time_tracker_api.time_entries.time_entries_model import container_definition as time_entry_definition - time_entry_container=database.create_container_if_not_exists(**time_entry_definition) - for time_entry in seed_database['time_entries']: - time_entry_container.create_item(body=time_entry) - - print('- Technology') - from time_tracker_api.technologies.technologies_model import container_definition as technologies_definition - database.create_container_if_not_exists(**technologies_definition) -except exceptions.CosmosResourceExistsError as e: - print("Unexpected error while creating initial database schema: %s" % e.message) - -database_file.close() - -print("Done!") - diff --git a/cosmosdb_emulator/cli.sh b/cosmosdb_emulator/cli.sh new file mode 100755 index 00000000..709f6392 --- /dev/null +++ b/cosmosdb_emulator/cli.sh @@ -0,0 +1,22 @@ +#!/bin/sh +COMMAND=$@ +API_CONTAINER_NAME="time-tracker-backend_api" +TIME_TRACKER_CLI_URL="cosmosdb_emulator/time_tracker_cli" +DEFAULT_SCRIPT_NAME="main.py" +FIRST_ARG=$1 + +execute(){ + docker exec -it $API_CONTAINER_NAME sh "cosmosdb_emulator/verify_environment.sh" + + if [ "$FIRST_ARG" != "$DEFAULT_SCRIPT_NAME" ]; then + echo "Do not forget that the file name is $DEFAULT_SCRIPT_NAME and needs to be sent as first parameter" + echo "For example: ./cli.sh main.py" + exit 0 + fi + + TIME_TRACKER_CLI="python3 $COMMAND" + + docker exec -it $API_CONTAINER_NAME sh -c "cd $TIME_TRACKER_CLI_URL && $TIME_TRACKER_CLI" +} + +execute \ No newline at end of file diff --git a/cosmosdb-emulator/entrypoint.sh b/cosmosdb_emulator/entrypoint.sh similarity index 58% rename from cosmosdb-emulator/entrypoint.sh rename to cosmosdb_emulator/entrypoint.sh index 8978d832..3960bd26 100644 --- a/cosmosdb-emulator/entrypoint.sh +++ b/cosmosdb_emulator/entrypoint.sh @@ -1,19 +1,21 @@ #!/bin/sh -until curl -ksf "${DATABASE_ACCOUNT_URI}/_explorer/emulator.pem" -o 'cosmosdb-emulator/emulatorcert.crt'; do +until curl -ksf "${DATABASE_ACCOUNT_URI}/_explorer/emulator.pem" -o 'cosmosdb_emulator/emulatorcert.crt'; do echo "Waiting for Cosmosdb to start..." sleep 10 done +source cosmosdb_emulator/verify_environment.sh + echo "Container cosmosemulator started." echo "Importing SSL..." -cp cosmosdb-emulator/emulatorcert.crt /usr/local/share/ca-certificates/ -cp cosmosdb-emulator/emulatorcert.crt /usr/share/ca-certificates/ +cp cosmosdb_emulator/emulatorcert.crt /usr/local/share/ca-certificates/ +cp cosmosdb_emulator/emulatorcert.crt /usr/share/ca-certificates/ update-ca-certificates --fresh echo "Importing Containers..." export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ -python3 ./cosmosdb-emulator/init_emulator_db.py +python3 ./cosmosdb_emulator/init_emulator_db.py echo "Installation succeed!!" echo "Starting Flask!!" diff --git a/cosmosdb_emulator/init_emulator_db.py b/cosmosdb_emulator/init_emulator_db.py new file mode 100644 index 00000000..6b4d3438 --- /dev/null +++ b/cosmosdb_emulator/init_emulator_db.py @@ -0,0 +1,63 @@ +from azure.cosmos import exceptions, CosmosClient, PartitionKey +import os, sys + +sys.path.append("/usr/src/app") + +DATABASE_ACCOUNT_URI = os.environ.get('DATABASE_ACCOUNT_URI') +DATABASE_MASTER_KEY = os.environ.get('DATABASE_MASTER_KEY') +DATABASE_NAME = os.environ.get('DATABASE_NAME') + +client = CosmosClient(DATABASE_ACCOUNT_URI, DATABASE_MASTER_KEY) +database = client.create_database_if_not_exists(id=DATABASE_NAME) + +print("Creating TimeTracker initial initial database schema...") + +try: + print('- Project') + from time_tracker_api.projects.projects_model import ( + container_definition as project_definition, + ) + + database.create_container_if_not_exists(**project_definition) + + print('- Project type') + from time_tracker_api.project_types.project_types_model import ( + container_definition as project_type_definition, + ) + + database.create_container_if_not_exists(**project_type_definition) + + print('- Activity') + from time_tracker_api.activities.activities_model import ( + container_definition as activity_definition, + ) + + database.create_container_if_not_exists(**activity_definition) + + print('- Customer') + from time_tracker_api.customers.customers_model import ( + container_definition as customer_definition, + ) + + database.create_container_if_not_exists(**customer_definition) + + print('- Time entry') + from time_tracker_api.time_entries.time_entries_model import ( + container_definition as time_entry_definition, + ) + + database.create_container_if_not_exists(**time_entry_definition) + + print('- Technology') + from time_tracker_api.technologies.technologies_model import ( + container_definition as technologies_definition, + ) + + database.create_container_if_not_exists(**technologies_definition) +except exceptions.CosmosResourceExistsError as e: + print( + "Unexpected error while creating initial database schema: %s" + % e.message + ) + +print("Done!") diff --git a/cosmosdb_emulator/time_tracker_cli/data_target/cosmos.py b/cosmosdb_emulator/time_tracker_cli/data_target/cosmos.py new file mode 100644 index 00000000..39c72acd --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/data_target/cosmos.py @@ -0,0 +1,116 @@ +import os +import sys + +from azure.cosmos import CosmosClient +from azure.cosmos.exceptions import ( + CosmosResourceExistsError, + CosmosResourceNotFoundError, +) + +from cosmosdb_emulator.time_tracker_cli.data_target.data_target import ( + DataTarget, +) +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.utils.activity import get_activity_json +from cosmosdb_emulator.time_tracker_cli.utils.customer import get_customer_json +from cosmosdb_emulator.time_tracker_cli.utils.project import get_project_json +from cosmosdb_emulator.time_tracker_cli.utils.project_type import ( + get_project_type_json, +) +from cosmosdb_emulator.time_tracker_cli.utils.time_entry import get_entry_json + +from time_tracker_api.customers.customers_model import ( + container_definition as customer_definition, +) +from time_tracker_api.project_types.project_types_model import ( + container_definition as project_type_definition, +) +from time_tracker_api.projects.projects_model import ( + container_definition as project_definition, +) +from time_tracker_api.activities.activities_model import ( + container_definition as activity_definition, +) +from time_tracker_api.time_entries.time_entries_model import ( + container_definition as time_entry_definition, +) + +DATABASE_ACCOUNT_URI = os.environ.get('DATABASE_ACCOUNT_URI') +DATABASE_MASTER_KEY = os.environ.get('DATABASE_MASTER_KEY') +DATABASE_NAME = os.environ.get('DATABASE_NAME') + + +class CosmosDataTarget(DataTarget): + def __init__(self): + self.cosmos_client = CosmosClient( + DATABASE_ACCOUNT_URI, DATABASE_MASTER_KEY + ) + self.database = self.cosmos_client.create_database_if_not_exists( + DATABASE_NAME + ) + + @staticmethod + def get_container_definition_by_entity_name(container_name: str) -> dict: + containers_definition = { + TimeTrackerEntities.CUSTOMER.value: customer_definition, + TimeTrackerEntities.PROJECT_TYPE.value: project_type_definition, + TimeTrackerEntities.PROJECT.value: project_definition, + TimeTrackerEntities.ACTIVITY.value: activity_definition, + TimeTrackerEntities.TIME_ENTRY.value: time_entry_definition, + } + + return containers_definition.get(container_name) + + @staticmethod + def get_json_method_entity_name(entity_name): + available_json = { + TimeTrackerEntities.CUSTOMER.value: get_customer_json, + TimeTrackerEntities.PROJECT_TYPE.value: get_project_type_json, + TimeTrackerEntities.PROJECT.value: get_project_json, + TimeTrackerEntities.ACTIVITY.value: get_activity_json, + TimeTrackerEntities.TIME_ENTRY.value: get_entry_json, + } + + return available_json.get(entity_name) + + def delete(self, entities: dict): + for entity in entities: + entity_container_definition = ( + CosmosDataTarget.get_container_definition_by_entity_name( + entity + ) + ) + entity_container_id = entity_container_definition.get('id') + try: + self.database.delete_container(entity_container_id) + self.database.create_container_if_not_exists( + **entity_container_definition + ) + except CosmosResourceNotFoundError: + pass + + def save(self, entities: dict): + for entity in entities: + entity_container_definition = ( + CosmosDataTarget.get_container_definition_by_entity_name( + entity + ) + ) + entities_list = entities.get(entity) + entity_container = self.database.create_container_if_not_exists( + **entity_container_definition + ) + + for element in entities_list: + get_json_entity = CosmosDataTarget.get_json_method_entity_name( + entity + ) + json_entity = get_json_entity(element) + try: + entity_container.create_item(body=json_entity) + except CosmosResourceExistsError: + print( + f'The {entity} entity with the ID ({element.id}) already exists, so it has not been created.' + ) diff --git a/cosmosdb_emulator/time_tracker_cli/data_target/data_target.py b/cosmosdb_emulator/time_tracker_cli/data_target/data_target.py new file mode 100644 index 00000000..0a7a3854 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/data_target/data_target.py @@ -0,0 +1,11 @@ +from abc import ABC, abstractmethod + + +class DataTarget(ABC): + @abstractmethod + def save(self, entities: dict): + pass + + @abstractmethod + def delete(self, entities: set): + pass diff --git a/cosmosdb_emulator/time_tracker_cli/enums/entites.py b/cosmosdb_emulator/time_tracker_cli/enums/entites.py new file mode 100644 index 00000000..022b7967 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/enums/entites.py @@ -0,0 +1,12 @@ +from enum import Enum + + +class TimeTrackerEntities(Enum): + def __str__(self): + return str(self.value) + + CUSTOMER = 'Customers' + PROJECT = 'Projects' + PROJECT_TYPE = 'Project-Types' + ACTIVITY = 'Activities' + TIME_ENTRY = 'Time-entries' diff --git a/cosmosdb_emulator/time_tracker_cli/factories/activity_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/activity_factory.py new file mode 100644 index 00000000..13d7c843 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/factories/activity_factory.py @@ -0,0 +1,29 @@ +from typing import NamedTuple + +from factory import Factory, Faker + +from cosmosdb_emulator.time_tracker_cli.providers.common import CommonProvider +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_time_tracker_tenant_id, +) + +Faker.add_provider(CommonProvider) + + +class Activity(NamedTuple): + id: str + name: str + description: str + status: str + tenant_id: str + + +class ActivityFactory(Factory): + class Meta: + model = Activity + + id = Faker('uuid4') + name = Faker('job') + description = Faker('sentence', nb_words=6) + status = Faker('status') + tenant_id = get_time_tracker_tenant_id() diff --git a/cosmosdb_emulator/time_tracker_cli/factories/customer_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/customer_factory.py new file mode 100644 index 00000000..4c63d0f3 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/factories/customer_factory.py @@ -0,0 +1,24 @@ +from typing import NamedTuple + +from factory import Factory, Faker + +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_time_tracker_tenant_id, +) + + +class Customer(NamedTuple): + id: str + name: str + description: str + tenant_id: str + + +class CustomerFactory(Factory): + class Meta: + model = Customer + + id = Faker('uuid4') + name = Faker('company') + description = Faker('sentence', nb_words=10) + tenant_id = get_time_tracker_tenant_id() diff --git a/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py new file mode 100644 index 00000000..a03f9ae0 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py @@ -0,0 +1,30 @@ +from typing import NamedTuple + +from factory import Factory, Faker + +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_time_tracker_tenant_id, +) + + +class Project(NamedTuple): + id: str + name: str + description: str + project_type_id: int + customer_id: str + tenant_id: str + + +class ProjectFactory(Factory): + class Meta: + model = Project + + def __init__(self, project_type_id, customer_id): + self.project_type_id = project_type_id + self.customer_id = customer_id + + id = Faker('uuid4') + name = Faker('name') + description = Faker('sentence', nb_words=10) + tenant_id = get_time_tracker_tenant_id() diff --git a/cosmosdb_emulator/time_tracker_cli/factories/project_type_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/project_type_factory.py new file mode 100644 index 00000000..3978100b --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/factories/project_type_factory.py @@ -0,0 +1,28 @@ +from typing import NamedTuple + +from factory import Factory, Faker + +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_time_tracker_tenant_id, +) + + +class ProjectType(NamedTuple): + id: str + name: str + description: str + customer_id: str + tenant_id: str + + +class ProjectTypeFactory(Factory): + class Meta: + model = ProjectType + + def __init__(self, customer_id): + self.customer_id = customer_id + + id = Faker('uuid4') + name = Faker('name') + description = Faker('sentence', nb_words=10) + tenant_id = get_time_tracker_tenant_id() diff --git a/cosmosdb_emulator/time_tracker_cli/factories/time_entry_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/time_entry_factory.py new file mode 100644 index 00000000..5cf1bd9d --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/factories/time_entry_factory.py @@ -0,0 +1,38 @@ +from typing import NamedTuple, List + +from factory import Factory, Faker + +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_time_tracker_tenant_id, +) + + +class TimeEntry(NamedTuple): + project_id: str + start_date: str + owner_id: str + id: str + tenant_id: str + description: str + activity_id: str + technologies: List[str] + end_date: str + + +class TimeEntryFactory(Factory): + class Meta: + model = TimeEntry + + def __init__( + self, owner_id, start_date, end_date, project_id, activity_id + ): + self.start_date = start_date + self.end_date = end_date + self.owner_id = owner_id + self.project_id = project_id + self.activity_id = activity_id + + id = Faker('uuid4') + description = Faker('sentence', nb_words=10) + technologies = Faker('words', nb=3) + tenant_id = get_time_tracker_tenant_id() diff --git a/cosmosdb_emulator/time_tracker_cli/main.py b/cosmosdb_emulator/time_tracker_cli/main.py new file mode 100644 index 00000000..e8e1defe --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/main.py @@ -0,0 +1,90 @@ +import sys + +project_source = '/usr/src/app' +sys.path.append(project_source) + +import click +from pyfiglet import Figlet + +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.activity_management_strategy import ( + ActivityManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.time_entry_management_strategy import ( + TimeEntryManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.project_management_strategy import ( + ProjectManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.customer_management_strategy import ( + CustomerManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.project_type_management_strategy import ( + ProjectTypeManagementStrategy, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_context import ( + ManagementContext, +) +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_entity, + time_tracker_entities, + ask_action, + entities_actions, +) +from cosmosdb_emulator.time_tracker_cli.data_target.cosmos import ( + CosmosDataTarget, +) + + +@click.command() +@click.option( + '--action', + '-a', + type=click.Choice(entities_actions, case_sensitive=True), + help='Action to be implemented in the entities.', +) +@click.option( + '--entity', + '-e', + type=click.Choice(time_tracker_entities, case_sensitive=True), + help='Entity to which the action is to be applied', +) +def main(action: str, entity: str): + time_tracker_cli_header = Figlet(font='slant').renderText( + 'Time Tracker CLI' + ) + print(time_tracker_cli_header) + + selected_action = action if action else ask_action() + selected_entity = entity if entity else ask_entity(action=selected_action) + + management_strategy = get_strategy_by_selected_entity(selected_entity) + data_target = CosmosDataTarget() + management_context = ManagementContext(management_strategy, data_target) + + if selected_action == 'Delete': + management_context.delete_data() + sys.exit() + + management_context.create_data() + + +def get_strategy_by_selected_entity(selected_entity) -> ManagementStrategy: + strategies = { + TimeTrackerEntities.TIME_ENTRY.value: TimeEntryManagementStrategy(), + TimeTrackerEntities.PROJECT.value: ProjectManagementStrategy(), + TimeTrackerEntities.ACTIVITY.value: ActivityManagementStrategy(), + TimeTrackerEntities.CUSTOMER.value: CustomerManagementStrategy(), + TimeTrackerEntities.PROJECT_TYPE.value: ProjectTypeManagementStrategy(), + } + + return strategies.get(selected_entity) + + +if __name__ == '__main__': + main() diff --git a/cosmosdb_emulator/time_tracker_cli/providers/common.py b/cosmosdb_emulator/time_tracker_cli/providers/common.py new file mode 100644 index 00000000..c5ec3e24 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/providers/common.py @@ -0,0 +1,7 @@ +from faker.providers import BaseProvider + + +class CommonProvider(BaseProvider): + def status(self) -> str: + available_status = ['active', 'inactive'] + return self.random_element(elements=available_status) diff --git a/cosmosdb_emulator/time_tracker_cli/questions/common.py b/cosmosdb_emulator/time_tracker_cli/questions/common.py new file mode 100644 index 00000000..8a222afc --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/questions/common.py @@ -0,0 +1,69 @@ +from PyInquirer import prompt + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + stop_execution_if_user_input_is_invalid, +) + + +time_tracker_entities = [entity.value for entity in TimeTrackerEntities] + +entities_actions = ['Create', 'Delete'] + + +def ask_entity(action: str): + question_key = 'entity' + + select_entity_question = { + 'type': 'list', + 'name': question_key, + 'message': f'Perfect, please provide the entity that you want to {action.lower()}:', + 'choices': time_tracker_entities, + } + + selected_entity_answer = prompt(select_entity_question) + selected_entity = selected_entity_answer.get(question_key) + stop_execution_if_user_input_is_invalid(selected_entity) + + return selected_entity + + +def ask_action(): + question_key = 'action' + + select_action_question = { + 'type': 'list', + 'name': question_key, + 'message': 'Hello TT Coder, what action do you want to generate on the entities?', + 'choices': entities_actions, + } + + selected_action_answer = prompt(select_action_question) + selected_action = selected_action_answer.get(question_key) + stop_execution_if_user_input_is_invalid(selected_action) + + return selected_action + + +def ask_delete_confirmation(entities_to_eliminate: set) -> bool: + question_key = 'delete_confirmation' + + join_element = ', ' + entities = join_element.join(entities_to_eliminate) + + message = f'Are you sure to delete these ({entities}) entities' + + delete_confirmation_question = { + 'type': 'confirm', + 'name': question_key, + 'message': message, + 'default': True, + } + + delete_confirmation_answer = prompt(delete_confirmation_question) + is_user_agree_to_delete = delete_confirmation_answer.get(question_key) + stop_execution_if_user_input_is_invalid(is_user_agree_to_delete) + + return is_user_agree_to_delete diff --git a/cosmosdb_emulator/time_tracker_cli/questions/entries.py b/cosmosdb_emulator/time_tracker_cli/questions/entries.py new file mode 100644 index 00000000..2a8078b7 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/questions/entries.py @@ -0,0 +1,108 @@ +from PyInquirer import Separator, prompt + +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + stop_execution_if_user_input_is_invalid, +) +from cosmosdb_emulator.time_tracker_cli.validators.max_amount import ( + MaxAmountValidator, +) +from cosmosdb_emulator.time_tracker_cli.validators.number import ( + NumberValidator, +) +from cosmosdb_emulator.time_tracker_cli.validators.uuid import UUIDValidator + + +def ask_delete_entries(): + question_key = 'delete' + delete_entries_question = { + 'type': 'confirm', + 'name': question_key, + 'message': ( + 'We are going to delete all entries that is currently in the emulator, are you sure to continue?' + ), + 'default': True, + } + + delete_data_answer = prompt(delete_entries_question) + user_agree_to_delete_data = delete_data_answer.get(question_key) + stop_execution_if_user_input_is_invalid(user_agree_to_delete_data) + return user_agree_to_delete_data + + +def ask_entry_type(): + question_key = 'entry_type' + entry_type_question = { + 'type': 'list', + 'name': question_key, + 'message': 'What type of entry do you want to generate?', + 'choices': [ + Separator('<=== AVAILABLE ENTRY TYPES ====>'), + {'name': 'Own entries (Time Entries Page)', 'value': 'OE'}, + {'name': 'General entries (Reports Page)', 'value': 'GE'}, + ], + } + entry_type_answer = prompt(entry_type_question) + entry_type = entry_type_answer.get('entry_type') + stop_execution_if_user_input_is_invalid(entry_type) + return entry_type + + +def ask_entries_amount(entries_type: str): + question_key = 'entries_amount' + message_for_own_entries = 'Enter the amount of entries that you need:' + message_for_general_entries = ( + 'Enter the amount of entries per user that you need:' + ) + own_entries_id = 'OE' + + entries_amount_message = ( + message_for_own_entries + if entries_type == own_entries_id + else message_for_general_entries + ) + + entries_amount_question = { + 'type': 'input', + 'name': question_key, + 'message': entries_amount_message, + 'validate': NumberValidator, + } + + entries_amount_answer = prompt(entries_amount_question).get(question_key) + stop_execution_if_user_input_is_invalid(entries_amount_answer) + entries_amount = int(entries_amount_answer) + return entries_amount + + +def ask_user_identifier() -> str: + question_key = 'user_id' + user_identifier_question = { + 'type': 'input', + 'name': question_key, + 'message': 'Please your identifier:', + 'validate': UUIDValidator, + } + user_identifier_answer = prompt(user_identifier_question) + user_identifier = user_identifier_answer.get(question_key) + stop_execution_if_user_input_is_invalid(user_identifier) + return user_identifier + + +def ask_entries_owners_amount(users_amount: int) -> int: + question_key = 'entries_owners_amount' + entries_owners_amount_question = { + 'type': 'input', + 'name': question_key, + 'message': 'Enter the number of users to be assigned entries:', + } + + max_amount_validator = MaxAmountValidator( + max_amount=users_amount, + error_message='We do not have that amount of users, do not be smart!', + ) + + entries_owners_amount_answer = prompt( + entries_owners_amount_question, validator=max_amount_validator + ).get(question_key) + stop_execution_if_user_input_is_invalid(entries_owners_amount_answer) + return int(entries_owners_amount_answer) diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/activity_management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/activity_management_strategy.py new file mode 100644 index 00000000..4e937040 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/activity_management_strategy.py @@ -0,0 +1,36 @@ +import sys + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_delete_confirmation, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + + +class ActivityManagementStrategy(ManagementStrategy): + + _conflict_entities: set = { + TimeTrackerEntities.TIME_ENTRY.value, + TimeTrackerEntities.ACTIVITY.value, + } + + def get_confirmation_to_delete_data(self) -> bool: + is_user_agree_to_delete_activities_data = ask_delete_confirmation( + self.get_conflict_entities() + ) + return is_user_agree_to_delete_activities_data + + def get_answers_needed_to_create_data(self) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def generate_entities(self, entity_information: dict) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def get_conflict_entities(self) -> set: + return self._conflict_entities diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/customer_management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/customer_management_strategy.py new file mode 100644 index 00000000..38574dde --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/customer_management_strategy.py @@ -0,0 +1,38 @@ +import sys + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_delete_confirmation, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + + +class CustomerManagementStrategy(ManagementStrategy): + + _conflict_entities: set = { + TimeTrackerEntities.CUSTOMER.value, + TimeTrackerEntities.PROJECT.value, + TimeTrackerEntities.PROJECT_TYPE.value, + TimeTrackerEntities.TIME_ENTRY.value, + } + + def get_confirmation_to_delete_data(self) -> bool: + is_user_agree_to_delete_customers_data = ask_delete_confirmation( + self.get_conflict_entities() + ) + return is_user_agree_to_delete_customers_data + + def get_answers_needed_to_create_data(self) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def generate_entities(self, entity_information: dict) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def get_conflict_entities(self) -> set: + return self._conflict_entities diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/management_context.py b/cosmosdb_emulator/time_tracker_cli/strategies/management_context.py new file mode 100644 index 00000000..ac2e5a29 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/management_context.py @@ -0,0 +1,46 @@ +from cosmosdb_emulator.time_tracker_cli.data_target.data_target import ( + DataTarget, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + + +class ManagementContext: + def __init__( + self, strategy: ManagementStrategy, data_target: DataTarget + ) -> None: + self._strategy = strategy + self._data_target = data_target + + @property + def strategy(self) -> ManagementStrategy: + return self._strategy + + @strategy.setter + def strategy(self, strategy: ManagementStrategy) -> None: + self._strategy = strategy + + def create_data(self): + user_answers = self._strategy.get_answers_needed_to_create_data() + entities = self._strategy.generate_entities(user_answers) + conflict_entities = self._strategy.get_conflict_entities() + print( + 'We are trying to create all the requested information, so please wait and be patient!' + ) + print('Creating the data...') + self._data_target.delete(conflict_entities) + self._data_target.save(entities) + print('Great Job! The needed data was created!') + + def delete_data(self): + is_user_agree_to_delete_data = ( + self._strategy.get_confirmation_to_delete_data() + ) + if is_user_agree_to_delete_data: + conflict_entities = self._strategy.get_conflict_entities() + print( + 'We are trying to delete all the requested information, hope you do not regret it later' + ) + self._data_target.delete(conflict_entities) + print('The requested entity and related entities were eliminated.') diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/management_strategy.py new file mode 100644 index 00000000..c911f033 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/management_strategy.py @@ -0,0 +1,34 @@ +from abc import abstractmethod, ABC + + +class ManagementStrategy(ABC): + @abstractmethod + def get_confirmation_to_delete_data(self) -> bool: + """ + Ask the user if he/she agrees to delete the information + :return: True if user agrees to remove the information else False + """ + pass + + @abstractmethod + def get_answers_needed_to_create_data(self) -> dict: + """ + Ask the user all information needed to create a specific entity. + :return: a dict with all information needed to generate the entities + """ + pass + + @abstractmethod + def generate_entities(self, entity_information: dict) -> dict: + """ + Create all the entities related with a specific strategy. + """ + pass + + @abstractmethod + def get_conflict_entities(self) -> set: + """ + Returns all the entities that generate conflict with a specific entity + at the moment of generating the information + """ + pass diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/project_management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/project_management_strategy.py new file mode 100644 index 00000000..bbc7ff83 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/project_management_strategy.py @@ -0,0 +1,35 @@ +import sys + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_delete_confirmation, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + + +class ProjectManagementStrategy(ManagementStrategy): + _conflict_entities: set = { + TimeTrackerEntities.TIME_ENTRY.value, + TimeTrackerEntities.PROJECT.value, + } + + def get_confirmation_to_delete_data(self) -> bool: + is_user_agree_to_delete_projects_data = ask_delete_confirmation( + self.get_conflict_entities() + ) + return is_user_agree_to_delete_projects_data + + def get_conflict_entities(self) -> set: + return self._conflict_entities + + def generate_entities(self, entity_information: dict) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def get_answers_needed_to_create_data(self) -> dict: + print('This functionality has not yet been implemented') + sys.exit() diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/project_type_management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/project_type_management_strategy.py new file mode 100644 index 00000000..cf0e2b89 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/project_type_management_strategy.py @@ -0,0 +1,37 @@ +import sys + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_delete_confirmation, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + + +class ProjectTypeManagementStrategy(ManagementStrategy): + + _conflict_entities: set = { + TimeTrackerEntities.PROJECT_TYPE.value, + TimeTrackerEntities.PROJECT.value, + TimeTrackerEntities.TIME_ENTRY.value, + } + + def get_confirmation_to_delete_data(self) -> bool: + is_user_agree_to_delete_project_types_data = ask_delete_confirmation( + self.get_conflict_entities() + ) + return is_user_agree_to_delete_project_types_data + + def get_answers_needed_to_create_data(self) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def generate_entities(self, entity_information: dict) -> dict: + print('This functionality has not yet been implemented') + sys.exit() + + def get_conflict_entities(self) -> set: + return self._conflict_entities diff --git a/cosmosdb_emulator/time_tracker_cli/strategies/time_entry_management_strategy.py b/cosmosdb_emulator/time_tracker_cli/strategies/time_entry_management_strategy.py new file mode 100644 index 00000000..fe489ba0 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/strategies/time_entry_management_strategy.py @@ -0,0 +1,103 @@ +import sys + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.questions.common import ( + ask_delete_confirmation, +) +from cosmosdb_emulator.time_tracker_cli.strategies.management_strategy import ( + ManagementStrategy, +) + +from cosmosdb_emulator.time_tracker_cli.questions.entries import ( + ask_delete_entries, + ask_entry_type, + ask_user_identifier, + ask_entries_owners_amount, + ask_entries_amount, +) +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_unique_elements_from_list, +) + +from cosmosdb_emulator.time_tracker_cli.utils.time_entry import ( + get_related_information_for_entries, + generate_entries_per_user, + get_time_tracker_users_ids, +) + + +class TimeEntryManagementStrategy(ManagementStrategy): + + _conflict_entities: set = { + TimeTrackerEntities.TIME_ENTRY.value, + } + + def get_answers_needed_to_create_data(self) -> dict: + user_agree_to_delete_entries = ask_delete_entries() + + if not user_agree_to_delete_entries: + print('Thanks for coming! See you later') + sys.exit() + + entries_type = ask_entry_type() + entry_owners = [] + own_entries_type_id = 'OE' + + if entries_type == own_entries_type_id: + user_identifier = ask_user_identifier() + entry_owners.append(user_identifier) + else: + print('Be patient, we are loading important information...') + users_ids = get_time_tracker_users_ids() + users_amount = len(users_ids) + print(f'Currently in Time Tracker we are {users_amount} users') + entries_owners_amount = ask_entries_owners_amount(users_amount) + entry_owners = get_unique_elements_from_list( + elements_list=users_ids, + amount_of_elements=entries_owners_amount, + ) + + entries_amount = ask_entries_amount(entries_type) + + return {'entries_amount': entries_amount, 'entry_owners': entry_owners} + + def get_confirmation_to_delete_data(self) -> bool: + is_user_agree_to_delete_entries_data = ask_delete_confirmation( + self.get_conflict_entities() + ) + return is_user_agree_to_delete_entries_data + + def generate_entities(self, entity_information: dict) -> dict: + entries = [] + + entries_related_information = get_related_information_for_entries() + projects = entries_related_information.get( + TimeTrackerEntities.PROJECT.value + ) + activities = entries_related_information.get( + TimeTrackerEntities.ACTIVITY.value + ) + + entries_amount = entity_information.get('entries_amount') + entry_owners_ids = entity_information.get('entry_owners') + daily_entries_amount = 5 + + for owner_id in entry_owners_ids: + user_entries = generate_entries_per_user( + daily_entries_amount=daily_entries_amount, + entries_amount=entries_amount, + owner_id=owner_id, + projects=projects, + activities=activities, + ) + entries.extend(user_entries) + + entities = entries_related_information + entities[TimeTrackerEntities.TIME_ENTRY.value] = entries + + return entities + + def get_conflict_entities(self) -> set: + return self._conflict_entities diff --git a/cosmosdb_emulator/time_tracker_cli/utils/activity.py b/cosmosdb_emulator/time_tracker_cli/utils/activity.py new file mode 100644 index 00000000..a6832c41 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/activity.py @@ -0,0 +1,25 @@ +from typing import List + +from cosmosdb_emulator.time_tracker_cli.factories.activity_factory import ( + ActivityFactory, +) + + +def get_activity_json(activity_factory: ActivityFactory) -> dict: + activity = { + 'id': activity_factory.id, + 'name': activity_factory.name, + 'description': activity_factory.description, + 'tenant_id': activity_factory.tenant_id, + 'status': activity_factory.status, + } + + return activity + + +def get_activities(activities_amount: int) -> List[ActivityFactory]: + activities = [] + for index in range(activities_amount): + activity = ActivityFactory() + activities.append(activity) + return activities diff --git a/cosmosdb_emulator/time_tracker_cli/utils/common.py b/cosmosdb_emulator/time_tracker_cli/utils/common.py new file mode 100644 index 00000000..d1534824 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/common.py @@ -0,0 +1,31 @@ +import sys +from typing import List + +from faker import Faker + + +def get_time_tracker_tenant_id() -> str: + """ + This tenant id is necessary for all factories, use this value in + the field tenant_id of all factories + """ + time_tracker_tenant_id = 'cc925a5d-9644-4a4f-8d99-0bee49aadd05' + return time_tracker_tenant_id + + +def stop_execution_if_user_input_is_invalid(user_input: str): + if user_input is None: + print('Thanks for coming, see you later!') + sys.exit() + + +def get_unique_elements_from_list(elements_list, amount_of_elements) -> List: + entry_owners = Faker().random_elements( + elements=elements_list, length=amount_of_elements, unique=True + ) + return entry_owners + + +def get_random_element_from_list(elements_list): + random_element = Faker().random_element(elements=elements_list) + return random_element diff --git a/cosmosdb_emulator/time_tracker_cli/utils/customer.py b/cosmosdb_emulator/time_tracker_cli/utils/customer.py new file mode 100644 index 00000000..164d367b --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/customer.py @@ -0,0 +1,20 @@ +from typing import List + +from cosmosdb_emulator.time_tracker_cli.factories.customer_factory import ( + CustomerFactory, +) + + +def get_customers(customer_amount: int) -> List[CustomerFactory]: + customers = CustomerFactory.create_batch(customer_amount) + return customers + + +def get_customer_json(customer_factory: CustomerFactory) -> dict: + customer = { + 'id': customer_factory.id, + 'name': customer_factory.name, + 'description': customer_factory.description, + 'tenant_id': customer_factory.tenant_id, + } + return customer diff --git a/cosmosdb_emulator/time_tracker_cli/utils/project.py b/cosmosdb_emulator/time_tracker_cli/utils/project.py new file mode 100644 index 00000000..c082d29a --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/project.py @@ -0,0 +1,36 @@ +from typing import List + +from cosmosdb_emulator.time_tracker_cli.factories.project_factory import ( + ProjectFactory, +) +from cosmosdb_emulator.time_tracker_cli.factories.project_type_factory import ( + ProjectTypeFactory, +) + + +def get_projects( + projects_per_project_type: int, project_types: List[ProjectTypeFactory] +) -> List[ProjectFactory]: + projects = [] + + for project_type in project_types: + for index in range(projects_per_project_type): + project = ProjectFactory( + project_type_id=project_type.id, + customer_id=project_type.customer_id, + ) + projects.append(project) + + return projects + + +def get_project_json(project_factory: ProjectFactory) -> dict: + project = { + 'id': project_factory.id, + 'name': project_factory.name, + 'description': project_factory.description, + 'customer_id': project_factory.customer_id, + 'project_type_id': project_factory.project_type_id, + 'tenant_id': project_factory.tenant_id, + } + return project diff --git a/cosmosdb_emulator/time_tracker_cli/utils/project_type.py b/cosmosdb_emulator/time_tracker_cli/utils/project_type.py new file mode 100644 index 00000000..104b3044 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/project_type.py @@ -0,0 +1,33 @@ +from typing import List + +from cosmosdb_emulator.time_tracker_cli.factories.customer_factory import ( + CustomerFactory, +) +from cosmosdb_emulator.time_tracker_cli.factories.project_type_factory import ( + ProjectTypeFactory, +) + + +def get_project_types( + project_types_per_customer: int, customers: List[CustomerFactory] +) -> List[ProjectTypeFactory]: + project_types = [] + + for customer in customers: + for index in range(project_types_per_customer): + customer_id = customer.id + project_type = ProjectTypeFactory(customer_id=customer_id) + project_types.append(project_type) + + return project_types + + +def get_project_type_json(project_type_factory: ProjectTypeFactory) -> dict: + project_type = { + 'id': project_type_factory.id, + 'name': project_type_factory.name, + 'description': project_type_factory.description, + 'customer_id': project_type_factory.customer_id, + 'tenant_id': project_type_factory.tenant_id, + } + return project_type diff --git a/cosmosdb_emulator/time_tracker_cli/utils/time_entry.py b/cosmosdb_emulator/time_tracker_cli/utils/time_entry.py new file mode 100644 index 00000000..9689c763 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/utils/time_entry.py @@ -0,0 +1,147 @@ +import math +from datetime import datetime, timedelta +from typing import List + +from cosmosdb_emulator.time_tracker_cli.enums.entites import ( + TimeTrackerEntities, +) +from cosmosdb_emulator.time_tracker_cli.factories.activity_factory import ( + ActivityFactory, +) +from cosmosdb_emulator.time_tracker_cli.factories.project_factory import ( + ProjectFactory, +) +from cosmosdb_emulator.time_tracker_cli.factories.time_entry_factory import ( + TimeEntryFactory, +) +from cosmosdb_emulator.time_tracker_cli.utils.activity import get_activities +from cosmosdb_emulator.time_tracker_cli.utils.common import ( + get_random_element_from_list, +) +from cosmosdb_emulator.time_tracker_cli.utils.customer import get_customers +from cosmosdb_emulator.time_tracker_cli.utils.project import get_projects +from cosmosdb_emulator.time_tracker_cli.utils.project_type import ( + get_project_types, +) +from utils.azure_users import AzureConnection + +""" +Note that the time zone in the DB is different from how it is handled in the UI. +For example 13:00 in the DB in the UI it will be 08:00 +""" +base_hour = 13 +base_minute = 0 + + +def get_time_tracker_users_ids() -> List[str]: + time_tracker_users = AzureConnection().users() + users_ids = [user.id for user in time_tracker_users] + return users_ids + + +def get_reference_datetime( + entries_amount: int, daily_entries_amount: int +) -> datetime: + amount_of_days = math.ceil(entries_amount / daily_entries_amount) + base_datetime = datetime.today() - timedelta(days=amount_of_days) + base_datetime = base_datetime.replace(hour=base_hour, minute=base_minute) + return base_datetime + + +def get_next_day_from_datetime(reference_date: datetime) -> datetime: + next_day = reference_date + timedelta(days=1) + next_day = next_day.replace(hour=base_hour, minute=base_minute) + return next_day + + +def get_time_entry_duration() -> int: + time_entries_duration = [30, 40, 50, 60, 70, 80, 90, 100, 120] + time_entry_duration = get_random_element_from_list(time_entries_duration) + return time_entry_duration + + +def get_time_entry_end_date(entry_start_date) -> datetime: + entry_duration = get_time_entry_duration() + entry_end_date = entry_start_date + timedelta(minutes=entry_duration) + return entry_end_date + + +def get_related_information_for_entries() -> dict: + customers = get_customers(customer_amount=10) + project_types = get_project_types( + project_types_per_customer=2, customers=customers + ) + projects = get_projects( + projects_per_project_type=1, project_types=project_types + ) + + activities = get_activities(activities_amount=20) + + related_information = { + TimeTrackerEntities.CUSTOMER.value: customers, + TimeTrackerEntities.PROJECT_TYPE.value: project_types, + TimeTrackerEntities.PROJECT.value: projects, + TimeTrackerEntities.ACTIVITY.value: activities, + } + + return related_information + + +def generate_entries_per_user( + daily_entries_amount: int, + entries_amount: int, + owner_id: str, + projects: List[ProjectFactory], + activities: List[ActivityFactory], +) -> List[TimeEntryFactory]: + + utc_format = '%Y-%m-%dT%H:%M:%SZ' + entries_per_user = [] + daily_entries = 0 + projects_ids = [project.id for project in projects] + activities_ids = [activity.id for activity in activities] + + reference_datetime = get_reference_datetime( + entries_amount=entries_amount, + daily_entries_amount=daily_entries_amount, + ) + + for index in range(entries_amount): + project_id = get_random_element_from_list(projects_ids) + activity_id = get_random_element_from_list(activities_ids) + start_date = reference_datetime + timedelta(minutes=1) + + if daily_entries == daily_entries_amount: + start_date = get_next_day_from_datetime(reference_date=start_date) + daily_entries = 0 + + end_date = get_time_entry_end_date(start_date) + + current_entry = TimeEntryFactory( + start_date=start_date.strftime(utc_format), + end_date=end_date.strftime(utc_format), + owner_id=owner_id, + project_id=project_id, + activity_id=activity_id, + ) + + entries_per_user.append(current_entry) + daily_entries += 1 + reference_datetime = end_date + + return entries_per_user + + +def get_entry_json(time_entry_factory: TimeEntryFactory) -> dict: + time_entry = { + 'project_id': time_entry_factory.project_id, + 'activity_id': time_entry_factory.activity_id, + 'technologies': time_entry_factory.technologies, + 'description': time_entry_factory.description, + 'start_date': time_entry_factory.start_date, + 'owner_id': time_entry_factory.owner_id, + 'id': time_entry_factory.id, + 'tenant_id': time_entry_factory.tenant_id, + 'end_date': time_entry_factory.end_date, + } + return time_entry diff --git a/cosmosdb_emulator/time_tracker_cli/validators/max_amount.py b/cosmosdb_emulator/time_tracker_cli/validators/max_amount.py new file mode 100644 index 00000000..68f106a7 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/validators/max_amount.py @@ -0,0 +1,21 @@ +from prompt_toolkit.validation import ValidationError + +from cosmosdb_emulator.time_tracker_cli.validators.number import ( + NumberValidator, +) + + +class MaxAmountValidator(NumberValidator): + def __init__(self, max_amount, error_message): + self.max_amount = max_amount + self.error_message = error_message + + def validate(self, document): + super().validate(document) + + entered_value = int(document.text) + + if entered_value > self.max_amount: + raise ValidationError( + message=self.error_message, cursor_position=len(document.text) + ) diff --git a/cosmosdb_emulator/time_tracker_cli/validators/number.py b/cosmosdb_emulator/time_tracker_cli/validators/number.py new file mode 100644 index 00000000..6bf34589 --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/validators/number.py @@ -0,0 +1,21 @@ +from prompt_toolkit.validation import Validator, ValidationError + + +class NumberValidator(Validator): + def validate(self, document): + value_entered = document.text + is_number = value_entered.isnumeric() + + if not is_number: + raise ValidationError( + message='Please provide only a numeric value', + cursor_position=len(value_entered), + ) + + entered_number = int(value_entered) + + if entered_number < 1: + raise ValidationError( + message='Please provide numbers greater than 0', + cursor_position=len(value_entered), + ) diff --git a/cosmosdb_emulator/time_tracker_cli/validators/uuid.py b/cosmosdb_emulator/time_tracker_cli/validators/uuid.py new file mode 100644 index 00000000..27835bfa --- /dev/null +++ b/cosmosdb_emulator/time_tracker_cli/validators/uuid.py @@ -0,0 +1,15 @@ +import uuid + +from prompt_toolkit.validation import Validator, ValidationError + + +class UUIDValidator(Validator): + def validate(self, document): + value_entered = document.text + try: + uuid.UUID(value_entered, version=4) + except ValueError: + raise ValidationError( + message='Please provide a valid UUID', + cursor_position=len(value_entered), + ) diff --git a/cosmosdb_emulator/verify_environment.sh b/cosmosdb_emulator/verify_environment.sh new file mode 100644 index 00000000..8c0104d6 --- /dev/null +++ b/cosmosdb_emulator/verify_environment.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +echo "We are checking the development environment..." + +DATABASE_EMULATOR_KEY="C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==" +DATABASE_ENV_KEY=$DATABASE_MASTER_KEY + +if [ "$DATABASE_EMULATOR_KEY" != "$DATABASE_ENV_KEY" ]; then + echo "You are trying to run this CLI in a non-development environment. We can not proceed with this action" + exit 0 +fi + +echo "GREAT! You are on development environment" \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 5c9f7349..a7ae7ce5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,39 +12,27 @@ services: - .env volumes: - .:/usr/src/app - # depends_on: - # - cosmosdb + depends_on: + - cosmosdb entrypoint: - /bin/sh - - ./cosmosdb-emulator/entrypoint.sh - # networks: - # services_net: - # ipv4_address: 172.20.0.77 + - ./cosmosdb_emulator/entrypoint.sh cosmosdb: container_name: "azurecosmosemulator" hostname: "azurecosmosemulator" image: 'mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator' tty: true + cpu_count: 2 + restart: always mem_limit: 3GB ports: - - '8081:8081' - - '10251:10251' - - '10252:10252' - - '10253:10253' - - '10254:10254' + - '8081:8081' + - '10251:10251' + - '10252:10252' + - '10253:10253' + - '10254:10254' environment: - - AZURE_COSMOS_EMULATOR_PARTITION_COUNT=7 - - AZURE_COSMOS_EMULATOR_ARGS=/alternativenames=azurecosmosemulator - - # networks: - # services_net: - # ipv4_address: 172.20.0.78 - - -# networks: -# services_net: -# ipam: -# driver: default -# config: -# - subnet: 172.20.0.0/16 \ No newline at end of file + - AZURE_COSMOS_EMULATOR_PARTITION_COUNT=7 + - AZURE_COSMOS_EMULATOR_ENABLE_DATA_PERSISTENCE=true + - AZURE_COSMOS_EMULATOR_ARGS=/alternativenames=azurecosmosemulator \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index c85a2bbc..6d8a1599 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -16,3 +16,8 @@ coverage==4.5.1 # Git hooks pre-commit==2.2.0 + +# CLI tools +PyInquirer==1.0.3 +pyfiglet==0.7 +factory_boy==3.2.0 \ No newline at end of file From bb7d711c926c6217619d210cbeb3d89eca8f40eb Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 26 Jul 2021 21:20:18 +0000 Subject: [PATCH 04/74] 0.37.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 35437293..b75b8262 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.37.0 (2021-07-26) +### Feature +* TT-293 Create Script to generate data in the Database ([#310](https://github.com/ioet/time-tracker-backend/issues/310)) ([`50f8d46`](https://github.com/ioet/time-tracker-backend/commit/50f8d468d77835a6f90b958d4642d338f36d5f37)) + ## v0.36.4 (2021-07-16) ### Fix * TT-288 Fix and improve tests on find_interception_with_date_range function ([#308](https://github.com/ioet/time-tracker-backend/issues/308)) ([`00ed5ae`](https://github.com/ioet/time-tracker-backend/commit/00ed5ae9afc1b3d5c9f60c25007132a7f737742d)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index c1935a24..8935b5b5 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.36.4' +__version__ = '0.37.0' From f7aba96802a629d2829fc09606c67a07364c3016 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Wed, 4 Aug 2021 11:53:29 -0500 Subject: [PATCH 05/74] fix: TT-302 Fix URLLIB3 dependencies vulnerabilities (#313) --- requirements/azure_cosmos.txt | 2 +- requirements/commons.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/azure_cosmos.txt b/requirements/azure_cosmos.txt index f4d95df0..62ae1c17 100644 --- a/requirements/azure_cosmos.txt +++ b/requirements/azure_cosmos.txt @@ -9,7 +9,7 @@ certifi==2019.11.28 chardet==3.0.4 idna==2.8 six==1.13.0 -urllib3==1.25.8 +urllib3==1.26.5 virtualenv==16.7.9 virtualenv-clone==0.5.3 diff --git a/requirements/commons.txt b/requirements/commons.txt index 9b5d811c..aef1f707 100644 --- a/requirements/commons.txt +++ b/requirements/commons.txt @@ -3,7 +3,7 @@ # For Common dependencies # Handling requests -requests==2.23.0 +requests==2.25.1 # To create sample content in tests and API documentation Faker==4.0.2 From 58dbc1588576d1603162e5d29780b315f5f784a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Thu, 5 Aug 2021 21:23:07 -0500 Subject: [PATCH 06/74] docs: TT-301 Update readme documentation and add Time Tracker CLI docs (#314) * docs: TT-301 Update readme documentation and add Time Tracker CLI docs * docs: TT-301 Implement suggestions made in the PR --- README.md | 454 +++++++++++++++----------- cosmosdb_emulator/README.md | 90 +++++ requirements/time_tracker_api/dev.txt | 3 - time-tracker.sh | 11 + 4 files changed, 368 insertions(+), 190 deletions(-) create mode 100644 cosmosdb_emulator/README.md create mode 100644 time-tracker.sh diff --git a/README.md b/README.md index 63029102..25bbe017 100644 --- a/README.md +++ b/README.md @@ -6,78 +6,202 @@ This is the mono-repository for the backend services and their common codebase ## Getting started -Follow the following instructions to get the project ready to use ASAP. +Follow the next instructions to get the project ready to use ASAP. -### Requirements +Currently, there are two ways to run the project, the production mode using a virtual environment and installing all the necessary libraries +there and the other way is using the development mode with Docker and docker-compose. It is recommended to use the development mode and in special cases the production mode. -Be sure you have installed in your system +## Requirements: + +For both modes it is necessary to have the following requirements installed: - [Python version 3](https://www.python.org/download/releases/3.0/) (recommended 3.8 or less) in your path. It will install automatically [pip](https://pip.pypa.io/en/stable/) as well. -- A virtual environment, namely [venv](https://docs.python.org/3/library/venv.html). +- A virtual environment, namely [.venv](https://docs.python.org/3/library/venv.html). - Optionally for running Azure functions locally: [Azure functions core tool](https://docs.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=macos%2Ccsharp%2Cbash). -### Setup +## Settings for each mode + +Before proceeding to the configuration for each of the modes, +it is important to perform the following step regardless of the mode to be used. + +### Create a virtual environment -- Create and activate the environment, +Execute the next command at the root of the project: - In Windows: +```shell +python -m venv .venv +``` + +> **Note:** We can replace python for python3 or python3.8 according to the version you have installed, +> but do not forget the initial requirements. - ``` - #Create virtual enviroment - python -m venv .venv +**Activate the environment** - #Execute virtual enviroment - .venv\Scripts\activate.bat - ``` +Windows: +```shell +.venv\Scripts\activate.bat +``` - In Unix based operative systems: +In Unix based operative systems: - ``` - #Create virtual enviroment - virtualenv .venv +```shell +source .venv/bin/activate +``` - #Execute virtual enviroment - source .venv/bin/activate - ``` +### Setup for each mode -**Note:** If you're a linux user you will need to install an additional dependency to have it working. +The configuration required for each of the modes is as follows: -Type in the terminal the following command to install the required dependency to have pyodbc working locally: +
+ Development Mode + +### Requirements: -```sh -sudo apt-get install unixodbc-dev +In addition to the initial requirements, it is necessary to have the following requirements installed: + +- Docker + + You can follow the instructions below to install on each of the following operating systems: + - [**Mac**](https://docs.docker.com/docker-for-mac/install/) + - [**Linux**](https://docs.docker.com/engine/install/) + - [**Windows**](https://docs.docker.com/docker-for-windows/install/) + +- Docker Compose + + To install Docker Compose, please choose the operating system you use and follow the steps [here](https://docs.docker.com/compose/install/). + +### Setup + +Once installed Docker and Docker Compose we must create a `.env` file in the root of our project where we will put the following environment variables. + +```shell +export MS_AUTHORITY=XXXX +export MS_CLIENT_ID=XXXX +export MS_SCOPE=XXXX +export MS_SECRET=yFo=XXXX +export MS_ENDPOINT=XXXX +export DATABASE_ACCOUNT_URI=XXXX +export DATABASE_MASTER_KEY=XXXX +export DATABASE_NAME=XXXX +export FLASK_APP=XXXX +export AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXXX +export FLASK_DEBUG=XXXX +export REQUESTS_CA_BUNDLE=XXXX ``` +> **Please, contact the project development team for the content of the variables mentioned above.** + +### Run containers + +Once all the project configuration is done, we are going to execute the following command in the terminal, taking into account that we are inside the root folder of the project: + +```shell +docker-compose up --build +``` + +This command will build all images with the necessary configurations for each one, also +raises the cosmos emulator in combination with the backend, now you can open in the browser: + +- `http://127.0.0.1:5000/` open backend API. +- `https://127.0.0.1:8081/_explorer/index.html` to open Cosmos DB emulator. + +> If you have already executed the command (`docker-compose up --build`) previously in this project, +> it is not necessary to execute it again, instead it should be executed like this: +> `docker-compose up` + +> It is also important to clarify that if packages or any extra configuration is added to the image's construction, +> you need to run again `docker-compose up --build`, you can see more information about this flag [here](https://docs.docker.com/compose/reference/up/) + +### Development + +#### Generate Fake Data + +In order to generate fake data to test functionalities or correct errors, +we have built a CLI, called 'Time Tracker CLI', which is in charge of generating +the fake information inside the Cosmos emulator. + +To learn how this CLI works, you can see the instructions [here](https://github.com/ioet/time-tracker-backend/tree/master/cosmosdb_emulator) + +> It is important to clarify that Time Tracker CLI only works in development mode. + +### Test + +We are using [Pytest](https://docs.pytest.org/en/latest/index.html) for tests. The tests are located in the package +`tests` and use the [conventions for python test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery). + +> Remember to run any available test command we have to have the containers up (`docker-compose up`). + +This command run all tests: + +```shell +./time-tracker.sh pytest -v +``` + +Run a single test: + +```shell +./time-tracker.sh pytest -v -k name-test +``` + +#### Coverage + +To check the coverage of the tests execute: + +```shell +./time-tracker.sh coverage run -m pytest -v +``` + +To get a report table: + +```shell +./time-tracker.sh coverage report +``` + +To get a full report in html: + +```shell +./time-tracker.sh coverage html +``` +Then check in the [htmlcov/index.html](./htmlcov/index.html) to see it. + +If you want that previously collected coverage data is erased, you can execute: + +```shell +./time-tracker.sh coverage erase +``` + +
-- Install the requirements: +
- ``` - python3 -m pip install -r requirements//.txt - ``` +
+ Production Mode - If you use Windows, you will use this comand: +### Setup + +#### Install the requirements: + +``` +python3 -m pip install -r requirements//.txt +``` - ``` - python -m pip install -r requirements//.txt - ``` +If you use Windows, you will use this command: - Where `` is one of the executable app namespace, e.g. `time_tracker_api` or `time_tracker_events` (**Note:** Currently, only `time_tracker_api` is used.). The `stage` can be +``` +python -m pip install -r requirements//.txt +``` - - `dev`: Used for working locally - - `prod`: For anything deployed +Where `` is one of the executable app namespace, e.g. `time_tracker_api` or `time_tracker_events` (**Note:** Currently, only `time_tracker_api` is used.). The `stage` can be -Remember to do it with Python 3. +- `dev`: Used for working locally +- `prod`: For anything deployed Bear in mind that the requirements for `time_tracker_events`, must be located on its local requirements.txt, by [convention](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python#folder-structure). -- Run `pre-commit install -t pre-commit -t commit-msg`. For more details, see section Development > Git hooks. - ### Set environment variables -Set environment variables with the content pinned in our slack channel #time-tracker-developer: - -When you use Bash or GitBash you should use: +When you use Bash or GitBash you should create a .env file and add the next variables: ``` export MS_AUTHORITY=XXX @@ -93,7 +217,7 @@ export AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXX export FLASK_DEBUG=True ``` -If you use PowerShell, you should use: +If you use PowerShell, you should create a .env.bat file and add the next variables: ``` $env:MS_AUTHORITY="XXX" @@ -109,7 +233,7 @@ $env:AZURE_APP_CONFIGURATION_CONNECTION_STRING="XXX" $env:FLASK_DEBUG="True" ``` -If you use Command Prompt, you should use: +If you use Command Prompt, you should create a .env.ps1 file and add the next variables: ``` set "MS_AUTHORITY=XXX" @@ -125,21 +249,114 @@ set "AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXX" set "FLASK_DEBUG=True" ``` -**Note:** You can create .env (Bash, GitBash), .env.bat (Command Prompt), .env.ps1 (PowerShell) files with environment variables and run them in the corresponding console. - -Important: You should set the environment variables each time the application is run. +> **Important:** Ask the development team for the values of the environment variables, also +> you should set the environment variables each time the application is run. -### How to use it +### Run application - Start the app: - ``` - flask run - ``` +```shell +flask run +``` - Open `http://127.0.0.1:5000/` in a browser. You will find in the presented UI a link to the swagger.json with the definition of the api. +### Test + +We are using [Pytest](https://docs.pytest.org/en/latest/index.html) for tests. The tests are located in the package +`tests` and use the [conventions for python test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery). + +This command run all tests: + +```shell +pytest -v +``` + +> **Note:** If you get the error "No module named azure.functions", execute the command `pip install azure-functions`: + +To run a single test: + +```shell +pytest -v -k name-test +``` + +#### Coverage + +To check the coverage of the tests execute: + +```shell +coverage run -m pytest -v +``` + +To get a report table: + +```shell +coverage report +``` + +To get a full report in html: + +```shell +coverage html +``` +Then check in the [htmlcov/index.html](./htmlcov/index.html) to see it. + +If you want that previously collected coverage data is erased, you can execute: + +```shell +coverage erase +``` + +
+ +
+ +### Git hooks +We use [pre-commit](https://github.com/pre-commit/pre-commit) library to manage local git hooks. + +This library allows you to execute code right before the commit, for example: +- Check if the commit contains the correct formatting. +- Format modified files based on a Style Guide such as PEP 8, etc + +To install and use `pre-commit` in development mode we have to perform the next command: + +```shell +python3 -m pip install pre-commit +``` + +Once `pre-commit` library is installed, we just need to run in our virtual environment: +```shell +pre-commit install -t pre-commit -t commit-msg +``` + +> Remember to execute these commands with the virtual environment active. + +For more details, see section Development > Git hooks. + +With this command the library will take configuration from `.pre-commit-config.yaml` and will set up the hooks by us. + +### Commit message style + +Use the following commit message style. e.g: + +```shell +'feat: TT-123 Applying some changes' +'fix: TT-321 Fixing something broken' +'feat(config): TT-00 Fix something in config files' +``` + +The value `TT-###` refers to the Jira issue that is being solved. Use TT-00 if the commit does not refer to any issue. + +### Branch names format + +For example if your task in Jira is **TT-48 implement semantic versioning** your branch name is: + +```shell +TT-48-implement-semantic-versioning +``` + ### Handling Cosmos DB triggers for creating events with time_tracker_events The project `time_tracker_events` is an Azure Function project. Its main responsibility is to respond to calls related to @@ -227,120 +444,6 @@ If you require to deploy `time_tracker_events` from your local machine to Azure func azure functionapp publish time-tracker-events --build local ``` -## Development - -### Git hooks - -We use [pre-commit](https://github.com/pre-commit/pre-commit) library to manage local git hooks, as developers we just need to run in our virtual environment: - -``` -pre-commit install -t pre-commit -t commit-msg -``` - -With this command the library will take configuration from `.pre-commit-config.yaml` and will set up the hooks by us. - -### Commit message style - -Use the following commit message style. e.g: - -``` -'feat: TT-123 Applying some changes' -'fix: TT-321 Fixing something broken' -'feat(config): TT-00 Fix something in config files' -``` - -The value `TT-###` refers to the Jira issue that is being solved. Use TT-00 if the commit does not refer to any issue. - -### Branch names format - -For example if your task in Jira is **TT-48 implement semantic versioning** your branch name is: - -``` - TT-48-implement-semantic-versioning -``` - -### Test - -We are using [Pytest](https://docs.pytest.org/en/latest/index.html) for tests. The tests are located in the package -`tests` and use the [conventions for python test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery). - -#### Integration tests - -The [integrations tests](https://en.wikipedia.org/wiki/Integration_testing) verifies that all the components of the app -are working well together. These are the default tests we should run: - -This command run all tests: - -```dotenv -python3 -m pytest -v --ignore=tests/commons/data_access_layer/azure/sql_repository_test.py -``` - -In windows - -``` -python -m pytest -v --ignore=tests/commons/data_access_layer/azure/sql_repository_test.py -``` - -**Note:** If you get the error "No module named azure.functions", execute the command: - -``` -pip install azure-functions -``` - -To run a sigle test: - -``` -pytest -v -k name-test -``` - -As you may have noticed we are ignoring the tests related with the repository. - -#### System tests - -In addition to the integration testing we might include tests to the data access layer in order to verify that the -persisted data is being managed the right way, i.e. it actually works. We may classify the execution of all the existing -tests as [system testing](https://en.wikipedia.org/wiki/System_testing): - -```dotenv -python3 -m pytest -v -``` - -The database tests will be done in the table `tests` of the database specified by the variable `SQL_DATABASE_URI`. If this -variable is not specified it will automatically connect to SQLite database in-memory. This will do, because we are using -[SQL Alchemy](https://www.sqlalchemy.org/features.html) to be able connect to any SQL database maintaining the same -codebase. - -The option `-v` shows which tests failed or succeeded. Have into account that you can also debug each test -(test\_\* files) with the help of an IDE like PyCharm. - -#### Coverage - -To check the coverage of the tests execute - -```bash - coverage run -m pytest -v -``` - -To get a report table - -```bash - coverage report -``` - -To get a full report in html - -```bash - coverage html -``` - -Then check in the [htmlcov/index.html](./htmlcov/index.html) to see it. - -If you want that previously collected coverage data is erased, you can execute: - -``` -coverage erase -``` - ### CLI There are available commands, aware of the API, that can be very helpful to you. You @@ -374,22 +477,6 @@ standard commit message style. [python-semantic-release](https://python-semantic-release.readthedocs.io/en/latest/commands.html#publish) for details of underlying operations. -## Run as docker container - -1. Build image - -```bash -docker build -t time_tracker_api:local . -``` - -2. Run app - -```bash -docker run -p 5000:5000 time_tracker_api:local -``` - -3. Visit `127.0.0.1:5000` - ## Migrations Looking for a DB-agnostic migration tool, the only choice I found was [migrate-anything](https://pypi.org/project/migrate-anything/). @@ -438,13 +525,6 @@ They will be automatically run during the Continuous Deployment process. Shared file with all the Feature Toggles we create, so we can have a history of them [Feature Toggles dictionary](https://github.com/ioet/time-tracker-ui/wiki/Feature-Toggles-dictionary) -## Support for docker-compose and cosmosdb emulator - -To run the dev enviroment in docker-compose: -```bash -docker-compose up -``` - ## More information about the project [Starting in Time Tracker](https://github.com/ioet/time-tracker-ui/wiki/Time-tracker) diff --git a/cosmosdb_emulator/README.md b/cosmosdb_emulator/README.md new file mode 100644 index 00000000..20103ced --- /dev/null +++ b/cosmosdb_emulator/README.md @@ -0,0 +1,90 @@ +# Time Tracker CLI + +Here you can find all the source code of the Time Tracker CLI. +This is responsible for automatically generating fake data for the Cosmos emulator, +in order to have information when testing new features or correcting bugs. + +> This feature is only available in development mode. + +## Prerequisites + +- Backend and cosmos emulator containers up. +- Environment variables correctly configured + +### Environment Variables. + +The main environment variables that you need to take into account are the following: + +```shell +export DATABASE_ACCOUNT_URI=https://azurecosmosemulator:8081 +export DATABASE_MASTER_KEY=C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw== +export DATABASE_NAME=time_tracker_testing_database +``` +Verify that the variables are the same as those shown above. + +## How to use Time Tracker CLI? + +If we are in the project's root folder, we need to redirect to the folder `cosmosdb_emulator` and open a terminal. + +We have two main alternatives for running the CLI: + +### Execute CLI with flags. + +In order to see all the available flags for the CLI we are going to execute the following command: + +```shell +./cli.sh main.py --help +``` + +When executing the above command, the following information will be displayed: + +![image](https://user-images.githubusercontent.com/56373098/127604274-041c2af7-d7a8-4b8d-b784-8280773b68c8.png) + +Where you can see the actions we can perform on a given Entity: + +Currently, the CLI only allows the creation of Time-entries and allows the deletion of any entity. + +Available Actions: + +- Create: Allows creating new fake data about a certain entity. +- Delete: Allows deleting information of an entity in the cosmos emulator. + +> To delete information about a certain entity you have to take into account the relationship +that this entity has with other entities, since this related information will also be eliminated, +for this purpose the following diagram can be used as a reference: +![image](https://user-images.githubusercontent.com/56373098/127604828-77cc1f90-21d4-4c63-9881-9d6546d84445.png) + +Available Entities: + +- Customers +- Projects +- Project-Types +- Activities +- Time-entries + +Considering the actions that we can execute on the entities we can perform the following command +to generate entries: +```shell +./cli.sh main.py -a Create -e Time-entries +``` + +The result of this command will be as follows: + +![image](https://user-images.githubusercontent.com/56373098/127606245-6cb5a0d1-ada6-4194-bbeb-6bd9679b676b.png) + +In this way we can continue with the generation of entities in an interactive way. + +### Execute CLI in an interactive way + +To run the CLI interactively, we need to execute the following command: + +```shell +./cli.sh main.py +``` +After executing the above command, the following will be displayed: + +![image](https://user-images.githubusercontent.com/56373098/127606606-422c6841-bd40-4f36-be2e-e765d333beed.png) + +This way we can interact dynamically with the CLI for the generation/deletion of entities. + +> Currently, for the generation of personal entries it is necessary to know the identifier of our user within Time Tracker. \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index 6d8a1599..302acb78 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -14,9 +14,6 @@ pytest-mock==2.0.0 # Coverage coverage==4.5.1 -# Git hooks -pre-commit==2.2.0 - # CLI tools PyInquirer==1.0.3 pyfiglet==0.7 diff --git a/time-tracker.sh b/time-tracker.sh new file mode 100644 index 00000000..fe6b0068 --- /dev/null +++ b/time-tracker.sh @@ -0,0 +1,11 @@ +#!/bin/sh +COMMAND=$@ +PYTHON_COMMAND="pip install azure-functions" +API_CONTAINER_NAME="time-tracker-backend_api" + +execute(){ + docker exec -ti $API_CONTAINER_NAME sh -c "$PYTHON_COMMAND" + docker exec -ti $API_CONTAINER_NAME sh -c "$COMMAND" +} + +execute \ No newline at end of file From 23a35e7220f3c7bd2333b45f6b69be9e12874d6f Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 6 Aug 2021 02:31:57 +0000 Subject: [PATCH 07/74] 0.37.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ time_tracker_api/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b75b8262..81930604 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.37.1 (2021-08-06) +### Fix +* TT-302 Fix URLLIB3 dependencies vulnerabilities ([#313](https://github.com/ioet/time-tracker-backend/issues/313)) ([`f7aba96`](https://github.com/ioet/time-tracker-backend/commit/f7aba96802a629d2829fc09606c67a07364c3016)) + +### Documentation +* TT-301 Update readme documentation and add Time Tracker CLI docs ([#314](https://github.com/ioet/time-tracker-backend/issues/314)) ([`58dbc15`](https://github.com/ioet/time-tracker-backend/commit/58dbc1588576d1603162e5d29780b315f5f784a5)) + ## v0.37.0 (2021-07-26) ### Feature * TT-293 Create Script to generate data in the Database ([#310](https://github.com/ioet/time-tracker-backend/issues/310)) ([`50f8d46`](https://github.com/ioet/time-tracker-backend/commit/50f8d468d77835a6f90b958d4642d338f36d5f37)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 8935b5b5..a4b38359 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.37.0' +__version__ = '0.37.1' From 86867c97f798c8a46d26b8e57b677c0fc77253d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Thu, 5 Aug 2021 21:52:25 -0500 Subject: [PATCH 08/74] refactor: TT-245 refactor on find all function in cosmos db repository class (#312) * refactor: TT-245 Refactor function find all inside cosmosdb Repository * refactor: TT-245 remove unnecesary functions and add some testing --- commons/data_access_layer/cosmos_db.py | 124 ++++-------------- .../data_access_layer/cosmos_db_test.py | 22 ---- .../activities/activities_model_test.py | 4 - .../time_entries_namespace_test.py | 23 ---- .../time_entries_query_builder_test.py | 46 +------ tests/utils/query_builder_test.py | 90 +++++++++++++ .../time_entries_query_builder.py | 17 --- utils/query_builder.py | 35 +++++ 8 files changed, 151 insertions(+), 210 deletions(-) diff --git a/commons/data_access_layer/cosmos_db.py b/commons/data_access_layer/cosmos_db.py index 9cdf7f1c..3c8555d0 100644 --- a/commons/data_access_layer/cosmos_db.py +++ b/commons/data_access_layer/cosmos_db.py @@ -1,15 +1,15 @@ import dataclasses import logging -from typing import Callable, List +from typing import Callable import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.exceptions as exceptions -import flask from azure.cosmos import ContainerProxy, PartitionKey from flask import Flask from werkzeug.exceptions import HTTPException from commons.data_access_layer.database import CRUDDao, EventContext +from utils.query_builder import CosmosDBQueryBuilder class CosmosDBFacade: @@ -124,55 +124,6 @@ def from_definition( custom_cosmos_helper=custom_cosmos_helper, ) - @staticmethod - def create_sql_condition_for_visibility( - visible_only: bool, container_name='c' - ) -> str: - if visible_only: - # We are considering that `deleted == null` is not a choice - return 'AND NOT IS_DEFINED(%s.deleted)' % container_name - return '' - - @staticmethod - def create_sql_active_condition( - status_value: str, container_name='c' - ) -> str: - if status_value != None: - not_defined_condition = '' - condition_operand = ' AND ' - if status_value == 'active': - not_defined_condition = ( - 'AND NOT IS_DEFINED({container_name}.status)'.format( - container_name=container_name - ) - ) - condition_operand = ' OR ' - - defined_condition = '(IS_DEFINED({container_name}.status) \ - AND {container_name}.status = \'{status_value}\')'.format( - container_name=container_name, status_value=status_value - ) - return ( - not_defined_condition + condition_operand + defined_condition - ) - - return '' - - @staticmethod - def create_sql_where_conditions( - conditions: dict, container_name='c' - ) -> str: - where_conditions = [] - for k in conditions.keys(): - where_conditions.append(f'{container_name}.{k} = @{k}') - - if len(where_conditions) > 0: - return "AND {where_conditions_clause}".format( - where_conditions_clause=" AND ".join(where_conditions) - ) - else: - return "" - @staticmethod def generate_params(conditions: dict) -> list: result = [] @@ -206,16 +157,6 @@ def attach_context(data: dict, event_context: EventContext): "session_id": event_context.session_id, } - @staticmethod - def create_sql_date_range_filter(date_range: dict) -> str: - if 'start_date' in date_range and 'end_date' in date_range: - return """ - AND ((c.start_date BETWEEN @start_date AND @end_date) OR - (c.end_date BETWEEN @start_date AND @end_date)) - """ - else: - return '' - def create( self, data: dict, event_context: EventContext, mapper: Callable = None ): @@ -257,53 +198,38 @@ def find_all( mapper: Callable = None, ): conditions = conditions if conditions else {} - partition_key_value = self.find_partition_key_value(event_context) - max_count = self.get_page_size_or(max_count) - params = [ - {"name": "@partition_key_value", "value": partition_key_value}, - {"name": "@offset", "value": offset}, - {"name": "@max_count", "value": max_count}, - ] - - status_value = None - if conditions.get('status') != None: - status_value = conditions.get('status') + max_count: int = self.get_page_size_or(max_count) + + status_value = conditions.get('status') + if status_value: conditions.pop('status') date_range = date_range if date_range else {} - date_range_params = ( - self.generate_params(date_range) if date_range else [] - ) - params.extend(self.generate_params(conditions)) - params.extend(date_range_params) - - query_str = """ - SELECT * FROM c - WHERE c.{partition_key_attribute}=@partition_key_value - {conditions_clause} - {active_condition} - {date_range_sql_condition} - {visibility_condition} - {order_clause} - OFFSET @offset LIMIT @max_count - """.format( - partition_key_attribute=self.partition_key_attribute, - visibility_condition=self.create_sql_condition_for_visibility( - visible_only - ), - active_condition=self.create_sql_active_condition(status_value), - conditions_clause=self.create_sql_where_conditions(conditions), - date_range_sql_condition=self.create_sql_date_range_filter( - date_range - ), - order_clause=self.create_sql_order_clause(), + + query_builder = ( + CosmosDBQueryBuilder() + .add_sql_where_equal_condition(conditions) + .add_sql_active_condition(status_value) + .add_sql_date_range_condition(date_range) + .add_sql_visibility_condition(visible_only) + .add_sql_limit_condition(max_count) + .add_sql_offset_condition(offset) + .build() ) + if len(self.order_fields) > 1: + attribute = self.order_fields[0] + order = self.order_fields[1] + query_builder.add_sql_order_by_condition(attribute, order) + + query_str = query_builder.get_query() + params = query_builder.get_parameters() + partition_key_value = self.find_partition_key_value(event_context) + result = self.container.query_items( query=query_str, parameters=params, partition_key=partition_key_value, - max_item_count=max_count, ) function_mapper = self.get_mapper_or_dict(mapper) diff --git a/tests/commons/data_access_layer/cosmos_db_test.py b/tests/commons/data_access_layer/cosmos_db_test.py index c7a04eaf..07548988 100644 --- a/tests/commons/data_access_layer/cosmos_db_test.py +++ b/tests/commons/data_access_layer/cosmos_db_test.py @@ -660,28 +660,6 @@ def test_delete_permanently_with_valid_id_should_succeed( assert e.status_code == 404 -def test_repository_create_sql_where_conditions_with_multiple_values( - cosmos_db_repository: CosmosDBRepository, -): - result = cosmos_db_repository.create_sql_where_conditions( - {'owner_id': 'mark', 'customer_id': 'me'}, "c" - ) - - assert result is not None - assert ( - result == "AND c.owner_id = @owner_id AND c.customer_id = @customer_id" - ) - - -def test_repository_create_sql_where_conditions_with_no_values( - cosmos_db_repository: CosmosDBRepository, -): - result = cosmos_db_repository.create_sql_where_conditions({}, "c") - - assert result is not None - assert result == "" - - def test_repository_append_conditions_values( cosmos_db_repository: CosmosDBRepository, ): diff --git a/tests/time_tracker_api/activities/activities_model_test.py b/tests/time_tracker_api/activities/activities_model_test.py index e9ea54b3..fe84ce3a 100644 --- a/tests/time_tracker_api/activities/activities_model_test.py +++ b/tests/time_tracker_api/activities/activities_model_test.py @@ -1,5 +1,4 @@ from unittest.mock import Mock, patch -import pytest from commons.data_access_layer.database import EventContext from time_tracker_api.activities.activities_model import ( @@ -8,9 +7,6 @@ ) -@patch( - 'time_tracker_api.activities.activities_model.ActivityCosmosDBRepository.create_sql_condition_for_visibility' -) @patch( 'time_tracker_api.activities.activities_model.ActivityCosmosDBRepository.find_partition_key_value' ) diff --git a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py index 8f22f45f..ce4a3a23 100644 --- a/tests/time_tracker_api/time_entries/time_entries_namespace_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_namespace_test.py @@ -8,12 +8,8 @@ from pytest_mock import MockFixture, pytest from utils.time import ( - get_current_year, - get_current_month, current_datetime, current_datetime_str, - get_date_range_of_month, - datetime_str, ) from utils import worked_time from time_tracker_api.time_entries.time_entries_model import ( @@ -204,10 +200,6 @@ def test_get_time_entry_should_succeed_with_valid_id( 'time_tracker_api.time_entries.time_entries_dao.TimeEntriesCosmosDBDao.handle_date_filter_args', Mock(), ) -@patch( - 'time_tracker_api.time_entries.time_entries_repository.TimeEntryCosmosDBRepository.create_sql_date_range_filter', - Mock(), -) @patch( 'commons.data_access_layer.cosmos_db.CosmosDBRepository.generate_params', Mock(), @@ -232,7 +224,6 @@ def test_get_time_entries_by_type_of_user_when_is_user_tester( expected_user_ids, ): test_user_id = "id1" - non_test_user_id = "id2" te1 = TimeEntryCosmosDBModel( { "id": '1', @@ -285,10 +276,6 @@ def test_get_time_entries_by_type_of_user_when_is_user_tester( 'time_tracker_api.time_entries.time_entries_dao.TimeEntriesCosmosDBDao.handle_date_filter_args', Mock(), ) -@patch( - 'time_tracker_api.time_entries.time_entries_repository.TimeEntryCosmosDBRepository.create_sql_date_range_filter', - Mock(), -) @patch( 'commons.data_access_layer.cosmos_db.CosmosDBRepository.generate_params', Mock(), @@ -313,7 +300,6 @@ def test_get_time_entries_by_type_of_user_when_is_not_user_tester( expected_user_ids, ): test_user_id = "id1" - non_test_user_id = "id2" te1 = TimeEntryCosmosDBModel( { "id": '1', @@ -386,7 +372,6 @@ def test_get_time_entry_should_succeed_with_valid_id( ) def test_get_time_entry_raise_http_exception( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, http_exception: HTTPException, @@ -407,7 +392,6 @@ def test_get_time_entry_raise_http_exception( def test_update_time_entry_calls_partial_update_with_incoming_payload( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, owner_id: str, @@ -465,7 +449,6 @@ def test_update_time_entry_should_reject_bad_request( def test_update_time_entry_raise_not_found( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, owner_id: str, @@ -499,7 +482,6 @@ def test_update_time_entry_raise_not_found( def test_delete_time_entry_calls_delete( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, time_entries_dao, @@ -529,7 +511,6 @@ def test_delete_time_entry_calls_delete( ) def test_delete_time_entry_raise_http_exception( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, http_exception: HTTPException, @@ -554,7 +535,6 @@ def test_delete_time_entry_raise_http_exception( def test_stop_time_entry_calls_partial_update( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, time_entries_dao, @@ -581,7 +561,6 @@ def test_stop_time_entry_calls_partial_update( def test_stop_time_entry_raise_unprocessable_entity( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, time_entries_dao, @@ -611,7 +590,6 @@ def test_stop_time_entry_raise_unprocessable_entity( def test_restart_time_entry_calls_partial_update( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, time_entries_dao, @@ -638,7 +616,6 @@ def test_restart_time_entry_calls_partial_update( def test_restart_time_entry_raise_unprocessable_entity( client: FlaskClient, - mocker: MockFixture, valid_header: dict, valid_id: str, time_entries_dao, diff --git a/tests/time_tracker_api/time_entries/time_entries_query_builder_test.py b/tests/time_tracker_api/time_entries/time_entries_query_builder_test.py index fd23bd01..f3fa7efa 100644 --- a/tests/time_tracker_api/time_entries/time_entries_query_builder_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_query_builder_test.py @@ -6,7 +6,7 @@ from utils.repository import remove_white_spaces -def test_TimeEntryQueryBuilder_is_subclass_CosmosDBQueryBuilder(): +def test_time_entry_query_builder_should_be_subclass_of_cosmos_query_builder(): query_builder = CosmosDBQueryBuilder() time_entries_query_builder = TimeEntryQueryBuilder() @@ -15,50 +15,6 @@ def test_TimeEntryQueryBuilder_is_subclass_CosmosDBQueryBuilder(): ) -def test_add_sql_date_range_condition_should_update_where_list(): - start_date = "2021-03-19T05:07:00.000Z" - end_date = "2021-03-25T10:00:00.000Z" - time_entry_query_builder = ( - TimeEntryQueryBuilder().add_sql_date_range_condition( - { - "start_date": start_date, - "end_date": end_date, - } - ) - ) - expected_params = [ - {"name": "@start_date", "value": start_date}, - {"name": "@end_date", "value": end_date}, - ] - assert len(time_entry_query_builder.where_conditions) == 1 - assert len(time_entry_query_builder.parameters) == len(expected_params) - assert time_entry_query_builder.get_parameters() == expected_params - - -def test_build_with_add_sql_date_range_condition(): - time_entry_query_builder = ( - TimeEntryQueryBuilder() - .add_sql_date_range_condition( - { - "start_date": "2021-04-19T05:00:00.000Z", - "end_date": "2021-04-20T10:00:00.000Z", - } - ) - .build() - ) - - expected_query = """ - SELECT * FROM c - WHERE ((c.start_date BETWEEN @start_date AND @end_date) OR - (c.end_date BETWEEN @start_date AND @end_date)) - """ - query = time_entry_query_builder.get_query() - - assert remove_white_spaces(query) == remove_white_spaces(expected_query) - assert len(time_entry_query_builder.where_conditions) == 1 - assert len(time_entry_query_builder.get_parameters()) == 2 - - def test_add_sql_interception_with_date_range_condition(): start_date = "2021-01-19T05:07:00.000Z" end_date = "2021-01-25T10:00:00.000Z" diff --git a/tests/utils/query_builder_test.py b/tests/utils/query_builder_test.py index 742730db..dc66b4f1 100644 --- a/tests/utils/query_builder_test.py +++ b/tests/utils/query_builder_test.py @@ -331,3 +331,93 @@ def test_add_sql_not_in_condition( ) assert len(query_builder.where_conditions) == len(expected_not_in_list) assert query_builder.where_conditions == expected_not_in_list + + +def test_add_sql_date_range_condition_should_update_where_list(): + start_date = "2021-03-19T05:07:00.000Z" + end_date = "2021-03-25T10:00:00.000Z" + query_builder = CosmosDBQueryBuilder().add_sql_date_range_condition( + { + "start_date": start_date, + "end_date": end_date, + } + ) + expected_params = [ + {"name": "@start_date", "value": start_date}, + {"name": "@end_date", "value": end_date}, + ] + assert len(query_builder.where_conditions) == 1 + assert len(query_builder.parameters) == len(expected_params) + assert query_builder.get_parameters() == expected_params + + +def test_build_with_add_sql_date_range_condition(): + query_builder = ( + CosmosDBQueryBuilder() + .add_sql_date_range_condition( + { + "start_date": "2021-04-19T05:00:00.000Z", + "end_date": "2021-04-20T10:00:00.000Z", + } + ) + .build() + ) + + expected_query = """ + SELECT * FROM c + WHERE ((c.start_date BETWEEN @start_date AND @end_date) OR + (c.end_date BETWEEN @start_date AND @end_date)) + """ + query = query_builder.get_query() + + assert remove_white_spaces(query) == remove_white_spaces(expected_query) + assert len(query_builder.where_conditions) == 1 + assert len(query_builder.get_parameters()) == 2 + + +def test_add_sql_active_condition_should_update_where_conditions(): + status_value = 'active' + expected_active_query = f""" + SELECT * FROM c + WHERE NOT IS_DEFINED(c.status) OR (IS_DEFINED(c.status) AND c.status = '{status_value}') + """ + expected_condition = f"NOT IS_DEFINED(c.status) OR (IS_DEFINED(c.status) AND c.status = '{status_value}')" + + query_builder = ( + CosmosDBQueryBuilder() + .add_sql_active_condition(status_value=status_value) + .build() + ) + + active_query = query_builder.get_query() + + assert remove_white_spaces(active_query) == remove_white_spaces( + expected_active_query + ) + assert len(query_builder.where_conditions) == 1 + assert query_builder.where_conditions[0] == expected_condition + + +def test_add_sql_inactive_condition_should_update_where_conditions(): + status_value = 'inactive' + expected_inactive_query = f""" + SELECT * FROM c + WHERE (IS_DEFINED(c.status) AND c.status = '{status_value}') + """ + expected_condition = ( + f"(IS_DEFINED(c.status) AND c.status = '{status_value}')" + ) + + query_builder = ( + CosmosDBQueryBuilder() + .add_sql_active_condition(status_value=status_value) + .build() + ) + + inactive_query = query_builder.get_query() + + assert remove_white_spaces(inactive_query) == remove_white_spaces( + expected_inactive_query + ) + assert len(query_builder.where_conditions) == 1 + assert query_builder.where_conditions[0] == expected_condition diff --git a/time_tracker_api/time_entries/time_entries_query_builder.py b/time_tracker_api/time_entries/time_entries_query_builder.py index 3147d43f..2417ac85 100644 --- a/time_tracker_api/time_entries/time_entries_query_builder.py +++ b/time_tracker_api/time_entries/time_entries_query_builder.py @@ -5,23 +5,6 @@ class TimeEntryQueryBuilder(CosmosDBQueryBuilder): def __init__(self): super(TimeEntryQueryBuilder, self).__init__() - def add_sql_date_range_condition(self, date_range: tuple = None): - if date_range and len(date_range) == 2: - start_date = date_range['start_date'] - end_date = date_range['end_date'] - condition = """ - ((c.start_date BETWEEN @start_date AND @end_date) OR - (c.end_date BETWEEN @start_date AND @end_date)) - """ - self.where_conditions.append(condition) - self.parameters.extend( - [ - {'name': '@start_date', 'value': start_date}, - {'name': '@end_date', 'value': end_date}, - ] - ) - return self - def add_sql_interception_with_date_range_condition( self, start_date, end_date ): diff --git a/utils/query_builder.py b/utils/query_builder.py index 2899aab4..b66f9ec1 100644 --- a/utils/query_builder.py +++ b/utils/query_builder.py @@ -34,6 +34,41 @@ def add_sql_in_condition( self.where_conditions.append(f"c.{attribute} IN {ids_values}") return self + def add_sql_active_condition(self, status_value: str): + if status_value: + not_defined_condition = '' + condition_operand = '' + if status_value == 'active': + not_defined_condition = 'NOT IS_DEFINED(c.status)' + condition_operand = ' OR ' + + defined_condition = ( + f"(IS_DEFINED(c.status) AND c.status = '{status_value}')" + ) + condition = ( + not_defined_condition + condition_operand + defined_condition + ) + self.where_conditions.append(condition) + return self + + def add_sql_date_range_condition(self, date_range: dict = None): + if date_range: + start_date = date_range.get('start_date') + end_date = date_range.get('end_date') + if start_date and end_date: + condition = """ + ((c.start_date BETWEEN @start_date AND @end_date) OR + (c.end_date BETWEEN @start_date AND @end_date)) + """ + self.where_conditions.append(condition) + self.parameters.extend( + [ + {'name': '@start_date', 'value': start_date}, + {'name': '@end_date', 'value': end_date}, + ] + ) + return self + def add_sql_where_equal_condition(self, data: dict = None): if data: for k, v in data.items(): From 9ef736fbbe824246351d51b355a975b83c95bfe9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Fri, 6 Aug 2021 12:29:03 -0500 Subject: [PATCH 09/74] fix: TT-303 Create activity with active status (#315) --- .../activities/activities_model_test.py | 38 ++++++++++++++++--- .../activities/activities_model.py | 13 ++++--- 2 files changed, 40 insertions(+), 11 deletions(-) diff --git a/tests/time_tracker_api/activities/activities_model_test.py b/tests/time_tracker_api/activities/activities_model_test.py index fe84ce3a..37c61e0f 100644 --- a/tests/time_tracker_api/activities/activities_model_test.py +++ b/tests/time_tracker_api/activities/activities_model_test.py @@ -1,11 +1,15 @@ -from unittest.mock import Mock, patch - +import copy +from unittest.mock import Mock, patch, ANY +from faker import Faker from commons.data_access_layer.database import EventContext from time_tracker_api.activities.activities_model import ( ActivityCosmosDBRepository, ActivityCosmosDBModel, + create_dao, ) +faker = Faker() + @patch( 'time_tracker_api.activities.activities_model.ActivityCosmosDBRepository.find_partition_key_value' @@ -16,10 +20,10 @@ def test_find_all_with_id_in_list( activity_repository: ActivityCosmosDBRepository, ): expected_item = { - 'id': 'id1', - 'name': 'testing', - 'description': 'do some testing', - 'tenant_id': 'tenantid1', + 'id': faker.uuid4(), + 'name': faker.name(), + 'description': faker.sentence(nb_words=4), + 'tenant_id': faker.uuid4(), } query_items_mock = Mock(return_value=[expected_item]) @@ -37,3 +41,25 @@ def test_find_all_with_id_in_list( activity = result[0] assert isinstance(activity, ActivityCosmosDBModel) assert activity.__dict__ == expected_item + + +def test_create_activity_should_add_active_status( + mocker, +): + activity_payload = { + 'name': faker.name(), + 'description': faker.sentence(nb_words=5), + 'tenant_id': faker.uuid4(), + } + activity_repository_create_mock = mocker.patch.object( + ActivityCosmosDBRepository, 'create' + ) + + activity_dao = create_dao() + activity_dao.create(activity_payload) + + expect_argument = copy.copy(activity_payload) + expect_argument['status'] = 'active' + activity_repository_create_mock.assert_called_with( + data=expect_argument, event_context=ANY + ) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 2a1de900..83f10fff 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -10,11 +10,7 @@ from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext -from utils.repository import ( - convert_list_to_tuple_string, - create_sql_in_condition, -) -from utils.query_builder import CosmosDBQueryBuilder, Order +from utils.query_builder import CosmosDBQueryBuilder class ActivityDao(CRUDDao): @@ -150,6 +146,13 @@ def get_all( ) return activities + def create(self, activity_payload: dict): + event_ctx = self.create_event_context('create') + activity_payload['status'] = 'active' + return self.repository.create( + data=activity_payload, event_context=event_ctx + ) + def create_dao() -> ActivityDao: repository = ActivityCosmosDBRepository() From bd5b4fc52dc3ba7acb09cd9b437f9fd435730202 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 6 Aug 2021 17:37:53 +0000 Subject: [PATCH 10/74] 0.37.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 81930604..6a90a65d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.37.2 (2021-08-06) +### Fix +* TT-303 Create activity with active status ([#315](https://github.com/ioet/time-tracker-backend/issues/315)) ([`9ef736f`](https://github.com/ioet/time-tracker-backend/commit/9ef736fbbe824246351d51b355a975b83c95bfe9)) + ## v0.37.1 (2021-08-06) ### Fix * TT-302 Fix URLLIB3 dependencies vulnerabilities ([#313](https://github.com/ioet/time-tracker-backend/issues/313)) ([`f7aba96`](https://github.com/ioet/time-tracker-backend/commit/f7aba96802a629d2829fc09606c67a07364c3016)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index a4b38359..89a93b15 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.37.1' +__version__ = '0.37.2' From 0ba1b51440b5b34dcd2e9a4f53e1b951f5ba5596 Mon Sep 17 00:00:00 2001 From: Santiago Pozo Ruiz <38196801+DrFreud1@users.noreply.github.com> Date: Tue, 10 Aug 2021 17:25:01 -0500 Subject: [PATCH 11/74] Tt 305 tags semantic release (#318) * refactor: TT-305 remove the style commit tag * Revert "refactor: TT-305 remove the style commit tag" This reverts commit 934b988a46bae91e93ce14464e6b5ccbdd3e0ad6. * remove the style commit tag * Revert "remove the style commit tag" This reverts commit 59d4dbd0e821b521a18c69c1fc44f6907a43ad92. * refactor: TT-305 remove the style commit tag * Revert "refactor: TT-305 remove the style commit tag" This reverts commit 63452204247cb9466cd2c3e6caf48abc7f8c781c. * refactor: TT-305 remove style tag from semantic commit message checker * ci: TT-305 testing commits * Revert "ci: TT-305 testing commits" This reverts commit 95f3e377b52776485ca5bdadac70fc782e2802b9. * Revert "Revert "ci: TT-305 testing commits"" This reverts commit 9f1819df980a8f304360a6a225a20076759a9e3d. * test: TT-305 testing commits * docs: TT-305 testing commit and added a dot in README.md * refactor: TT-305 add code-smell tag * code-smell: TT-305 test tag --- .pre-commit-config.yaml | 2 +- commons/git_hooks/enforce_semantic_commit_msg.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 77e4a0c2..c9c54289 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: language: python stages : [commit-msg] - repo: https://github.com/psf/black - rev: stable + rev: 21.7b0 hooks: - id: black args: [--line-length=79, --skip-string-normalization] diff --git a/commons/git_hooks/enforce_semantic_commit_msg.py b/commons/git_hooks/enforce_semantic_commit_msg.py index ea354e1d..53c46216 100644 --- a/commons/git_hooks/enforce_semantic_commit_msg.py +++ b/commons/git_hooks/enforce_semantic_commit_msg.py @@ -13,7 +13,7 @@ SUCCESS_MSG = "Commit succeed!. Semantic commit message is correct." -COMMIT_MSG_REGEX = r'(build|ci|docs|feat|fix|perf|refactor|style|test|chore|revert)(\([\w\-]+\))?: TT-[0-9]+ .*' +COMMIT_MSG_REGEX = r'(build|ci|docs|feat|fix|perf|refactor|test|chore|revert|code-smell)(\([\w\-]+\))?: TT-[0-9]+ .*' # Get the commit message file commit_msg_file = open(sys.argv[1]) # The first argument is the file From 3cac4705bc7eb86640ea05edfcca0c2c666a34d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Tue, 10 Aug 2021 18:40:36 -0500 Subject: [PATCH 12/74] refactor: TT-308 Use enums insted defined variables (#317) --- cosmosdb_emulator/time_tracker_cli/providers/common.py | 4 +++- .../activities/activities_model_test.py | 3 ++- .../activities/activities_namespace_test.py | 10 ++++++---- .../customers/customers_namespace_test.py | 8 +++++--- .../projects/projects_namespace_test.py | 7 ++++--- tests/utils/query_builder_test.py | 6 ++++-- time_tracker_api/activities/activities_model.py | 3 ++- time_tracker_api/activities/activities_namespace.py | 7 ++++--- time_tracker_api/customers/customers_model.py | 3 ++- time_tracker_api/customers/customers_namespace.py | 7 ++++--- time_tracker_api/projects/projects_namespace.py | 9 +++++---- utils/enums/status.py | 6 ++++++ utils/query_builder.py | 4 +++- 13 files changed, 50 insertions(+), 27 deletions(-) create mode 100644 utils/enums/status.py diff --git a/cosmosdb_emulator/time_tracker_cli/providers/common.py b/cosmosdb_emulator/time_tracker_cli/providers/common.py index c5ec3e24..c27e60b1 100644 --- a/cosmosdb_emulator/time_tracker_cli/providers/common.py +++ b/cosmosdb_emulator/time_tracker_cli/providers/common.py @@ -1,7 +1,9 @@ from faker.providers import BaseProvider +from utils.enums.status import Status + class CommonProvider(BaseProvider): def status(self) -> str: - available_status = ['active', 'inactive'] + available_status = [Status.ACTIVE.value, Status.INACTIVE.value] return self.random_element(elements=available_status) diff --git a/tests/time_tracker_api/activities/activities_model_test.py b/tests/time_tracker_api/activities/activities_model_test.py index 37c61e0f..c1a1b243 100644 --- a/tests/time_tracker_api/activities/activities_model_test.py +++ b/tests/time_tracker_api/activities/activities_model_test.py @@ -7,6 +7,7 @@ ActivityCosmosDBModel, create_dao, ) +from utils.enums.status import Status faker = Faker() @@ -59,7 +60,7 @@ def test_create_activity_should_add_active_status( activity_dao.create(activity_payload) expect_argument = copy.copy(activity_payload) - expect_argument['status'] = 'active' + expect_argument['status'] = Status.ACTIVE.value activity_repository_create_mock.assert_called_with( data=expect_argument, event_context=ANY ) diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 13958d8a..a2b9ab20 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -6,6 +6,8 @@ from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture +from utils.enums.status import Status + fake = Faker() valid_activity_data = { @@ -101,7 +103,7 @@ def test_list_all_active_activities( repository_find_all_mock.assert_called_once_with( event_context=ANY, - conditions={'status': 'active'}, + conditions={'status': Status.ACTIVE.value}, activities_id=ANY, visible_only=ANY, max_count=ANY, @@ -259,7 +261,7 @@ def test_delete_activity_should_succeed_with_valid_id( assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data repository_remove_mock.assert_called_once_with( - str(valid_id), {'status': 'inactive'}, ANY + str(valid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -283,7 +285,7 @@ def test_delete_activity_should_return_not_found_with_invalid_id( assert HTTPStatus.NOT_FOUND == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -309,5 +311,5 @@ def test_delete_activity_should_return_422_for_invalid_id_format( assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) diff --git a/tests/time_tracker_api/customers/customers_namespace_test.py b/tests/time_tracker_api/customers/customers_namespace_test.py index 9c8bfa07..880e7fc2 100644 --- a/tests/time_tracker_api/customers/customers_namespace_test.py +++ b/tests/time_tracker_api/customers/customers_namespace_test.py @@ -6,6 +6,8 @@ from flask_restplus._http import HTTPStatus from pytest_mock import MockFixture +from utils.enums.status import Status + fake = Faker() valid_customer_data = { @@ -229,7 +231,7 @@ def test_delete_customer_should_succeed_with_valid_id( assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data repository_remove_mock.assert_called_once_with( - str(valid_id), {'status': 'inactive'}, ANY + str(valid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -253,7 +255,7 @@ def test_delete_customer_should_return_not_found_with_invalid_id( assert HTTPStatus.NOT_FOUND == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -282,7 +284,7 @@ def test_delete_customer_should_return_422_for_invalid_id_format( assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) diff --git a/tests/time_tracker_api/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py index e8b1d35c..f302d892 100644 --- a/tests/time_tracker_api/projects/projects_namespace_test.py +++ b/tests/time_tracker_api/projects/projects_namespace_test.py @@ -7,6 +7,7 @@ from pytest_mock import MockFixture from time_tracker_api.projects.projects_model import ProjectCosmosDBDao +from utils.enums.status import Status fake = Faker() @@ -256,7 +257,7 @@ def test_delete_project_should_succeed_with_valid_id( assert HTTPStatus.NO_CONTENT == response.status_code assert b'' == response.data repository_remove_mock.assert_called_once_with( - str(valid_id), {'status': 'inactive'}, ANY + str(valid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -280,7 +281,7 @@ def test_delete_project_should_return_not_found_with_invalid_id( assert HTTPStatus.NOT_FOUND == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) @@ -306,5 +307,5 @@ def test_delete_project_should_return_unprocessable_entity_for_invalid_id_format assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code repository_remove_mock.assert_called_once_with( - str(invalid_id), {'status': 'inactive'}, ANY + str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) diff --git a/tests/utils/query_builder_test.py b/tests/utils/query_builder_test.py index dc66b4f1..55608921 100644 --- a/tests/utils/query_builder_test.py +++ b/tests/utils/query_builder_test.py @@ -1,4 +1,6 @@ from unittest.mock import patch + +from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder, Order from utils.repository import remove_white_spaces import pytest @@ -376,7 +378,7 @@ def test_build_with_add_sql_date_range_condition(): def test_add_sql_active_condition_should_update_where_conditions(): - status_value = 'active' + status_value = Status.ACTIVE.value expected_active_query = f""" SELECT * FROM c WHERE NOT IS_DEFINED(c.status) OR (IS_DEFINED(c.status) AND c.status = '{status_value}') @@ -399,7 +401,7 @@ def test_add_sql_active_condition_should_update_where_conditions(): def test_add_sql_inactive_condition_should_update_where_conditions(): - status_value = 'inactive' + status_value = Status.INACTIVE.value expected_inactive_query = f""" SELECT * FROM c WHERE (IS_DEFINED(c.status) AND c.status = '{status_value}') diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 83f10fff..cbfd0d20 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -10,6 +10,7 @@ from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext +from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder @@ -148,7 +149,7 @@ def get_all( def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') - activity_payload['status'] = 'active' + activity_payload['status'] = Status.ACTIVE.value return self.repository.create( data=activity_payload, event_context=event_ctx ) diff --git a/time_tracker_api/activities/activities_namespace.py b/time_tracker_api/activities/activities_namespace.py index 36973542..df3c104a 100644 --- a/time_tracker_api/activities/activities_namespace.py +++ b/time_tracker_api/activities/activities_namespace.py @@ -9,6 +9,7 @@ remove_required_constraint, NullableString, ) +from utils.enums.status import Status faker = Faker() @@ -40,8 +41,8 @@ example=Faker().words( 2, [ - 'active', - 'inactive', + Status.ACTIVE.value, + Status.INACTIVE.value, ], unique=True, ), @@ -117,5 +118,5 @@ def put(self, id): @ns.response(HTTPStatus.NO_CONTENT, 'Activity deleted successfully') def delete(self, id): """Delete an activity""" - activity_dao.update(id, {'status': 'inactive'}) + activity_dao.update(id, {'status': Status.INACTIVE.value}) return None, HTTPStatus.NO_CONTENT diff --git a/time_tracker_api/customers/customers_model.py b/time_tracker_api/customers/customers_model.py index cb245afe..a81b6d78 100644 --- a/time_tracker_api/customers/customers_model.py +++ b/time_tracker_api/customers/customers_model.py @@ -8,6 +8,7 @@ CosmosDBDao, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao +from utils.enums.status import Status class CustomerDao(CRUDDao): @@ -32,7 +33,7 @@ class CustomerCosmosDBModel(CosmosDBModel): description: str deleted: str tenant_id: str - status: str = field(default='active') + status: str = field(default=Status.ACTIVE.value) def __init__(self, data): super(CustomerCosmosDBModel, self).__init__(data) # pragma: no cover diff --git a/time_tracker_api/customers/customers_namespace.py b/time_tracker_api/customers/customers_namespace.py index 584616a4..d9ebfcc3 100644 --- a/time_tracker_api/customers/customers_namespace.py +++ b/time_tracker_api/customers/customers_namespace.py @@ -9,6 +9,7 @@ NullableString, ) from time_tracker_api.customers.customers_model import create_dao +from utils.enums.status import Status faker = Faker() @@ -41,8 +42,8 @@ example=Faker().words( 2, [ - 'active', - 'inactive', + Status.ACTIVE.value, + Status.INACTIVE.value, ], unique=True, ), @@ -122,5 +123,5 @@ def put(self, id): @ns.response(HTTPStatus.NO_CONTENT, 'Customer successfully deleted') def delete(self, id): """Delete a customer""" - customer_dao.update(id, {'status': 'inactive'}) + customer_dao.update(id, {'status': Status.INACTIVE.value}) return None, HTTPStatus.NO_CONTENT diff --git a/time_tracker_api/projects/projects_namespace.py b/time_tracker_api/projects/projects_namespace.py index 7030f63e..a09455fd 100644 --- a/time_tracker_api/projects/projects_namespace.py +++ b/time_tracker_api/projects/projects_namespace.py @@ -11,6 +11,7 @@ NullableString, ) from time_tracker_api.projects.projects_model import create_dao +from utils.enums.status import Status faker = Faker() @@ -61,8 +62,8 @@ example=Faker().words( 2, [ - 'active', - 'inactive', + Status.ACTIVE.value, + Status.INACTIVE.value, ], unique=True, ), @@ -142,7 +143,7 @@ def get(self): """List all projects""" conditions = attributes_filter.parse_args() return project_dao.get_all( - conditions=conditions, customer_status='active' + conditions=conditions, customer_status=Status.ACTIVE.value ) @ns.doc('create_project') @@ -190,5 +191,5 @@ def put(self, id): @ns.response(HTTPStatus.NO_CONTENT, 'Project successfully deleted') def delete(self, id): """Delete a project""" - project_dao.update(id, {'status': 'inactive'}) + project_dao.update(id, {'status': Status.INACTIVE.value}) return None, HTTPStatus.NO_CONTENT diff --git a/utils/enums/status.py b/utils/enums/status.py new file mode 100644 index 00000000..256319fb --- /dev/null +++ b/utils/enums/status.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class Status(Enum): + ACTIVE = 'active' + INACTIVE = 'inactive' diff --git a/utils/query_builder.py b/utils/query_builder.py index b66f9ec1..3d9b200a 100644 --- a/utils/query_builder.py +++ b/utils/query_builder.py @@ -1,4 +1,6 @@ from typing import List + +from utils.enums.status import Status from utils.repository import convert_list_to_tuple_string from enum import Enum @@ -38,7 +40,7 @@ def add_sql_active_condition(self, status_value: str): if status_value: not_defined_condition = '' condition_operand = '' - if status_value == 'active': + if status_value == Status.ACTIVE.value: not_defined_condition = 'NOT IS_DEFINED(c.status)' condition_operand = ' OR ' From ac06aab9b3f07886462b116d67534c3ce7f02a6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20David=20Guam=C3=A1n?= Date: Wed, 11 Aug 2021 20:02:53 -0500 Subject: [PATCH 13/74] fix: TT-304 Handle message: the data could not be load (#316) Authored-by: Edgar Guaman --- .../time_entries/time_entries_repository_test.py | 2 +- time_tracker_api/time_entries/time_entries_repository.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/time_tracker_api/time_entries/time_entries_repository_test.py b/tests/time_tracker_api/time_entries/time_entries_repository_test.py index 70b8fd52..c6bb7a99 100644 --- a/tests/time_tracker_api/time_entries/time_entries_repository_test.py +++ b/tests/time_tracker_api/time_entries/time_entries_repository_test.py @@ -95,7 +95,7 @@ def test_add_complementary_info_when_there_are_not_time_entries( ): with pytest.raises(HTTPException) as http_error: time_entry_repository.add_complementary_info( - time_entries=None, exist_conditions=False + time_entries=None, exist_conditions=True ) status_code = http_error.value.code message = http_error.value.data.get('message') diff --git a/time_tracker_api/time_entries/time_entries_repository.py b/time_tracker_api/time_entries/time_entries_repository.py index d773f2a9..7ec9905a 100644 --- a/time_tracker_api/time_entries/time_entries_repository.py +++ b/time_tracker_api/time_entries/time_entries_repository.py @@ -120,7 +120,7 @@ def add_complementary_info( users = AzureConnection().users() add_user_email_to_time_entries(time_entries, users) - elif not time_entries and not exist_conditions: + elif not time_entries and exist_conditions: abort(HTTPStatus.NOT_FOUND, "Time entry not found") return time_entries From db77699719a1145424adc720b78f77cf0910b994 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 12 Aug 2021 01:09:18 +0000 Subject: [PATCH 14/74] 0.37.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a90a65d..f849c5ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.37.3 (2021-08-12) +### Fix +* TT-304 Handle message: the data could not be load ([#316](https://github.com/ioet/time-tracker-backend/issues/316)) ([`ac06aab`](https://github.com/ioet/time-tracker-backend/commit/ac06aab9b3f07886462b116d67534c3ce7f02a6f)) + ## v0.37.2 (2021-08-06) ### Fix * TT-303 Create activity with active status ([#315](https://github.com/ioet/time-tracker-backend/issues/315)) ([`9ef736f`](https://github.com/ioet/time-tracker-backend/commit/9ef736fbbe824246351d51b355a975b83c95bfe9)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 89a93b15..57a635ed 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.37.2' +__version__ = '0.37.3' From 8e2aadc0937d3a26752b7fb8a1dd837af2f6d6a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean=20Carlos=20Alarc=C3=B3n?= <56373098+jcalarcon98@users.noreply.github.com> Date: Wed, 8 Sep 2021 10:11:26 -0500 Subject: [PATCH 15/74] feat: TT-326 Get recent projects feature added (#319) --- .../factories/project_factory.py | 2 + .../time_tracker_cli/utils/project.py | 1 + .../projects/projects_model_test.py | 61 +++++++++++++++++++ .../projects/projects_namespace_test.py | 17 ++++++ .../time_entries/time_entries_dao_test.py | 49 +++++++++++++++ time_tracker_api/projects/projects_model.py | 32 +++++++++- .../projects/projects_namespace.py | 8 +++ .../time_entries/time_entries_dao.py | 23 ++++++- 8 files changed, 190 insertions(+), 3 deletions(-) create mode 100644 tests/time_tracker_api/time_entries/time_entries_dao_test.py diff --git a/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py b/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py index a03f9ae0..e5031c3f 100644 --- a/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py +++ b/cosmosdb_emulator/time_tracker_cli/factories/project_factory.py @@ -14,6 +14,7 @@ class Project(NamedTuple): project_type_id: int customer_id: str tenant_id: str + status: str class ProjectFactory(Factory): @@ -28,3 +29,4 @@ def __init__(self, project_type_id, customer_id): name = Faker('name') description = Faker('sentence', nb_words=10) tenant_id = get_time_tracker_tenant_id() + status = 'active' diff --git a/cosmosdb_emulator/time_tracker_cli/utils/project.py b/cosmosdb_emulator/time_tracker_cli/utils/project.py index c082d29a..28d6f9e3 100644 --- a/cosmosdb_emulator/time_tracker_cli/utils/project.py +++ b/cosmosdb_emulator/time_tracker_cli/utils/project.py @@ -32,5 +32,6 @@ def get_project_json(project_factory: ProjectFactory) -> dict: 'customer_id': project_factory.customer_id, 'project_type_id': project_factory.project_type_id, 'tenant_id': project_factory.tenant_id, + 'status': project_factory.status, } return project diff --git a/tests/time_tracker_api/projects/projects_model_test.py b/tests/time_tracker_api/projects/projects_model_test.py index c6b63c40..4d571272 100644 --- a/tests/time_tracker_api/projects/projects_model_test.py +++ b/tests/time_tracker_api/projects/projects_model_test.py @@ -13,9 +13,18 @@ ProjectCosmosDBRepository, ProjectCosmosDBModel, create_dao, + ProjectCosmosDBDao, ) from faker import Faker +from time_tracker_api.time_entries.time_entries_dao import ( + TimeEntriesCosmosDBDao, +) +from time_tracker_api.time_entries.time_entries_model import ( + TimeEntryCosmosDBModel, +) +from utils.enums.status import Status + fake = Faker() @@ -138,3 +147,55 @@ def test_get_all_projects_with_customers( assert isinstance(projects[0], ProjectCosmosDBModel) assert projects[0].__dict__['customer_name'] == customer_data['name'] assert len(projects) == 1 + + +def test_get_recent_projects_get_all_method_should_have_been_called_with_specific_arguments( + mocker, +): + projects_amount = 5 + expected_conditions = {'status': Status.ACTIVE.value} + expected_projects_ids = list( + set([fake.uuid4() for i in range(projects_amount)]) + ) + user_time_entries = [] + + for project_id in expected_projects_ids: + current_entry = TimeEntryCosmosDBModel( + {'project_id': project_id, 'id': fake.uuid4()} + ) + user_time_entries.append(current_entry) + + mocker.patch.object( + TimeEntriesCosmosDBDao, + 'get_latest_entries', + return_value=user_time_entries, + ) + project_cosmos_db_dao_get_all_mock = mocker.patch.object( + ProjectCosmosDBDao, 'get_all' + ) + projects_dao = create_dao() + + projects_dao.get_recent_projects() + + project_cosmos_db_dao_get_all_mock.assert_called_once_with( + conditions=expected_conditions, + project_ids=expected_projects_ids, + customer_status=Status.ACTIVE.value, + ) + + +def test_get_recent_projects_should_return_an_empty_array_if_the_user_has_no_entries( + mocker, +): + user_time_entries = [] + mocker.patch.object( + TimeEntriesCosmosDBDao, + 'get_latest_entries', + return_value=user_time_entries, + ) + + projects_dao = create_dao() + + recent_projects = projects_dao.get_recent_projects() + + assert len(recent_projects) == 0 diff --git a/tests/time_tracker_api/projects/projects_namespace_test.py b/tests/time_tracker_api/projects/projects_namespace_test.py index f302d892..72ae9293 100644 --- a/tests/time_tracker_api/projects/projects_namespace_test.py +++ b/tests/time_tracker_api/projects/projects_namespace_test.py @@ -309,3 +309,20 @@ def test_delete_project_should_return_unprocessable_entity_for_invalid_id_format repository_remove_mock.assert_called_once_with( str(invalid_id), {'status': Status.INACTIVE.value}, ANY ) + + +def test_get_recent_projects_should_call_method_get_recent_projects_from_project_dao( + client: FlaskClient, mocker: MockFixture, valid_header: dict +): + project_dao_get_recent_projects_mock = mocker.patch.object( + ProjectCosmosDBDao, 'get_recent_projects', return_value=[] + ) + + response = client.get( + "/projects/recent", + headers=valid_header, + follow_redirects=True, + ) + + assert response.status_code == HTTPStatus.OK + project_dao_get_recent_projects_mock.assert_called_once() diff --git a/tests/time_tracker_api/time_entries/time_entries_dao_test.py b/tests/time_tracker_api/time_entries/time_entries_dao_test.py new file mode 100644 index 00000000..123441f2 --- /dev/null +++ b/tests/time_tracker_api/time_entries/time_entries_dao_test.py @@ -0,0 +1,49 @@ +from unittest.mock import ANY + +from time_tracker_api.database import APICosmosDBDao +from time_tracker_api.time_entries.time_entries_repository import ( + TimeEntryCosmosDBRepository, +) + + +def test_get_latest_entries_must_be_called_with_default_amount_of_entries( + mocker, time_entries_dao +): + expected_conditions = {'owner_id': ANY} + + expected_entries_amount = 20 + + time_entries_repository_find_all_mock = mocker.patch.object( + TimeEntryCosmosDBRepository, 'find_all' + ) + mocker.patch.object(APICosmosDBDao, 'create_event_context') + + time_entries_dao.get_latest_entries() + + time_entries_repository_find_all_mock.assert_called_with( + conditions=expected_conditions, + max_count=expected_entries_amount, + event_context=ANY, + ) + + +def test_get_latest_entries_must_be_called_with_amount_of_entries_passed_in_condition( + mocker, time_entries_dao +): + time_entries_repository_find_all_mock = mocker.patch.object( + TimeEntryCosmosDBRepository, 'find_all' + ) + mocker.patch.object(APICosmosDBDao, 'create_event_context') + + expected_entries_amount = 40 + conditions = {'limit': expected_entries_amount} + + time_entries_dao.get_latest_entries(conditions=conditions) + + conditions.update({'owner_id': ANY}) + + time_entries_repository_find_all_mock.assert_called_with( + conditions=conditions, + max_count=expected_entries_amount, + event_context=ANY, + ) diff --git a/time_tracker_api/projects/projects_model.py b/time_tracker_api/projects/projects_model.py index 0d767251..6f2723d5 100644 --- a/time_tracker_api/projects/projects_model.py +++ b/time_tracker_api/projects/projects_model.py @@ -15,6 +15,7 @@ create_dao as project_types_create_dao, ) from time_tracker_api.customers.customers_model import CustomerCosmosDBModel +from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder from utils.extend_model import ( add_customer_name_to_projects, @@ -158,8 +159,37 @@ def get_all( add_customer_name_to_projects(projects, customers) return projects + def get_recent_projects(self): + """ + Gets the last projects in which the person has generated entries. + The import had to be carried out within the method to avoid circular dependency. + """ + from time_tracker_api.time_entries.time_entries_dao import ( + create_dao as create_entries_dao, + ) + + recent_projects = [] + time_entries_dao = create_entries_dao() + last_time_entries = time_entries_dao.get_latest_entries() + + last_time_entries_amount = len(last_time_entries) + + if last_time_entries_amount == 0: + return recent_projects + + project_ids = list( + set([entry.project_id for entry in last_time_entries]) + ) + conditions = {'status': Status.ACTIVE.value} + recent_projects = self.get_all( + conditions=conditions, + project_ids=project_ids, + customer_status=Status.ACTIVE.value, + ) + + return recent_projects + def create_dao() -> ProjectDao: repository = ProjectCosmosDBRepository() - return ProjectCosmosDBDao(repository) diff --git a/time_tracker_api/projects/projects_namespace.py b/time_tracker_api/projects/projects_namespace.py index a09455fd..1c546707 100644 --- a/time_tracker_api/projects/projects_namespace.py +++ b/time_tracker_api/projects/projects_namespace.py @@ -193,3 +193,11 @@ def delete(self, id): """Delete a project""" project_dao.update(id, {'status': Status.INACTIVE.value}) return None, HTTPStatus.NO_CONTENT + + +@ns.route('/recent') +class RecentProjects(Resource): + @ns.doc('list_recent_projects') + @ns.marshal_list_with(project) + def get(self): + return project_dao.get_recent_projects() diff --git a/time_tracker_api/time_entries/time_entries_dao.py b/time_tracker_api/time_entries/time_entries_dao.py index 98fb64b4..3cfbbdb2 100644 --- a/time_tracker_api/time_entries/time_entries_dao.py +++ b/time_tracker_api/time_entries/time_entries_dao.py @@ -2,12 +2,10 @@ from commons.data_access_layer.cosmos_db import ( CosmosDBDao, CustomError, - CosmosDBRepository, ) from utils.extend_model import ( add_project_info_to_time_entries, add_activity_name_to_time_entries, - create_custom_query_from_str, create_list_from_str, ) from utils.time import ( @@ -121,6 +119,27 @@ def get_all(self, conditions: dict = None, **kwargs) -> list: return time_entries_list + def get_latest_entries(self, conditions: dict = None): + """ + Get the latest entries without taking into account a data range. + It would only be necessary to pass the number of last entries that + you need, this parameter must be passed by the conditions. + The default value for the entries amount is 20. + """ + conditions = conditions if conditions else {} + + default_entries_amount = 20 + event_context = self.create_event_context('read_many') + conditions.update({'owner_id': event_context.user_id}) + entries_amount = conditions.pop("limit", default_entries_amount) + time_entries = self.repository.find_all( + conditions=conditions, + max_count=entries_amount, + event_context=event_context, + ) + + return time_entries + def get_lastest_entries_by_project( self, conditions: dict = None, **kwargs ) -> list: From 36ccc659441063b6fbc8eba6f92de0aa14af22dc Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 8 Sep 2021 15:20:12 +0000 Subject: [PATCH 16/74] 0.38.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f849c5ab..3ee95b66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.38.0 (2021-09-08) +### Feature +* TT-326 Get recent projects feature added ([#319](https://github.com/ioet/time-tracker-backend/issues/319)) ([`8e2aadc`](https://github.com/ioet/time-tracker-backend/commit/8e2aadc0937d3a26752b7fb8a1dd837af2f6d6a0)) + ## v0.37.3 (2021-08-12) ### Fix * TT-304 Handle message: the data could not be load ([#316](https://github.com/ioet/time-tracker-backend/issues/316)) ([`ac06aab`](https://github.com/ioet/time-tracker-backend/commit/ac06aab9b3f07886462b116d67534c3ce7f02a6f)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 57a635ed..457618b1 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.37.3' +__version__ = '0.38.0' From 328ad43e3058de3c824b2feec47530bee5b23823 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Soto?= <41339889+EdansRocks@users.noreply.github.com> Date: Mon, 4 Oct 2021 11:59:31 -0500 Subject: [PATCH 17/74] feat: TT-353 Create V2 Activities DAO (#320) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-353 Create V2 Activities DAO * refactor: TT-353 Solving code smells from SonarCloud * refactor: TT-353 Solving duplicated literal * refactor: TT-353 Add type of argument and return type to functions * refactor: TT-353 Solving comments from PR * refactor: TT-353 Solving Sonarcloud code smell * refactor: TT-353 Changing variable names and tests * refactor: TT-353 Solving requested changes on PR * refactor: TT-353 Solving typo errors on names * refactor: TT-353 Solving requested changes on PR Co-authored-by: Andrés Soto --- V2/source/activities_data.json | 66 +++++++++++++++++ V2/source/daos/activities_dao.py | 13 ++++ V2/source/daos/activities_json_dao.py | 42 +++++++++++ V2/source/dtos/activity.py | 11 +++ V2/source/services/activity_service.py | 14 ++++ V2/tests/daos/activities_json_dao_test.py | 85 ++++++++++++++++++++++ V2/tests/services/activity_service_test.py | 28 +++++++ 7 files changed, 259 insertions(+) create mode 100644 V2/source/activities_data.json create mode 100644 V2/source/daos/activities_dao.py create mode 100644 V2/source/daos/activities_json_dao.py create mode 100644 V2/source/dtos/activity.py create mode 100644 V2/source/services/activity_service.py create mode 100644 V2/tests/daos/activities_json_dao_test.py create mode 100644 V2/tests/services/activity_service_test.py diff --git a/V2/source/activities_data.json b/V2/source/activities_data.json new file mode 100644 index 00000000..0d949902 --- /dev/null +++ b/V2/source/activities_data.json @@ -0,0 +1,66 @@ +[ + { + "name": "Development", + "description": "Development", + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "id": "c61a4a49-3364-49a3-a7f7-0c5f2d15072b", + "_rid": "QUwFAPuumiRhAAAAAAAAAA==", + "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRhAAAAAAAAAA==/", + "_etag": "\"4e006cc9-0000-0500-0000-607dcc0d0000\"", + "_attachments": "attachments/", + "_last_event_ctx": { + "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "action": "delete", + "description": null, + "container_id": "activity", + "session_id": null + }, + "deleted": "b4327ba6-9f96-49ee-a9ac-3c1edf525172", + "status": null, + "_ts": 1618856973 + }, + { + "name": "Management", + "description": null, + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "id": "94ec92e2-a500-4700-a9f6-e41eb7b5507c", + "_last_event_ctx": { + "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "action": "delete", + "description": null, + "container_id": "activity", + "session_id": null + }, + "_rid": "QUwFAPuumiRfAAAAAAAAAA==", + "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRfAAAAAAAAAA==/", + "_etag": "\"4e0069c9-0000-0500-0000-607dcc0d0000\"", + "_attachments": "attachments/", + "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", + "status": null, + "_ts": 1618856973 + }, + { + "name": "Operations", + "description": "Operation activities performed.", + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "id": "d45c770a-b1a0-4bd8-a713-22c01a23e41b", + "_rid": "QUwFAPuumiRjAAAAAAAAAA==", + "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRjAAAAAAAAAA==/", + "_etag": "\"09009a4d-0000-0500-0000-614b66fb0000\"", + "_attachments": "attachments/", + "_last_event_ctx": { + "user_id": "82ed0f65-051c-4898-890f-870805900e21", + "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", + "action": "update", + "description": null, + "container_id": "activity", + "session_id": null + }, + "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", + "status": "active", + "_ts": 1632331515 + } +] + diff --git a/V2/source/daos/activities_dao.py b/V2/source/daos/activities_dao.py new file mode 100644 index 00000000..11cfb0f9 --- /dev/null +++ b/V2/source/daos/activities_dao.py @@ -0,0 +1,13 @@ +from V2.source.dtos.activity import Activity +import abc +import typing + + +class ActivitiesDao(abc.ABC): + @abc.abstractmethod + def get_by_id(self, id: str) -> Activity: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[Activity]: + pass diff --git a/V2/source/daos/activities_json_dao.py b/V2/source/daos/activities_json_dao.py new file mode 100644 index 00000000..c86e2ec0 --- /dev/null +++ b/V2/source/daos/activities_json_dao.py @@ -0,0 +1,42 @@ +from V2.source.daos.activities_dao import ActivitiesDao +from V2.source.dtos.activity import Activity +import dataclasses +import json +import typing + + +class ActivitiesJsonDao(ActivitiesDao): + def __init__(self, json_data_file_path: str): + self.json_data_file_path = json_data_file_path + self.activity_keys = [ + field.name for field in dataclasses.fields(Activity) + ] + + def get_by_id(self, activity_id: str) -> Activity: + activity = { + activity.get('id'): activity + for activity in self.__get_activities_from_file() + }.get(activity_id) + + return self.__create_activity_dto(activity) if activity else None + + def get_all(self) -> typing.List[Activity]: + return [ + self.__create_activity_dto(activity) + for activity in self.__get_activities_from_file() + ] + + def __get_activities_from_file(self) -> typing.List[dict]: + try: + file = open(self.json_data_file_path) + activities = json.load(file) + file.close() + + return activities + + except FileNotFoundError: + return [] + + def __create_activity_dto(self, activity: dict) -> Activity: + activity = {key: activity.get(key) for key in self.activity_keys} + return Activity(**activity) diff --git a/V2/source/dtos/activity.py b/V2/source/dtos/activity.py new file mode 100644 index 00000000..86f56ee9 --- /dev/null +++ b/V2/source/dtos/activity.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class Activity: + id: str + name: str + description: str + deleted: str + status: str + tenant_id: str diff --git a/V2/source/services/activity_service.py b/V2/source/services/activity_service.py new file mode 100644 index 00000000..fdba3390 --- /dev/null +++ b/V2/source/services/activity_service.py @@ -0,0 +1,14 @@ +from V2.source.daos.activities_dao import ActivitiesDao +from V2.source.dtos.activity import Activity +import typing + + +class ActivityService: + def __init__(self, activities_dao: ActivitiesDao): + self.activities_dao = activities_dao + + def get_by_id(self, activity_id: str) -> Activity: + return self.activities_dao.get_by_id(activity_id) + + def get_all(self) -> typing.List[Activity]: + return self.activities_dao.get_all() diff --git a/V2/tests/daos/activities_json_dao_test.py b/V2/tests/daos/activities_json_dao_test.py new file mode 100644 index 00000000..d4f87b96 --- /dev/null +++ b/V2/tests/daos/activities_json_dao_test.py @@ -0,0 +1,85 @@ +from V2.source.daos.activities_json_dao import ActivitiesJsonDao +from V2.source.dtos.activity import Activity +from faker import Faker +import json +import pytest +import typing + + +@pytest.fixture(name='create_fake_activities') +def _create_fake_activities(mocker) -> typing.List[Activity]: + def _creator(activities): + read_data = json.dumps(activities) + mocker.patch('builtins.open', mocker.mock_open(read_data=read_data)) + return [Activity(**activity) for activity in activities] + + return _creator + + +def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matches_its_id( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activities = create_fake_activities( + [ + { + "name": "test_name", + "description": "test_description", + "tenant_id": "test_tenant_id", + "id": "test_id", + "deleted": "test_deleted", + "status": "test_status", + } + ] + ) + activity_dto = activities.pop() + + result = activities_json_dao.get_by_id(activity_dto.id) + + assert result == activity_dto + + +def test__get_by_id__returns_none__when_no_activity_matches_its_id( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + create_fake_activities([]) + + result = activities_json_dao.get_by_id(Faker().uuid4()) + + assert result == None + + +def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + number_of_activities = 3 + activities = create_fake_activities( + [ + { + "name": "test_name", + "description": "test_description", + "tenant_id": "test_tenant_id", + "id": "test_id", + "deleted": "test_deleted", + "status": "test_status", + } + ] + * number_of_activities + ) + + result = activities_json_dao.get_all() + + assert result == activities + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activities = create_fake_activities([]) + + result = activities_json_dao.get_all() + + assert result == activities diff --git a/V2/tests/services/activity_service_test.py b/V2/tests/services/activity_service_test.py new file mode 100644 index 00000000..e2e62b04 --- /dev/null +++ b/V2/tests/services/activity_service_test.py @@ -0,0 +1,28 @@ +from V2.source.services.activity_service import ActivityService +from faker import Faker + + +def test__get_all__uses_the_activity_dao__to_retrieve_activities(mocker): + expected_activities = mocker.Mock() + activity_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_activities) + ) + activity_service = ActivityService(activity_dao) + + actual_activities = activity_service.get_all() + + assert activity_dao.get_all.called + assert expected_activities == actual_activities + + +def test__get_by_id__uses_the_activity_dao__to_retrieve_one_activity(mocker): + expected_activity = mocker.Mock() + activity_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_activity) + ) + activity_service = ActivityService(activity_dao) + + actual_activity = activity_service.get_by_id(Faker().uuid4()) + + assert activity_dao.get_by_id.called + assert expected_activity == actual_activity From 0e9f8f6b7d2370e473c8c0ce7c66eecc29bffa38 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 4 Oct 2021 20:52:08 +0000 Subject: [PATCH 18/74] 0.39.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ee95b66..01ec15a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.39.0 (2021-10-04) +### Feature +* TT-353 Create V2 Activities DAO ([#320](https://github.com/ioet/time-tracker-backend/issues/320)) ([`328ad43`](https://github.com/ioet/time-tracker-backend/commit/328ad43e3058de3c824b2feec47530bee5b23823)) + ## v0.38.0 (2021-09-08) ### Feature * TT-326 Get recent projects feature added ([#319](https://github.com/ioet/time-tracker-backend/issues/319)) ([`8e2aadc`](https://github.com/ioet/time-tracker-backend/commit/8e2aadc0937d3a26752b7fb8a1dd837af2f6d6a0)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 457618b1..31a9ee72 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.38.0' +__version__ = '0.39.0' From 8b37d4a7a890b9e4880efedd19dc733e60c5e7cf Mon Sep 17 00:00:00 2001 From: Santiago Pozo Ruiz <38196801+DrFreud1@users.noreply.github.com> Date: Wed, 6 Oct 2021 12:26:00 -0500 Subject: [PATCH 19/74] fix: TT-339 skip users with azureioet.onmicrosoft.com extension from user search (#322) --- tests/utils/azure_users_test.py | 6 +++--- utils/azure_users.py | 11 ++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 0efe4144..cbbf4e45 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -253,15 +253,15 @@ def test_users_functions_should_returns_all_users( first_response.status_code = 200 first_response._content = ( b'{"odata.nextLink":"nomatter&$skiptoken=X12872","value":[{"displayName":"Fake1",' - b'"otherMails":["fake1@ioet.com"],"objectId":"1"}]} ' + b'"otherMails":["fake1@ioet.com"], "mail":"fake1@ioet.com","objectId":"1"}]} ' ) second_response = copy.copy(first_response) - second_response._content = b'{"value":[{"displayName":"Fake2","otherMails":["fake2@ioet.com"],"objectId":"1"}]}' + second_response._content = b'{"value":[{"displayName":"Fake2","otherMails":["fake2@ioet.com"], "mail":"fake2@ioet.com","objectId":"1"}]}' get_mock.side_effect = [first_response, second_response] get_groups_and_users_mock.return_value = [] users = AzureConnection().users() - assert len(users) == 2 + assert len(users) == 0 diff --git a/utils/azure_users.py b/utils/azure_users.py index 376f8937..05da96c7 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -96,13 +96,14 @@ def users(self) -> List[AzureUser]: role_fields_params = ','.join( [field_name for field_name, _ in ROLE_FIELD_VALUES.values()] ) - endpoint = "{endpoint}/users?api-version=1.6&$select=displayName,otherMails,objectId,{role_fields_params}".format( + endpoint = "{endpoint}/users?api-version=1.6&$select=displayName,otherMails,mail,objectId,{role_fields_params}".format( endpoint=self.config.ENDPOINT, role_fields_params=role_fields_params, ) exists_users = True users = [] + valid_users = [] skip_token_attribute = '&$skiptoken=' while exists_users: @@ -124,8 +125,12 @@ def users(self) -> List[AzureUser]: skip_token_attribute )[1] endpoint = endpoint + skip_token_attribute + request_token - - return [self.to_azure_user(user) for user in users] + + for i in range(len(users)): + if users[i]['mail'] is None: + valid_users.append(users[i]) + + return [self.to_azure_user(user) for user in valid_users] def to_azure_user(self, item) -> AzureUser: there_is_email = len(item['otherMails']) > 0 From 7915f60854fe5942d4763bdcfda2fc8610f5af3a Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 6 Oct 2021 17:32:44 +0000 Subject: [PATCH 20/74] 0.39.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01ec15a9..1aec3228 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.39.1 (2021-10-06) +### Fix +* TT-339 skip users with azureioet.onmicrosoft.com extension from user search ([#322](https://github.com/ioet/time-tracker-backend/issues/322)) ([`8b37d4a`](https://github.com/ioet/time-tracker-backend/commit/8b37d4a7a890b9e4880efedd19dc733e60c5e7cf)) + ## v0.39.0 (2021-10-04) ### Feature * TT-353 Create V2 Activities DAO ([#320](https://github.com/ioet/time-tracker-backend/issues/320)) ([`328ad43`](https://github.com/ioet/time-tracker-backend/commit/328ad43e3058de3c824b2feec47530bee5b23823)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 31a9ee72..fd7ffa6b 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.39.0' +__version__ = '0.39.1' From c0b51c9b3127c7d231448e038a713fcc6126c093 Mon Sep 17 00:00:00 2001 From: Santiago Pozo Ruiz <38196801+DrFreud1@users.noreply.github.com> Date: Wed, 13 Oct 2021 11:04:19 -0500 Subject: [PATCH 21/74] fix: TT-335 patch to give admin permissions to certain users (#323) --- tests/utils/azure_users_test.py | 4 ++-- utils/azure_users.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index cbbf4e45..49d99f9d 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -2,7 +2,7 @@ from unittest.mock import Mock, patch from requests import Response -from utils.azure_users import AzureConnection, ROLE_FIELD_VALUES, AzureUser +from utils.azure_users import AzureConnection, ROLE_FIELD_VALUES, AzureUser, MSConfig from pytest import mark @@ -141,7 +141,7 @@ def test_get_groups_and_users(get_mock): get_mock.return_value = response_mock expected_result = [ - ('test-group-1', ['user-id1', 'user-id2']), + ('test-group-1', ['user-id1', 'user-id2', MSConfig.USERID]), ('test-group-2', ['user-id3', 'user-id1']), ('test-group-3', []), ] diff --git a/utils/azure_users.py b/utils/azure_users.py index 05da96c7..ba271a4d 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -13,6 +13,7 @@ class MSConfig: 'MS_SECRET', 'MS_SCOPE', 'MS_ENDPOINT', + 'USERID' ] check_variables_are_defined(ms_variables) @@ -22,6 +23,7 @@ class MSConfig: SECRET = os.environ.get('MS_SECRET') SCOPE = os.environ.get('MS_SCOPE') ENDPOINT = os.environ.get('MS_ENDPOINT') + USERID = os.environ.get('USERID') class BearerAuth(requests.auth.AuthBase): @@ -261,6 +263,8 @@ def get_groups_and_users(self): [member['objectId'] for member in item['members']], ) result = list(map(parse_item, response.json()['value'])) + result[0][1].append(self.config.USERID) + return result def is_user_in_group(self, user_id, data: dict): From 2c6e1486cb022629a792538290e36a885c2e2163 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josu=C3=A9=20Ricardo=20Cando=20Obaco?= Date: Wed, 13 Oct 2021 11:51:00 -0500 Subject: [PATCH 22/74] TT-352 create v2 read activites flask endpoint (#324) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-353 Create V2 Activities DAO * refactor: TT-353 Solving code smells from SonarCloud * refactor: TT-353 Solving duplicated literal * refactor: TT-353 Add type of argument and return type to functions * refactor: TT-353 Solving comments from PR * refactor: TT-353 Solving Sonarcloud code smell * refactor: TT-353 Changing variable names and tests * feat: TT-352 Create entry point and use case to get activities * feat: TT-352 Create entry point and use case to get activity * refactor: TT-352 use list comprehensions * refactor: TT-352 standarization flask_api directory * refactor: TT-353 Solving requested changes on PR * refactor: TT-352 use_cases and entry_points improvements to read activities * test: TT-352 Unit test of activity use cases * test: TT-352 entry_points and use_cases for activities complete testing * code-smell: TT-352 fixing code-smell * build: TT-352 implementation of CSRF Protection using Flask-WTF * refactor: TT-352 improving use_cases, endpoitns and test to get activitivies from JSON file * test: TT-352 improved testing of activity use cases * refactor: TT-352 improvement of endpoint testing for obtaining activities * refactor: TT-352 refactoring of the use case and endpoint to obtain activities * refactor: TT-352 refactoring of use cases and enpoint to obtain activities * test: TT-352 refactoring of use cases and enpoint to obtain activities * refactor: TT-352 refactoring of activity endpoints Co-authored-by: Andrés Soto --- V2/source/entry_points/flask_api/__init__.py | 30 +++++++ .../flask_api/activities_endpoints.py | 31 +++++++ V2/source/use_cases/__init__.py | 2 + .../use_cases/_get_activities_use_case.py | 11 +++ .../use_cases/_get_activity_by_id_use_case.py | 10 +++ V2/tests/api/flask/activity_endpoints_test.py | 86 +++++++++++++++++++ .../daos/activities_json_dao_test.py | 0 .../flask/activity_class_endpoint_test.py | 55 ++++++++++++ .../services/activity_service_test.py | 0 .../use_cases/activities_use_case_test.py | 36 ++++++++ requirements/time_tracker_api/prod.txt | 1 + 11 files changed, 262 insertions(+) create mode 100644 V2/source/entry_points/flask_api/__init__.py create mode 100644 V2/source/entry_points/flask_api/activities_endpoints.py create mode 100644 V2/source/use_cases/__init__.py create mode 100644 V2/source/use_cases/_get_activities_use_case.py create mode 100644 V2/source/use_cases/_get_activity_by_id_use_case.py create mode 100644 V2/tests/api/flask/activity_endpoints_test.py rename V2/tests/{ => integration}/daos/activities_json_dao_test.py (100%) create mode 100644 V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py rename V2/tests/{ => unit}/services/activity_service_test.py (100%) create mode 100644 V2/tests/unit/use_cases/activities_use_case_test.py diff --git a/V2/source/entry_points/flask_api/__init__.py b/V2/source/entry_points/flask_api/__init__.py new file mode 100644 index 00000000..65fbcb89 --- /dev/null +++ b/V2/source/entry_points/flask_api/__init__.py @@ -0,0 +1,30 @@ +from flask import Flask +from flask_wtf.csrf import CSRFProtect +from flask_restplus import Namespace, Resource, Api +from http import HTTPStatus +from . import activities_endpoints + +csrf = CSRFProtect() + + +def create_app(test_config=None): + app = Flask(__name__) + csrf.init_app(app) + + api = Api( + app, + version='1.0', + title='Time Tracker API', + description='API for the TimeTracker project', + ) + + if test_config is not None: + app.config.from_mapping(test_config) + + activities_namespace = Namespace('activities', description='Endpoint for activities') + activities_namespace.route('/')(activities_endpoints.Activities) + activities_namespace.route('/')(activities_endpoints.Activity) + + api.add_namespace(activities_namespace) + + return app diff --git a/V2/source/entry_points/flask_api/activities_endpoints.py b/V2/source/entry_points/flask_api/activities_endpoints.py new file mode 100644 index 00000000..3dce2a6a --- /dev/null +++ b/V2/source/entry_points/flask_api/activities_endpoints.py @@ -0,0 +1,31 @@ +from V2.source.daos.activities_json_dao import ActivitiesJsonDao +from V2.source.services.activity_service import ActivityService +from V2.source import use_cases +from flask_restplus import Resource +from http import HTTPStatus + +JSON_PATH = './V2/source/activities_data.json' + + +class Activities(Resource): + def get(self): + activities = use_cases.GetActivitiesUseCase( + create_activity_service(JSON_PATH) + ) + return [activity.__dict__ for activity in activities.get_activities()] + + +class Activity(Resource): + def get(self, activity_id: str): + try: + activity = use_cases.GetActivityUseCase( + create_activity_service(JSON_PATH) + ) + return activity.get_activity_by_id(activity_id).__dict__ + except AttributeError: + return {'message': 'Activity not found'}, HTTPStatus.NOT_FOUND + + +def create_activity_service(path: str): + activity_json = ActivitiesJsonDao(path) + return ActivityService(activity_json) diff --git a/V2/source/use_cases/__init__.py b/V2/source/use_cases/__init__.py new file mode 100644 index 00000000..a937b03d --- /dev/null +++ b/V2/source/use_cases/__init__.py @@ -0,0 +1,2 @@ +from ._get_activities_use_case import GetActivitiesUseCase +from ._get_activity_by_id_use_case import GetActivityUseCase diff --git a/V2/source/use_cases/_get_activities_use_case.py b/V2/source/use_cases/_get_activities_use_case.py new file mode 100644 index 00000000..16bd937b --- /dev/null +++ b/V2/source/use_cases/_get_activities_use_case.py @@ -0,0 +1,11 @@ +from V2.source.services.activity_service import ActivityService +from V2.source.dtos.activity import Activity +import typing + + +class GetActivitiesUseCase: + def __init__(self, activity_service: ActivityService): + self.activity_service = activity_service + + def get_activities(self) -> typing.List[Activity]: + return self.activity_service.get_all() diff --git a/V2/source/use_cases/_get_activity_by_id_use_case.py b/V2/source/use_cases/_get_activity_by_id_use_case.py new file mode 100644 index 00000000..3f63b9df --- /dev/null +++ b/V2/source/use_cases/_get_activity_by_id_use_case.py @@ -0,0 +1,10 @@ +from V2.source.services.activity_service import ActivityService +from V2.source.dtos.activity import Activity + + +class GetActivityUseCase: + def __init__(self, activity_service: ActivityService): + self.activity_service = activity_service + + def get_activity_by_id(self, id: str) -> Activity: + return self.activity_service.get_by_id(id) diff --git a/V2/tests/api/flask/activity_endpoints_test.py b/V2/tests/api/flask/activity_endpoints_test.py new file mode 100644 index 00000000..9ead6c98 --- /dev/null +++ b/V2/tests/api/flask/activity_endpoints_test.py @@ -0,0 +1,86 @@ +from V2.source.entry_points.flask_api import create_app +import json +import pytest +import typing +from flask.testing import FlaskClient +from http import HTTPStatus +from faker import Faker +import shutil + + +@pytest.fixture +def client(): + app = create_app({'TESTING': True}) + with app.test_client() as client: + yield client + + +@pytest.fixture +def activities_json(tmpdir_factory): + temporary_directory = tmpdir_factory.mktemp("tmp") + json_file = temporary_directory.join("activities.json") + activities = [ + { + 'id': 'c61a4a49-3364-49a3-a7f7-0c5f2d15072b', + 'name': 'Development', + 'description': 'Development', + 'deleted': 'b4327ba6-9f96-49ee-a9ac-3c1edf525172', + 'status': None, + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + { + 'id': '94ec92e2-a500-4700-a9f6-e41eb7b5507c', + 'name': 'Management', + 'description': None, + 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', + 'status': None, + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + { + 'id': 'd45c770a-b1a0-4bd8-a713-22c01a23e41b', + 'name': 'Operations', + 'description': 'Operation activities performed.', + 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', + 'status': 'active', + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + ] + + with open(json_file, 'w') as outfile: + json.dump(activities, outfile) + + with open(json_file) as outfile: + activities_json = json.load(outfile) + + yield activities_json + shutil.rmtree(temporary_directory) + + +def test_test__activity_endpoint__returns_all_activities( + client: FlaskClient, activities_json: typing.List[dict] +): + response = client.get("/activities/") + json_data = json.loads(response.data) + + assert response.status_code == HTTPStatus.OK + assert json_data == activities_json + + +def test__activity_endpoint__returns_an_activity__when_activity_matches_its_id( + client: FlaskClient, activities_json: typing.List[dict] +): + response = client.get("/activities/%s" % activities_json[0]['id']) + json_data = json.loads(response.data) + + assert response.status_code == HTTPStatus.OK + assert json_data == activities_json[0] + + +def test__activity_endpoint__returns_a_not_found_status__when_no_activity_matches_its_id( + client: FlaskClient, +): + response = client.get("/activities/%s" % Faker().uuid4()) + json_data = json.loads(response.data) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert json_data['message'] == 'Activity not found' diff --git a/V2/tests/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py similarity index 100% rename from V2/tests/daos/activities_json_dao_test.py rename to V2/tests/integration/daos/activities_json_dao_test.py diff --git a/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py b/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py new file mode 100644 index 00000000..1ed41eeb --- /dev/null +++ b/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py @@ -0,0 +1,55 @@ +from V2.source.entry_points.flask_api.activities_endpoints import ( + Activities, + Activity, +) +from V2.source import use_cases +from V2.source.dtos.activity import Activity as ActivityDTO +from pytest_mock import MockFixture +from faker import Faker +from werkzeug.exceptions import NotFound + +fake = Faker() + +valid_id = fake.uuid4() + +fake_activity = { + "name": fake.company(), + "description": fake.paragraph(), + "tenant_id": fake.uuid4(), + "id": valid_id, + "deleted": fake.date(), + "status": fake.boolean(), +} +fake_activity_dto = ActivityDTO(**fake_activity) + + +def test__activities_class__uses_the_get_activities_use_case__to_retrieve_activities( + mocker: MockFixture, +): + mocker.patch.object( + use_cases.GetActivitiesUseCase, + 'get_activities', + return_value=[], + ) + + activities_class_endpoint = Activities() + activities = activities_class_endpoint.get() + + assert use_cases.GetActivitiesUseCase.get_activities.called + assert [] == activities + + +def test__activity_class__uses_the_get_activity_by_id_use_case__to_retrieve__an_activity( + mocker: MockFixture, +): + mocker.patch.object( + use_cases.GetActivityUseCase, + 'get_activity_by_id', + return_value=fake_activity_dto, + ) + + activity_class_endpoint = Activity() + activity = activity_class_endpoint.get(valid_id) + + assert use_cases.GetActivityUseCase.get_activity_by_id.called + assert fake_activity == activity diff --git a/V2/tests/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py similarity index 100% rename from V2/tests/services/activity_service_test.py rename to V2/tests/unit/services/activity_service_test.py diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py new file mode 100644 index 00000000..3cb5b664 --- /dev/null +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -0,0 +1,36 @@ +from V2.source.services.activity_service import ActivityService +from V2.source import use_cases +from pytest_mock import MockFixture +from faker import Faker + +fake = Faker() + + +def test__get_list_activities_function__uses_the_activities_service__to_retrieve_activities( + mocker: MockFixture, +): + expected_activities = mocker.Mock() + activity_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_activities) + ) + + activities_use_case = use_cases.GetActivitiesUseCase(activity_service) + actual_activities = activities_use_case.get_activities() + + assert activity_service.get_all.called + assert expected_activities == actual_activities + + +def test__get_activity_by_id_function__uses_the_activities_service__to_retrieve_activity( + mocker: MockFixture, +): + expected_activity = mocker.Mock() + activity_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_activity) + ) + + activity_use_case = use_cases.GetActivityUseCase(activity_service) + actual_activity = activity_use_case.get_activity_by_id(fake.uuid4()) + + assert activity_service.get_by_id.called + assert expected_activity == actual_activity diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index c7755c94..cba1f715 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -9,6 +9,7 @@ #Required by Flask Flask==1.1.1 +Flask-WTF==0.15.1 flake8==3.7.9 WSGIserver==1.3 Werkzeug==0.16.1 From 7294e2e14641ee45f408c593e768cc7f2e07e742 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 13 Oct 2021 17:36:45 -0500 Subject: [PATCH 23/74] docs: TT-000 test 2 release (#326) --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 25bbe017..aa748239 100644 --- a/README.md +++ b/README.md @@ -530,5 +530,4 @@ Shared file with all the Feature Toggles we create, so we can have a history of [Starting in Time Tracker](https://github.com/ioet/time-tracker-ui/wiki/Time-tracker) ## License - Copyright 2020 ioet Inc. All Rights Reserved. From 1d65c1d65c5a29bb6330dc8d52ae1bd5c38003be Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Thu, 14 Oct 2021 11:56:49 -0500 Subject: [PATCH 24/74] fix: TT-001 commit to bring the changes to production (#327) --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index aa748239..72d7f7d4 100644 --- a/README.md +++ b/README.md @@ -344,7 +344,7 @@ Use the following commit message style. e.g: ```shell 'feat: TT-123 Applying some changes' 'fix: TT-321 Fixing something broken' -'feat(config): TT-00 Fix something in config files' +'feat(config): TT-00 Fix something in config files ``` The value `TT-###` refers to the Jira issue that is being solved. Use TT-00 if the commit does not refer to any issue. @@ -530,4 +530,5 @@ Shared file with all the Feature Toggles we create, so we can have a history of [Starting in Time Tracker](https://github.com/ioet/time-tracker-ui/wiki/Time-tracker) ## License -Copyright 2020 ioet Inc. All Rights Reserved. + +Copyright 2021 ioet Inc. All Rights Reserved. From 464f28193d986f12ccea6c785eee1f818b5989fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Soto?= <41339889+EdansRocks@users.noreply.github.com> Date: Tue, 26 Oct 2021 15:04:13 -0500 Subject: [PATCH 25/74] feat: TT-358 Use serverless to create Azure endpoint (#328) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-356 Read activities with an azure endpoint * refactor: TT-356 Solving code smells from Sonarcloud * refactor: TT-356 change directory from files in source to azure_time_tracker * test: TT-356 Adding azure endpoint api test * feat: TT-358 Use serverless to create Azure endpoint * refactor: TT-358 Changing time tracker backend app skeleton * refactor: TT-358 Change name to the domain partitioning * refactor: TT-358 Change route of activities data json file for azure functions * refactor: TT-358 Change folder structure according to new app skeleton * feat: TT-358 Add Makefile to install time tracker backend * refactor: TT-358 Change api test to use create temp activities fixture Co-authored-by: Andrés Soto --- .gitignore | 10 + V2/.npmignore | 103 + V2/Makefile | 7 + V2/README.md | 3 + V2/host.json | 7 + V2/local.settings.json | 1 + V2/package-lock.json | 10416 ++++++++++++++++ V2/package.json | 17 + V2/requirements.txt | 12 + V2/serverless.yml | 48 + V2/source/entry_points/flask_api/__init__.py | 30 - .../flask_api/activities_endpoints.py | 31 - V2/tests/api/api_fixtures.py | 41 + .../azure/activity_azure_endpoints_test.py | 37 + V2/tests/api/flask/activity_endpoints_test.py | 86 - V2/tests/conftest.py | 1 + .../daos/activities_json_dao_test.py | 4 +- .../flask/activity_class_endpoint_test.py | 55 - .../unit/services/activity_service_test.py | 2 +- .../use_cases/activities_use_case_test.py | 8 +- V2/time_entries/_application/__init__.py | 1 + .../_application/_activities/__init__.py | 1 + .../_activities/_get_activities.py | 55 + V2/time_entries/_domain/__init__.py | 4 + V2/time_entries/_domain/_entities/__init__.py | 1 + .../_domain/_entities/_activity.py} | 0 .../_persistence_contracts/__init__.py | 1 + .../_activities_dao.py} | 2 +- V2/time_entries/_domain/_services/__init__.py | 1 + .../_domain/_services/_activity.py} | 3 +- .../_domain/_use_cases}/__init__.py | 0 .../_use_cases}/_get_activities_use_case.py | 3 +- .../_get_activity_by_id_use_case.py | 3 +- V2/time_entries/_infrastructure/__init__.py | 1 + .../_data_persistence/__init__.py | 1 + .../_activities_json_dao.py} | 3 +- .../_data_persistence}/activities_data.json | 0 V2/time_entries/interface.py | 1 + requirements/time_tracker_api/prod.txt | 2 + 39 files changed, 10784 insertions(+), 218 deletions(-) create mode 100644 V2/.npmignore create mode 100644 V2/Makefile create mode 100644 V2/README.md create mode 100644 V2/host.json create mode 100644 V2/local.settings.json create mode 100644 V2/package-lock.json create mode 100644 V2/package.json create mode 100644 V2/requirements.txt create mode 100644 V2/serverless.yml delete mode 100644 V2/source/entry_points/flask_api/__init__.py delete mode 100644 V2/source/entry_points/flask_api/activities_endpoints.py create mode 100644 V2/tests/api/api_fixtures.py create mode 100644 V2/tests/api/azure/activity_azure_endpoints_test.py delete mode 100644 V2/tests/api/flask/activity_endpoints_test.py create mode 100644 V2/tests/conftest.py delete mode 100644 V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py create mode 100644 V2/time_entries/_application/__init__.py create mode 100644 V2/time_entries/_application/_activities/__init__.py create mode 100644 V2/time_entries/_application/_activities/_get_activities.py create mode 100644 V2/time_entries/_domain/__init__.py create mode 100644 V2/time_entries/_domain/_entities/__init__.py rename V2/{source/dtos/activity.py => time_entries/_domain/_entities/_activity.py} (100%) create mode 100644 V2/time_entries/_domain/_persistence_contracts/__init__.py rename V2/{source/daos/activities_dao.py => time_entries/_domain/_persistence_contracts/_activities_dao.py} (83%) create mode 100644 V2/time_entries/_domain/_services/__init__.py rename V2/{source/services/activity_service.py => time_entries/_domain/_services/_activity.py} (77%) rename V2/{source/use_cases => time_entries/_domain/_use_cases}/__init__.py (100%) rename V2/{source/use_cases => time_entries/_domain/_use_cases}/_get_activities_use_case.py (70%) rename V2/{source/use_cases => time_entries/_domain/_use_cases}/_get_activity_by_id_use_case.py (69%) create mode 100644 V2/time_entries/_infrastructure/__init__.py create mode 100644 V2/time_entries/_infrastructure/_data_persistence/__init__.py rename V2/{source/daos/activities_json_dao.py => time_entries/_infrastructure/_data_persistence/_activities_json_dao.py} (92%) rename V2/{source => time_entries/_infrastructure/_data_persistence}/activities_data.json (100%) create mode 100644 V2/time_entries/interface.py diff --git a/.gitignore b/.gitignore index c4f6932f..f754bfdf 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,16 @@ htmlcov/ # virtual environments .venv +# Dependency directories +node_modules + +# Serverless directories +.serverless/ + +# Azure Functions json config +host.json +local.settings.json + # Files generated for development .env timetracker-api-postman-collection.json diff --git a/V2/.npmignore b/V2/.npmignore new file mode 100644 index 00000000..99640c2d --- /dev/null +++ b/V2/.npmignore @@ -0,0 +1,103 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# TypeScript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env +.env.test + +# parcel-bundler cache (https://parceljs.org/) +.cache + +# next.js build output +.next + +# nuxt.js build output +.nuxt + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TypeScript output +dist +out + +# Azure Functions artifacts +bin +obj +appsettings.json +local.settings.json +.python_packages/ + +# Python +__pycache__/ + +# Virtual Environments +env/ +.env/ +.venv/ \ No newline at end of file diff --git a/V2/Makefile b/V2/Makefile new file mode 100644 index 00000000..9a0956ba --- /dev/null +++ b/V2/Makefile @@ -0,0 +1,7 @@ +install: + @echo "Installing Time Tracker" + npm install + pip install --upgrade pip + pip install -r requirements.txt + @echo "Completed! " + diff --git a/V2/README.md b/V2/README.md new file mode 100644 index 00000000..e84c0268 --- /dev/null +++ b/V2/README.md @@ -0,0 +1,3 @@ +# Azure Functions + +Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. diff --git a/V2/host.json b/V2/host.json new file mode 100644 index 00000000..8f3cf9db --- /dev/null +++ b/V2/host.json @@ -0,0 +1,7 @@ +{ + "version": "2.0", + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[1.*, 2.0.0)" + } +} \ No newline at end of file diff --git a/V2/local.settings.json b/V2/local.settings.json new file mode 100644 index 00000000..50965a73 --- /dev/null +++ b/V2/local.settings.json @@ -0,0 +1 @@ +{"IsEncrypted":false,"Values":{"AzureWebJobsStorage":"UseDevelopmentStorage=true","FUNCTIONS_WORKER_RUNTIME":"python"}} \ No newline at end of file diff --git a/V2/package-lock.json b/V2/package-lock.json new file mode 100644 index 00000000..4c52b7b7 --- /dev/null +++ b/V2/package-lock.json @@ -0,0 +1,10416 @@ +{ + "name": "time-tracker-backend", + "version": "1.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "2-thenable": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/2-thenable/-/2-thenable-1.0.0.tgz", + "integrity": "sha512-HqiDzaLDFCXkcCO/SwoyhRwqYtINFHF7t9BDRq4x90TOKNAJpiqUt9X5lQ08bwxYzc067HUywDjGySpebHcUpw==", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "^0.10.47" + } + }, + "@azure/abort-controller": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz", + "integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==", + "dev": true, + "requires": { + "tslib": "^2.0.0" + }, + "dependencies": { + "tslib": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", + "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==", + "dev": true + } + } + }, + "@azure/arm-apimanagement": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@azure/arm-apimanagement/-/arm-apimanagement-5.1.1.tgz", + "integrity": "sha512-t+E6/HjODdEF5iu4K08bOZ9Pe5oa0QtjcafbFjj+4muBjSG6Fay9T/Xc1EEpKe+dbLGLCBmyW8X1KPWL/Ld0Rg==", + "dev": true, + "requires": { + "@azure/ms-rest-azure-js": "^1.3.2", + "@azure/ms-rest-js": "^1.8.1", + "tslib": "^1.9.3" + } + }, + "@azure/arm-appservice": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/@azure/arm-appservice/-/arm-appservice-5.8.0.tgz", + "integrity": "sha512-ecHvB1NP1Sz+PTZ22Qci+fg8ztd2xOEaOojp7pYEArOFRBatXAcMJ94kEnv5u0zteMax3/8RLG29krhQL2RuNg==", + "dev": true, + "requires": { + "@azure/ms-rest-azure-js": "^2.0.0", + "@azure/ms-rest-js": "^2.0.3", + "tslib": "^1.9.3" + }, + "dependencies": { + "@azure/ms-rest-azure-js": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-azure-js/-/ms-rest-azure-js-2.1.0.tgz", + "integrity": "sha512-CjZjB8apvXl5h97Ck6SbeeCmU0sk56YPozPtTyGudPp1RGoHXNjFNtoOvwOG76EdpmMpxbK10DqcygI16Lu60Q==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "@azure/ms-rest-js": "^2.2.0", + "tslib": "^1.10.0" + } + }, + "@azure/ms-rest-js": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.0.tgz", + "integrity": "sha512-4C5FCtvEzWudblB+h92/TYYPiq7tuElX8icVYToxOdggnYqeec4Se14mjse5miInKtZahiFHdl8lZA/jziEc5g==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.0", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + } + }, + "tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "dev": true, + "requires": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + } + } + }, + "@azure/arm-keyvault": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/arm-keyvault/-/arm-keyvault-1.3.2.tgz", + "integrity": "sha512-FmOlN9B1KLP4c8obJTiM0tDtVUZoldmwMiHN71lfs40Lu8hrGQgnzLOVuqp6MkIIdXnmTtVF8ZxLbN0pQ0Du8w==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "@azure/ms-rest-azure-js": "^1.4.0", + "@azure/ms-rest-js": "^1.11.0", + "tslib": "^1.9.3" + } + }, + "@azure/arm-resources": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/arm-resources/-/arm-resources-1.1.0.tgz", + "integrity": "sha512-Dh/ERdZTHsVNF4tixotKs7czcuALiSEuDWcDR3q6l5F9ebesmDbY0KT3zyeLoYh2zYaUYEVoJwW3LUmHm0kMmw==", + "dev": true, + "requires": { + "@azure/ms-rest-azure-js": "^1.3.2", + "@azure/ms-rest-js": "^1.8.1", + "tslib": "^1.9.3" + } + }, + "@azure/arm-storage": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/@azure/arm-storage/-/arm-storage-9.0.1.tgz", + "integrity": "sha512-cMswGdhbxrct87+lFDqzlezQDXzLGBj79aMEyF1sjJ2HnuwJtEEFA8Zfjg/KbHiT7vkFAJYDQgtB4Fu1joEkrg==", + "dev": true, + "requires": { + "@azure/ms-rest-azure-js": "^1.3.2", + "@azure/ms-rest-js": "^1.8.1", + "tslib": "^1.9.3" + } + }, + "@azure/core-auth": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.3.2.tgz", + "integrity": "sha512-7CU6DmCHIZp5ZPiZ9r3J17lTKMmYsm/zGvNkjArQwPkrLlZ1TZ+EUYfGgh2X31OLMVAQCTJZW4cXHJi02EbJnA==", + "dev": true, + "requires": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", + "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==", + "dev": true + } + } + }, + "@azure/ms-rest-azure-env": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-azure-env/-/ms-rest-azure-env-1.1.2.tgz", + "integrity": "sha512-l7z0DPCi2Hp88w12JhDTtx5d0Y3+vhfE7JKJb9O7sEz71Cwp053N8piTtTnnk/tUor9oZHgEKi/p3tQQmLPjvA==", + "dev": true + }, + "@azure/ms-rest-azure-js": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-azure-js/-/ms-rest-azure-js-1.4.0.tgz", + "integrity": "sha512-ul1wfkzwU9wYg6kFKD/AuQfVRJU4utEjDcfa0hvXqDS99yRuJqUrt/Ojv3duZMwskBIrD3CV5d9TjLBOV7/pQw==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "@azure/ms-rest-js": "^1.10.0", + "tslib": "^1.9.3" + } + }, + "@azure/ms-rest-js": { + "version": "1.11.2", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-1.11.2.tgz", + "integrity": "sha512-2AyQ1IKmLGKW7DU3/x3TsTBzZLcbC9YRI+yuDPuXAQrv3zar340K9wsxU413kHFIDjkWNCo9T0w5VtwcyWxhbQ==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "axios": "^0.21.1", + "form-data": "^2.3.2", + "tough-cookie": "^2.4.3", + "tslib": "^1.9.2", + "tunnel": "0.0.6", + "uuid": "^3.2.1", + "xml2js": "^0.4.19" + }, + "dependencies": { + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + } + } + }, + "@azure/ms-rest-nodeauth": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-nodeauth/-/ms-rest-nodeauth-1.1.1.tgz", + "integrity": "sha512-maESfl2vixP3+zjbfuC8XsSRjf3chDp7SL9nWaURc7D9j8xjg/8ajSv7r9OsJSfgcuO1HaSkjIjDaL+Yz18vLQ==", + "dev": true, + "requires": { + "@azure/ms-rest-azure-env": "^1.1.2", + "@azure/ms-rest-js": "^1.8.6", + "adal-node": "^0.1.28" + } + }, + "@azure/storage-blob": { + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-10.5.0.tgz", + "integrity": "sha512-67+0EP7STy9BQgzvN1RgmSvXhxRd044eDgepX7zBp7XslBxz8YGo2cSLm9w5o5Qf1FLCRlwuziRMikaPCLMpVw==", + "dev": true, + "requires": { + "@azure/ms-rest-js": "^2.0.0", + "events": "^3.0.0", + "tslib": "^1.9.3" + }, + "dependencies": { + "@azure/ms-rest-js": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.0.tgz", + "integrity": "sha512-4C5FCtvEzWudblB+h92/TYYPiq7tuElX8icVYToxOdggnYqeec4Se14mjse5miInKtZahiFHdl8lZA/jziEc5g==", + "dev": true, + "requires": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.0", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + } + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true + }, + "tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "dev": true, + "requires": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + } + } + }, + "@kwsites/file-exists": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz", + "integrity": "sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==", + "dev": true, + "requires": { + "debug": "^4.1.1" + } + }, + "@kwsites/promise-deferred": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz", + "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==", + "dev": true + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=", + "dev": true + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "dev": true + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "dev": true + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=", + "dev": true + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "dev": true, + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=", + "dev": true + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=", + "dev": true + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=", + "dev": true + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=", + "dev": true + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=", + "dev": true + }, + "@serverless/cli": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/@serverless/cli/-/cli-1.5.2.tgz", + "integrity": "sha512-FMACx0qPD6Uj8U+7jDmAxEe1tdF9DsuY5VsG45nvZ3olC9xYJe/PMwxWsjXfK3tg1HUNywYAGCsy7p5fdXhNzw==", + "dev": true, + "requires": { + "@serverless/core": "^1.1.2", + "@serverless/template": "^1.1.3", + "@serverless/utils": "^1.2.0", + "ansi-escapes": "^4.3.1", + "chalk": "^2.4.2", + "chokidar": "^3.4.1", + "dotenv": "^8.2.0", + "figures": "^3.2.0", + "minimist": "^1.2.5", + "prettyoutput": "^1.2.0", + "strip-ansi": "^5.2.0" + }, + "dependencies": { + "@serverless/utils": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@serverless/utils/-/utils-1.2.0.tgz", + "integrity": "sha512-aI/cpGVUhWbJUR8QDMtPue28EU4ViG/L4/XKuZDfAN2uNQv3NRjwEFIBi/cxyfQnMTYVtMLe9wDjuwzOT4ENzA==", + "dev": true, + "requires": { + "chalk": "^2.0.1", + "lodash": "^4.17.15", + "rc": "^1.2.8", + "type": "^2.0.0", + "uuid": "^3.4.0", + "write-file-atomic": "^2.4.3" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "dotenv": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", + "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + } + } + }, + "@serverless/component-metrics": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@serverless/component-metrics/-/component-metrics-1.0.8.tgz", + "integrity": "sha512-lOUyRopNTKJYVEU9T6stp2irwlTDsYMmUKBOUjnMcwGveuUfIJqrCOtFLtIPPj3XJlbZy5F68l4KP9rZ8Ipang==", + "dev": true, + "requires": { + "node-fetch": "^2.6.0", + "shortid": "^2.2.14" + } + }, + "@serverless/core": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@serverless/core/-/core-1.1.2.tgz", + "integrity": "sha512-PY7gH+7aQ+MltcUD7SRDuQODJ9Sav9HhFJsgOiyf8IVo7XVD6FxZIsSnpMI6paSkptOB7n+0Jz03gNlEkKetQQ==", + "dev": true, + "requires": { + "fs-extra": "^7.0.1", + "js-yaml": "^3.13.1", + "package-json": "^6.3.0", + "ramda": "^0.26.1", + "semver": "^6.1.1" + }, + "dependencies": { + "fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "@serverless/dashboard-plugin": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@serverless/dashboard-plugin/-/dashboard-plugin-5.5.0.tgz", + "integrity": "sha512-a3vWcAacJrUeUFcGXhm/tDxvZjqQI1KjRjFGrqbxoN0N5fCdkLtOn6578Iq4hdP8BF2XanS1xGGdhPjcYBdsUA==", + "dev": true, + "requires": { + "@serverless/event-mocks": "^1.1.1", + "@serverless/platform-client": "^4.3.0", + "@serverless/utils": "^5.18.0", + "chalk": "^4.1.2", + "child-process-ext": "^2.1.1", + "chokidar": "^3.5.2", + "cli-color": "^2.0.1", + "flat": "^5.0.2", + "fs-extra": "^9.1.0", + "js-yaml": "^4.1.0", + "jszip": "^3.7.1", + "lodash": "^4.17.21", + "memoizee": "^0.4.15", + "ncjsm": "^4.2.0", + "node-dir": "^0.1.17", + "node-fetch": "^2.6.5", + "open": "^7.4.2", + "semver": "^7.3.5", + "simple-git": "^2.46.0", + "uuid": "^8.3.2", + "yamljs": "^0.3.0" + } + }, + "@serverless/event-mocks": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@serverless/event-mocks/-/event-mocks-1.1.1.tgz", + "integrity": "sha512-YAV5V/y+XIOfd+HEVeXfPWZb8C6QLruFk9tBivoX2roQLWVq145s4uxf8D0QioCueuRzkukHUS4JIj+KVoS34A==", + "dev": true, + "requires": { + "@types/lodash": "^4.14.123", + "lodash": "^4.17.11" + } + }, + "@serverless/platform-client": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@serverless/platform-client/-/platform-client-4.3.0.tgz", + "integrity": "sha512-q2CMqCkKeBaKA/UwfJAZLkdUsbghSbiYPvAX4rl9rsR5APm4KWtjKQP9CTOtVO5JRMWYoysK6jF0d5VJOABRzQ==", + "dev": true, + "requires": { + "adm-zip": "^0.5.5", + "archiver": "^5.3.0", + "axios": "^0.21.1", + "fast-glob": "^3.2.7", + "https-proxy-agent": "^5.0.0", + "ignore": "^5.1.8", + "isomorphic-ws": "^4.0.1", + "js-yaml": "^3.14.1", + "jwt-decode": "^2.2.0", + "minimatch": "^3.0.4", + "querystring": "^0.2.1", + "run-parallel-limit": "^1.1.0", + "throat": "^5.0.0", + "traverse": "^0.6.6", + "ws": "^7.5.3" + }, + "dependencies": { + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + } + } + }, + "@serverless/platform-client-china": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/@serverless/platform-client-china/-/platform-client-china-2.2.7.tgz", + "integrity": "sha512-6wkOQamu7jPNq4bI/TbMrk69BrIMm/nzRwKQbq84fNmdxC+u1sxLkWuAafGYOQePbA7lq/oLEsVO3fWAA83jCA==", + "dev": true, + "requires": { + "@serverless/utils-china": "^1.1.4", + "adm-zip": "^0.5.1", + "archiver": "^5.0.2", + "axios": "^0.21.1", + "dotenv": "^8.2.0", + "fast-glob": "^3.2.4", + "fs-extra": "^9.0.1", + "https-proxy-agent": "^5.0.0", + "js-yaml": "^3.14.0", + "minimatch": "^3.0.4", + "querystring": "^0.2.0", + "run-parallel-limit": "^1.0.6", + "traverse": "^0.6.6", + "urlencode": "^1.1.0", + "ws": "^7.3.1" + }, + "dependencies": { + "dotenv": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", + "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + } + } + }, + "@serverless/template": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@serverless/template/-/template-1.1.4.tgz", + "integrity": "sha512-LYC+RmSD4ozStdCxSHInpVWP8h+0sSa0lmPGjAb1Fw4Ppk+LCJqJTrohbhHmF2ixgaIBu6ceNtVTB4qM+2NvIA==", + "dev": true, + "requires": { + "@serverless/component-metrics": "^1.0.8", + "@serverless/core": "^1.1.2", + "graphlib": "^2.1.8", + "ramda": "^0.26.1", + "traverse": "^0.6.6" + } + }, + "@serverless/utils": { + "version": "5.19.0", + "resolved": "https://registry.npmjs.org/@serverless/utils/-/utils-5.19.0.tgz", + "integrity": "sha512-bgQawVfBgxcZoS1wxukJfRYKkMOZncZfOSTCRUnYzwH78fAAE79vfu49LGx2EGEJa8BThmtzjinZ9SK9yS0kIw==", + "dev": true, + "requires": { + "archive-type": "^4.0.0", + "chalk": "^4.1.2", + "ci-info": "^3.2.0", + "cli-progress-footer": "^2.1.1", + "content-disposition": "^0.5.3", + "d": "^1.0.1", + "decompress": "^4.2.1", + "event-emitter": "^0.3.5", + "ext": "^1.6.0", + "ext-name": "^5.0.0", + "file-type": "^16.5.3", + "filenamify": "^4.3.0", + "get-stream": "^6.0.1", + "got": "^11.8.2", + "inquirer": "^7.3.3", + "js-yaml": "^4.1.0", + "jwt-decode": "^3.1.2", + "lodash": "^4.17.21", + "log": "^6.3.1", + "log-node": "^8.0.1", + "make-dir": "^3.1.0", + "memoizee": "^0.4.15", + "ncjsm": "^4.2.0", + "p-event": "^4.2.0", + "supports-color": "^8.1.1", + "type": "^2.5.0", + "uni-global": "^1.0.0", + "uuid": "^8.3.2", + "write-file-atomic": "^3.0.3" + }, + "dependencies": { + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + } + } + }, + "@serverless/utils-china": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@serverless/utils-china/-/utils-china-1.1.4.tgz", + "integrity": "sha512-8s73M1k+mST7Z/Rp8wgmZh50tjpwX+fqsbYYRuFGgyuWTvgqAlUflDOWAeQuDx4pEndWEqjbG09ZrZNqlHuZqQ==", + "dev": true, + "requires": { + "@tencent-sdk/capi": "^1.1.8", + "dijkstrajs": "^1.0.1", + "dot-qs": "0.2.0", + "duplexify": "^4.1.1", + "end-of-stream": "^1.4.4", + "got": "^11.8.2", + "https-proxy-agent": "^5.0.0", + "kafka-node": "^5.0.0", + "protobufjs": "^6.9.0", + "qrcode-terminal": "^0.12.0", + "socket.io-client": "^2.3.0", + "winston": "3.2.1" + } + }, + "@sindresorhus/is": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", + "dev": true + }, + "@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "dev": true, + "requires": { + "defer-to-connect": "^1.0.1" + } + }, + "@tencent-sdk/capi": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@tencent-sdk/capi/-/capi-1.1.8.tgz", + "integrity": "sha512-AmyMQndtxMsM59eDeA0gGiw8T2LzNvDhx/xl+ygFXXrsw+yb/mit73ndHkiHKcRA1EpNHTyD1PN9ATxghzplfg==", + "dev": true, + "requires": { + "@types/request": "^2.48.3", + "@types/request-promise-native": "^1.0.17", + "request": "^2.88.0", + "request-promise-native": "^1.0.8" + } + }, + "@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "dev": true + }, + "@types/cacheable-request": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", + "integrity": "sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA==", + "dev": true, + "requires": { + "@types/http-cache-semantics": "*", + "@types/keyv": "*", + "@types/node": "*", + "@types/responselike": "*" + } + }, + "@types/caseless": { + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", + "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==", + "dev": true + }, + "@types/http-cache-semantics": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", + "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==", + "dev": true + }, + "@types/keyv": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.3.tgz", + "integrity": "sha512-FXCJgyyN3ivVgRoml4h94G/p3kY+u/B86La+QptcqJaWtBWtmc6TtkNfS40n9bIvyLteHh7zXOtgbobORKPbDg==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/lodash": { + "version": "4.14.176", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.176.tgz", + "integrity": "sha512-xZmuPTa3rlZoIbtDUyJKZQimJV3bxCmzMIO2c9Pz9afyDro6kr7R79GwcB6mRhuoPmV2p1Vb66WOJH7F886WKQ==", + "dev": true + }, + "@types/long": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", + "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==", + "dev": true + }, + "@types/node": { + "version": "16.11.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.2.tgz", + "integrity": "sha512-w34LtBB0OkDTs19FQHXy4Ig/TOXI4zqvXS2Kk1PAsRKZ0I+nik7LlMYxckW0tSNGtvWmzB+mrCTbuEjuB9DVsw==", + "dev": true + }, + "@types/request": { + "version": "2.48.7", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.7.tgz", + "integrity": "sha512-GWP9AZW7foLd4YQxyFZDBepl0lPsWLMEXDZUjQ/c1gqVPDPECrRZyEzuhJdnPWioFCq3Tv0qoGpMD6U+ygd4ZA==", + "dev": true, + "requires": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "@types/request-promise-native": { + "version": "1.0.18", + "resolved": "https://registry.npmjs.org/@types/request-promise-native/-/request-promise-native-1.0.18.tgz", + "integrity": "sha512-tPnODeISFc/c1LjWyLuZUY+Z0uLB3+IMfNoQyDEi395+j6kTFTTRAqjENjoPJUid4vHRGEozoTrcTrfZM+AcbA==", + "dev": true, + "requires": { + "@types/request": "*" + } + }, + "@types/responselike": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz", + "integrity": "sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/tough-cookie": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz", + "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==", + "dev": true + }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + }, + "acorn-dynamic-import": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz", + "integrity": "sha1-x1K9IQvvZ5UBtsbLf8hPj0cVjMQ=", + "dev": true, + "optional": true, + "requires": { + "acorn": "^4.0.3" + }, + "dependencies": { + "acorn": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", + "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=", + "dev": true, + "optional": true + } + } + }, + "adal-node": { + "version": "0.1.28", + "resolved": "https://registry.npmjs.org/adal-node/-/adal-node-0.1.28.tgz", + "integrity": "sha1-RoxLs+u9lrEnBmn0ucuk4AZepIU=", + "dev": true, + "requires": { + "@types/node": "^8.0.47", + "async": ">=0.6.0", + "date-utils": "*", + "jws": "3.x.x", + "request": ">= 2.52.0", + "underscore": ">= 1.3.1", + "uuid": "^3.1.0", + "xmldom": ">= 0.1.x", + "xpath.js": "~1.1.0" + }, + "dependencies": { + "@types/node": { + "version": "8.10.66", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", + "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==", + "dev": true + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + } + } + }, + "adm-zip": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz", + "integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==", + "dev": true + }, + "after": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz", + "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8=", + "dev": true + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true + }, + "align-text": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2", + "longest": "^1.0.1", + "repeat-string": "^1.5.2" + } + }, + "ansi-align": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", + "dev": true, + "requires": { + "string-width": "^4.1.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } + } + }, + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "dev": true, + "optional": true + }, + "archive-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", + "integrity": "sha1-+S5yIzBW38aWlHJ0nCZ72wRrHXA=", + "dev": true, + "requires": { + "file-type": "^4.2.0" + }, + "dependencies": { + "file-type": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", + "integrity": "sha1-G2AOX8ofvcboDApwxxyNul95BsU=", + "dev": true + } + } + }, + "archiver": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.0.tgz", + "integrity": "sha512-iUw+oDwK0fgNpvveEsdQ0Ase6IIKztBJU2U0E9MzszMfmVVUyv1QJhS2ITW9ZCqx8dktAxVAjWWkKehuZE8OPg==", + "dev": true, + "requires": { + "archiver-utils": "^2.1.0", + "async": "^3.2.0", + "buffer-crc32": "^0.2.1", + "readable-stream": "^3.6.0", + "readdir-glob": "^1.0.0", + "tar-stream": "^2.2.0", + "zip-stream": "^4.1.0" + } + }, + "archiver-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-2.1.0.tgz", + "integrity": "sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==", + "dev": true, + "requires": { + "glob": "^7.1.4", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash.defaults": "^4.2.0", + "lodash.difference": "^4.5.0", + "lodash.flatten": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.union": "^4.6.0", + "normalize-path": "^3.0.0", + "readable-stream": "^2.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "are-we-there-yet": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", + "integrity": "sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g==", + "dev": true, + "optional": true, + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "arr-diff": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", + "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", + "dev": true, + "optional": true + }, + "arr-flatten": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", + "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", + "dev": true, + "optional": true + }, + "arr-union": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", + "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", + "dev": true, + "optional": true + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true + }, + "array-unique": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", + "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", + "dev": true, + "optional": true + }, + "arraybuffer.slice": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", + "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==", + "dev": true + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true, + "optional": true + }, + "asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dev": true, + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "asn1.js": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "assert": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/assert/-/assert-1.5.0.tgz", + "integrity": "sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==", + "dev": true, + "optional": true, + "requires": { + "object-assign": "^4.1.1", + "util": "0.10.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=", + "dev": true, + "optional": true + }, + "util": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", + "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", + "dev": true, + "optional": true, + "requires": { + "inherits": "2.0.1" + } + } + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true + }, + "assign-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", + "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", + "dev": true, + "optional": true + }, + "async": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.1.tgz", + "integrity": "sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==", + "dev": true + }, + "async-each": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz", + "integrity": "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==", + "dev": true, + "optional": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + }, + "at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "dev": true + }, + "atob": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", + "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", + "dev": true, + "optional": true + }, + "aws-sdk": { + "version": "2.1012.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1012.0.tgz", + "integrity": "sha512-5F/tC+mOJSTq4BTWqg6DepDIC7h+OeUycCYsFU6fMblQCUEBuI11o8z/+2DxGt4c40f52OstalYNiSlP2RuZvw==", + "dev": true, + "requires": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "dependencies": { + "buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "dev": true, + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "dev": true + }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "dev": true + }, + "uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "dev": true + } + } + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true + }, + "aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "dev": true + }, + "axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "dev": true, + "requires": { + "follow-redirects": "^1.14.0" + } + }, + "azure-functions-core-tools": { + "version": "2.7.3188", + "resolved": "https://registry.npmjs.org/azure-functions-core-tools/-/azure-functions-core-tools-2.7.3188.tgz", + "integrity": "sha512-+Q9DhsI6xTuYWryxvI2PzgzbVPx5MYaR0jWhkdAGvONm1gYgtcy6b3O7R3fe/3dP+wIyvCZ0nMOnxu+Nbcrkow==", + "dev": true, + "requires": { + "chalk": "3.0.0", + "command-exists": "1.2.8", + "glob": "7.1.6", + "https-proxy-agent": "5.0.0", + "progress": "2.0.3", + "rimraf": "3.0.2", + "tmp": "0.1.0", + "unzipper": "0.10.10" + }, + "dependencies": { + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "dev": true + }, + "agent-base": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz", + "integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ansi-styles": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.0.tgz", + "integrity": "sha512-7kFQgnEaMdRtwf6uSfUnVr9gSGC7faurn+J/Mv90/W+iTtN0405/nLdopfMWwchyxhbGYl6TC4Sccn9TUkGAgg==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "big-integer": { + "version": "1.6.48", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.48.tgz", + "integrity": "sha512-j51egjPa7/i+RdiRuJbPdJ2FIUYYPhvYLjzoYbcMMm62ooO6F94fETG4MTs46zPAF9Brs04OajboA/qTGuz78w==", + "dev": true + }, + "binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=", + "dev": true, + "requires": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + } + }, + "bluebird": { + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz", + "integrity": "sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "buffer-indexof-polyfill": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.1.tgz", + "integrity": "sha1-qfuAbOgUXVQoUQznLyeLs2OmOL8=", + "dev": true + }, + "buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s=", + "dev": true + }, + "chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=", + "dev": true, + "requires": { + "traverse": ">=0.3.0 <0.4" + } + }, + "chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "command-exists": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.8.tgz", + "integrity": "sha512-PM54PkseWbiiD/mMsbvW351/u+dafwTJ0ye2qB60G1aGQP9j3xK2gmMDc+R34L3nDtx4qMCitXT75mkbkGJDLw==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "duplexer2": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", + "integrity": "sha1-ixLauHjA1p4+eJEFFmKjL8a93ME=", + "dev": true, + "requires": { + "readable-stream": "^2.0.2" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fstream": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", + "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "inherits": "~2.0.0", + "mkdirp": ">=0.5 0", + "rimraf": "2" + }, + "dependencies": { + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, + "listenercount": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz", + "integrity": "sha1-hMinKrWcRyUyFIDJdeZQg0LnCTc=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.4.tgz", + "integrity": "sha512-iG9AK/dJLtJ0XNgTuDbSyNS3zECqDlAhnQW4CsNxBG3LQJBbHmRX1egw39DmtOdCAqY+dKXV+sgPgilNWUKMVw==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "tmp": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz", + "integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==", + "dev": true, + "requires": { + "rimraf": "^2.6.3" + }, + "dependencies": { + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=", + "dev": true + }, + "unzipper": { + "version": "0.10.10", + "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.10.tgz", + "integrity": "sha512-wEgtqtrnJ/9zIBsQb8UIxOhAH1eTHfi7D/xvmrUoMEePeI6u24nq1wigazbIFtHt6ANYXdEVTvc8XYNlTurs7A==", + "dev": true, + "requires": { + "big-integer": "^1.6.17", + "binary": "~0.3.0", + "bluebird": "~3.4.1", + "buffer-indexof-polyfill": "~1.0.0", + "duplexer2": "~0.1.4", + "fstream": "^1.0.12", + "graceful-fs": "^4.2.2", + "listenercount": "~1.0.1", + "readable-stream": "~2.3.6", + "setimmediate": "~1.0.4" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } + }, + "backo2": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", + "integrity": "sha1-MasayLEpNjRj41s+u2n038+6eUc=", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "base": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", + "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "dev": true, + "optional": true, + "requires": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "optional": true, + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + } + } + }, + "base64-arraybuffer": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.4.tgz", + "integrity": "sha1-mBjHngWbE1X5fgQooBfIOOkLqBI=", + "dev": true + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + } + }, + "big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "optional": true + }, + "binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=", + "dev": true, + "requires": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + } + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dev": true, + "optional": true, + "requires": { + "file-uri-to-path": "1.0.0" + } + }, + "bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "requires": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "blob": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.5.tgz", + "integrity": "sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig==", + "dev": true + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "bn.js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz", + "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw==", + "dev": true, + "optional": true + }, + "boxen": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "dev": true, + "requires": { + "ansi-align": "^3.0.0", + "camelcase": "^6.2.0", + "chalk": "^4.1.0", + "cli-boxes": "^2.2.1", + "string-width": "^4.2.2", + "type-fest": "^0.20.2", + "widest-line": "^3.1.0", + "wrap-ansi": "^7.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + } + } + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=", + "dev": true, + "optional": true + }, + "browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "dev": true, + "optional": true, + "requires": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "browserify-cipher": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", + "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", + "dev": true, + "optional": true, + "requires": { + "browserify-aes": "^1.0.4", + "browserify-des": "^1.0.0", + "evp_bytestokey": "^1.0.0" + } + }, + "browserify-des": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", + "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", + "dev": true, + "optional": true, + "requires": { + "cipher-base": "^1.0.1", + "des.js": "^1.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "browserify-rsa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^5.0.0", + "randombytes": "^2.0.1" + } + }, + "browserify-sign": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", + "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^5.1.1", + "browserify-rsa": "^4.0.1", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "elliptic": "^6.5.3", + "inherits": "^2.0.4", + "parse-asn1": "^5.1.5", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "optional": true + } + } + }, + "browserify-zlib": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", + "dev": true, + "optional": true, + "requires": { + "pako": "~1.0.5" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "dev": true, + "requires": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + "buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==", + "dev": true + }, + "buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", + "dev": true + }, + "buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=", + "dev": true + }, + "buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": "sha1-+PeLdniYiO858gXNY39o5wISKyw=", + "dev": true + }, + "buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=", + "dev": true, + "optional": true + }, + "buffermaker": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/buffermaker/-/buffermaker-1.2.1.tgz", + "integrity": "sha512-IdnyU2jDHU65U63JuVQNTHiWjPRH0CS3aYd/WPaEwyX84rFdukhOduAVb1jwUScmb5X0JWPw8NZOrhoLMiyAHQ==", + "dev": true, + "requires": { + "long": "1.1.2" + } + }, + "buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s=", + "dev": true + }, + "builtin-modules": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz", + "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==", + "dev": true + }, + "builtin-status-codes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=", + "dev": true, + "optional": true + }, + "cache-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", + "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "dev": true, + "optional": true, + "requires": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + } + }, + "cacheable-lookup": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", + "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", + "dev": true + }, + "cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "dev": true, + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "dependencies": { + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true + } + } + }, + "cachedir": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", + "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", + "dev": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true + }, + "center-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", + "dev": true, + "optional": true, + "requires": { + "align-text": "^0.1.3", + "lazy-cache": "^1.0.3" + } + }, + "chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=", + "dev": true, + "requires": { + "traverse": ">=0.3.0 <0.4" + }, + "dependencies": { + "traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=", + "dev": true + } + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=", + "dev": true + }, + "child-process-ext": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/child-process-ext/-/child-process-ext-2.1.1.tgz", + "integrity": "sha512-0UQ55f51JBkOFa+fvR76ywRzxiPwQS3Xe8oe5bZRphpv+dIMeerW5Zn5e4cUy4COJwVtJyU0R79RMnw+aCqmGA==", + "dev": true, + "requires": { + "cross-spawn": "^6.0.5", + "es5-ext": "^0.10.53", + "log": "^6.0.0", + "split2": "^3.1.1", + "stream-promise": "^3.2.0" + } + }, + "chokidar": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", + "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "dev": true, + "optional": true + }, + "ci-info": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.2.0.tgz", + "integrity": "sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A==", + "dev": true + }, + "cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "class-utils": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", + "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "dev": true, + "optional": true, + "requires": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "cli-boxes": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "dev": true + }, + "cli-color": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/cli-color/-/cli-color-2.0.1.tgz", + "integrity": "sha512-eBbxZF6fqPUNnf7CLAFOersUnyYzv83tHFLSlts+OAHsNendaqv2tHCq+/MO+b3Y+9JeoUlIvobyxG/Z8GNeOg==", + "dev": true, + "requires": { + "d": "^1.0.1", + "es5-ext": "^0.10.53", + "es6-iterator": "^2.0.3", + "memoizee": "^0.4.15", + "timers-ext": "^0.1.7" + } + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-progress-footer": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/cli-progress-footer/-/cli-progress-footer-2.1.1.tgz", + "integrity": "sha512-fBEAKLDp/CCMzQSeEbvz4POvomCekmT0LodI/mchzrjIPeLXQHJ9Gb28leAqEjdc9wyV40cjsB2aWpvO5MA7Pw==", + "dev": true, + "requires": { + "cli-color": "^2.0.0", + "d": "^1.0.1", + "es5-ext": "^0.10.53", + "process-utils": "^4.0.0", + "timers-ext": "^0.1.7", + "type": "^2.5.0" + } + }, + "cli-sprintf-format": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cli-sprintf-format/-/cli-sprintf-format-1.1.0.tgz", + "integrity": "sha512-t3LcCdPvrypZovStadWdRS4a186gsq9aoHJYTIer55VY20YdVjGVHDV4uPWcWCXTw1tPjfwlRGE7zKMWJ663Sw==", + "dev": true, + "requires": { + "cli-color": "^1.3", + "es5-ext": "^0.10.46", + "sprintf-kit": "2", + "supports-color": "^5.5" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "cli-color": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/cli-color/-/cli-color-1.4.0.tgz", + "integrity": "sha512-xu6RvQqqrWEo6MPR1eixqGPywhYBHRs653F9jfXB2Hx4jdM/3WxiNE1vppRmxtMIfl16SFYTpYlrnqH/HsK/2w==", + "dev": true, + "requires": { + "ansi-regex": "^2.1.1", + "d": "1", + "es5-ext": "^0.10.46", + "es6-iterator": "^2.0.3", + "memoizee": "^0.4.14", + "timers-ext": "^0.1.5" + } + } + } + }, + "cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true + }, + "cliui": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", + "dev": true, + "optional": true, + "requires": { + "center-align": "^0.1.1", + "right-align": "^0.1.1", + "wordwrap": "0.0.2" + } + }, + "clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", + "dev": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true, + "optional": true + }, + "collection-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", + "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", + "dev": true, + "optional": true, + "requires": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + } + }, + "color": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "dev": true, + "requires": { + "color-convert": "^1.9.3", + "color-string": "^1.6.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "color-string": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.6.0.tgz", + "integrity": "sha512-c/hGS+kRWJutUBEngKKmk4iH3sD59MBkoxVapS/0wgpCz2u7XsNloxknyvBhzwEs1IbV36D9PwqLPJ2DTu3vMA==", + "dev": true, + "requires": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "colornames": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/colornames/-/colornames-1.1.1.tgz", + "integrity": "sha1-+IiQMGhcfE/54qVZ9Qd+t2qBb5Y=", + "dev": true + }, + "colors": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz", + "integrity": "sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg==", + "dev": true + }, + "colorspace": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", + "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", + "dev": true, + "requires": { + "color": "^3.1.3", + "text-hex": "1.0.x" + } + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", + "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==", + "dev": true + }, + "component-bind": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", + "integrity": "sha1-AMYIq33Nk4l8AAllGx06jh5zu9E=", + "dev": true + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", + "dev": true + }, + "component-inherit": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", + "integrity": "sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM=", + "dev": true + }, + "compress-commons": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-4.1.1.tgz", + "integrity": "sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==", + "dev": true, + "requires": { + "buffer-crc32": "^0.2.13", + "crc32-stream": "^4.0.2", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "console-browserify": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==", + "dev": true, + "optional": true + }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", + "dev": true, + "optional": true + }, + "constants-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=", + "dev": true, + "optional": true + }, + "content-disposition": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "dev": true, + "requires": { + "safe-buffer": "5.1.2" + } + }, + "cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==", + "dev": true + }, + "copy-descriptor": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", + "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", + "dev": true, + "optional": true + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "crc": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/crc/-/crc-3.8.0.tgz", + "integrity": "sha512-iX3mfgcTMIq3ZKLIsVFAbv7+Mc10kxabAGQb8HvjA1o3T1PIYprbakQ65d3I+2HGHt6nSKkM9PYjgoJO2KcFBQ==", + "dev": true, + "optional": true, + "requires": { + "buffer": "^5.1.0" + } + }, + "crc-32": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.0.tgz", + "integrity": "sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==", + "dev": true, + "requires": { + "exit-on-epipe": "~1.0.1", + "printj": "~1.1.0" + } + }, + "crc32-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-4.0.2.tgz", + "integrity": "sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==", + "dev": true, + "requires": { + "crc-32": "^1.2.0", + "readable-stream": "^3.4.0" + } + }, + "create-ecdh": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", + "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "elliptic": "^6.5.3" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "dev": true, + "optional": true, + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "dev": true, + "optional": true, + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=", + "dev": true + }, + "crypto-browserify": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", + "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", + "dev": true, + "optional": true, + "requires": { + "browserify-cipher": "^1.0.0", + "browserify-sign": "^4.0.0", + "create-ecdh": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.0", + "diffie-hellman": "^5.0.0", + "inherits": "^2.0.1", + "pbkdf2": "^3.0.3", + "public-encrypt": "^4.0.0", + "randombytes": "^2.0.0", + "randomfill": "^1.0.3" + } + }, + "d": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", + "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", + "dev": true, + "requires": { + "es5-ext": "^0.10.50", + "type": "^1.0.1" + }, + "dependencies": { + "type": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", + "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", + "dev": true + } + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "date-utils": { + "version": "1.2.21", + "resolved": "https://registry.npmjs.org/date-utils/-/date-utils-1.2.21.tgz", + "integrity": "sha1-YfsWzcEnSzyayq/+n8ad+HIKK2Q=", + "dev": true + }, + "dayjs": { + "version": "1.10.7", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.7.tgz", + "integrity": "sha512-P6twpd70BcPK34K26uJ1KT3wlhpuOAPoMwJzpsIWUxHZ7wpmbdZL/hQqBDfz7hGurYSa5PhzdhDHtt319hL3ig==", + "dev": true + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "optional": true + }, + "decode-uri-component": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", + "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", + "dev": true, + "optional": true + }, + "decompress": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", + "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", + "dev": true, + "requires": { + "decompress-tar": "^4.0.0", + "decompress-tarbz2": "^4.0.0", + "decompress-targz": "^4.0.0", + "decompress-unzip": "^4.0.1", + "graceful-fs": "^4.1.10", + "make-dir": "^1.0.0", + "pify": "^2.3.0", + "strip-dirs": "^2.0.0" + }, + "dependencies": { + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "dev": true, + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + } + } + } + } + }, + "decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", + "dev": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "decompress-tar": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", + "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", + "dev": true, + "requires": { + "file-type": "^5.2.0", + "is-stream": "^1.1.0", + "tar-stream": "^1.5.2" + }, + "dependencies": { + "bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } + } + } + }, + "decompress-tarbz2": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", + "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", + "dev": true, + "requires": { + "decompress-tar": "^4.1.0", + "file-type": "^6.1.0", + "is-stream": "^1.1.0", + "seek-bzip": "^1.0.5", + "unbzip2-stream": "^1.0.9" + }, + "dependencies": { + "file-type": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", + "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", + "dev": true + } + } + }, + "decompress-targz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", + "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", + "dev": true, + "requires": { + "decompress-tar": "^4.1.1", + "file-type": "^5.2.0", + "is-stream": "^1.1.0" + }, + "dependencies": { + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=", + "dev": true + } + } + }, + "decompress-unzip": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", + "integrity": "sha1-3qrM39FK6vhVePczroIQ+bSEj2k=", + "dev": true, + "requires": { + "file-type": "^3.8.0", + "get-stream": "^2.2.0", + "pify": "^2.3.0", + "yauzl": "^2.4.2" + }, + "dependencies": { + "file-type": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "integrity": "sha1-JXoHg4TR24CHvESdEH1SpSZyuek=", + "dev": true + }, + "get-stream": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", + "integrity": "sha1-Xzj5PzRgCWZu4BUKBUFn+Rvdld4=", + "dev": true, + "requires": { + "object-assign": "^4.0.1", + "pinkie-promise": "^2.0.0" + } + } + } + }, + "deep-equal": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.1.1.tgz", + "integrity": "sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==", + "dev": true, + "requires": { + "is-arguments": "^1.0.4", + "is-date-object": "^1.0.1", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object-keys": "^1.1.1", + "regexp.prototype.flags": "^1.2.0" + } + }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "defer-to-connect": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==", + "dev": true + }, + "deferred": { + "version": "0.7.11", + "resolved": "https://registry.npmjs.org/deferred/-/deferred-0.7.11.tgz", + "integrity": "sha512-8eluCl/Blx4YOGwMapBvXRKxHXhA8ejDXYzEaK8+/gtcm8hRMhSLmXSqDmNUKNc/C8HNSmuyyp/hflhqDAvK2A==", + "dev": true, + "requires": { + "d": "^1.0.1", + "es5-ext": "^0.10.50", + "event-emitter": "^0.3.5", + "next-tick": "^1.0.0", + "timers-ext": "^0.1.7" + } + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, + "define-property": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", + "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "dependencies": { + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "optional": true, + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + } + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true + }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", + "dev": true, + "optional": true + }, + "denque": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", + "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", + "dev": true + }, + "des.js": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", + "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=", + "dev": true, + "optional": true + }, + "diagnostics": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/diagnostics/-/diagnostics-1.1.1.tgz", + "integrity": "sha512-8wn1PmdunLJ9Tqbx+Fx/ZEuHfJf4NKSN2ZBj7SJC/OWRWha843+WsTjqMe1B5E3p28jqBlp+mJ2fPVxPyNgYKQ==", + "dev": true, + "requires": { + "colorspace": "1.1.x", + "enabled": "1.0.x", + "kuler": "1.0.x" + } + }, + "diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true, + "optional": true + }, + "diffie-hellman": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "miller-rabin": "^4.0.0", + "randombytes": "^2.0.0" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "dijkstrajs": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.2.tgz", + "integrity": "sha512-QV6PMaHTCNmKSeP6QoXhVTw9snc9VD8MulTT0Bd99Pacp4SS1cjcrYPgBPmibqKVtMJJfqC6XvOXgPMEEPH/fg==", + "dev": true + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "requires": { + "path-type": "^4.0.0" + } + }, + "domain-browser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", + "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==", + "dev": true, + "optional": true + }, + "dot-qs": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dot-qs/-/dot-qs-0.2.0.tgz", + "integrity": "sha1-02UX/iS3zaYfznpQJqACSvr1pDk=", + "dev": true + }, + "dotenv": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz", + "integrity": "sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==", + "dev": true + }, + "dotenv-expand": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", + "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", + "dev": true + }, + "duplexer3": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=", + "dev": true + }, + "duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dev": true, + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "duration": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/duration/-/duration-0.2.2.tgz", + "integrity": "sha512-06kgtea+bGreF5eKYgI/36A6pLXggY7oR4p1pq4SmdFBn1ReOL5D8RhG64VrqfTTKNucqqtBAwEj8aB88mcqrg==", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "~0.10.46" + } + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dev": true, + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true, + "optional": true + }, + "enabled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-1.0.2.tgz", + "integrity": "sha1-ll9lE9LC0cX0ZStkouM5ZGf8L5M=", + "dev": true, + "requires": { + "env-variable": "0.0.x" + } + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "requires": { + "once": "^1.4.0" + } + }, + "engine.io-client": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-3.5.2.tgz", + "integrity": "sha512-QEqIp+gJ/kMHeUun7f5Vv3bteRHppHH/FMBQX/esFj/fuYfjyUKWGMo3VCvIP/V8bE9KcjHmRZrhIz2Z9oNsDA==", + "dev": true, + "requires": { + "component-emitter": "~1.3.0", + "component-inherit": "0.0.3", + "debug": "~3.1.0", + "engine.io-parser": "~2.2.0", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "parseqs": "0.0.6", + "parseuri": "0.0.6", + "ws": "~7.4.2", + "xmlhttprequest-ssl": "~1.6.2", + "yeast": "0.1.2" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "ws": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", + "dev": true + } + } + }, + "engine.io-parser": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-2.2.1.tgz", + "integrity": "sha512-x+dN/fBH8Ro8TFwJ+rkB2AmuVw9Yu2mockR/p3W8f8YtExwFgDvBDi0GWyb4ZLkpahtDGZgtr3zLovanJghPqg==", + "dev": true, + "requires": { + "after": "0.8.2", + "arraybuffer.slice": "~0.0.7", + "base64-arraybuffer": "0.1.4", + "blob": "0.0.5", + "has-binary2": "~1.0.2" + } + }, + "enhanced-resolve": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz", + "integrity": "sha1-BCHjOf1xQZs9oT0Smzl5BAIwR24=", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.2", + "memory-fs": "^0.4.0", + "object-assign": "^4.0.1", + "tapable": "^0.2.7" + } + }, + "env-variable": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/env-variable/-/env-variable-0.0.6.tgz", + "integrity": "sha512-bHz59NlBbtS0NhftmR8+ExBEekE7br0e01jw+kk0NDro7TtZzBYZ5ScGPs3OmwnpyfHTHOtr1Y6uedCdrIldtg==", + "dev": true + }, + "errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dev": true, + "optional": true, + "requires": { + "prr": "~1.0.1" + } + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "optional": true, + "requires": { + "is-arrayish": "^0.2.1" + }, + "dependencies": { + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true, + "optional": true + } + } + }, + "es5-ext": { + "version": "0.10.53", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.53.tgz", + "integrity": "sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==", + "dev": true, + "requires": { + "es6-iterator": "~2.0.3", + "es6-symbol": "~3.1.3", + "next-tick": "~1.0.0" + }, + "dependencies": { + "next-tick": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", + "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=", + "dev": true + } + } + }, + "es6-iterator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", + "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" + } + }, + "es6-map": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/es6-map/-/es6-map-0.1.5.tgz", + "integrity": "sha1-kTbgUD3MBqMBaQ8LsU/042TpSfA=", + "dev": true, + "optional": true, + "requires": { + "d": "1", + "es5-ext": "~0.10.14", + "es6-iterator": "~2.0.1", + "es6-set": "~0.1.5", + "es6-symbol": "~3.1.1", + "event-emitter": "~0.3.5" + } + }, + "es6-promisify": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-6.1.1.tgz", + "integrity": "sha512-HBL8I3mIki5C1Cc9QjKUenHtnG0A5/xA8Q/AllRcfiwl2CZFXGK7ddBiCoRwAix4i2KxcQfjtIVcrVbB3vbmwg==", + "dev": true + }, + "es6-set": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/es6-set/-/es6-set-0.1.5.tgz", + "integrity": "sha1-0rPsXU2ADO2BjbU40ol02wpzzLE=", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "~0.10.14", + "es6-iterator": "~2.0.1", + "es6-symbol": "3.1.1", + "event-emitter": "~0.3.5" + }, + "dependencies": { + "es6-symbol": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.1.tgz", + "integrity": "sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "~0.10.14" + } + } + } + }, + "es6-symbol": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", + "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "dev": true, + "requires": { + "d": "^1.0.1", + "ext": "^1.1.2" + } + }, + "es6-weak-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz", + "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "^0.10.46", + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.1" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dev": true, + "requires": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + } + }, + "escope": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz", + "integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=", + "dev": true, + "optional": true, + "requires": { + "es6-map": "^0.1.3", + "es6-weak-map": "^2.0.1", + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "esniff": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/esniff/-/esniff-1.1.0.tgz", + "integrity": "sha1-xmhJIp+RRk3t4uDUAgHtar9l8qw=", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "^0.10.12" + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "optional": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true, + "optional": true + } + } + }, + "essentials": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/essentials/-/essentials-1.1.1.tgz", + "integrity": "sha512-SmaxoAdVu86XkZQM/u6TYSu96ZlFGwhvSk1l9zAkznFuQkMb9mRDS2iq/XWDow7R8OwBwdYH8nLyDKznMD+GWw==", + "dev": true + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "event-emitter": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", + "integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=", + "dev": true, + "requires": { + "d": "1", + "es5-ext": "~0.10.14" + } + }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true + }, + "events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=", + "dev": true + }, + "evp_bytestokey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "dev": true, + "optional": true, + "requires": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "dev": true, + "optional": true, + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "dev": true, + "optional": true, + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", + "dev": true, + "optional": true + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "optional": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "dev": true, + "optional": true + } + } + }, + "exit-on-epipe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", + "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==", + "dev": true + }, + "expand-brackets": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", + "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", + "dev": true, + "optional": true, + "requires": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true, + "optional": true + } + } + }, + "expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "dev": true, + "optional": true + }, + "ext": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz", + "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==", + "dev": true, + "requires": { + "type": "^2.5.0" + } + }, + "ext-list": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "dev": true, + "requires": { + "mime-db": "^1.28.0" + } + }, + "ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "dev": true, + "requires": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + } + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", + "dev": true, + "optional": true, + "requires": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dev": true, + "optional": true, + "requires": { + "is-plain-object": "^2.0.4" + } + } + } + }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, + "extglob": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", + "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", + "dev": true, + "optional": true, + "requires": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "optional": true, + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + } + } + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-glob": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz", + "integrity": "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, + "fastest-levenshtein": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz", + "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==", + "dev": true + }, + "fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, + "fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", + "dev": true, + "requires": { + "pend": "~1.2.0" + } + }, + "fecha": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.1.tgz", + "integrity": "sha512-MMMQ0ludy/nBs1/o0zVOiKTpG7qMbonKUzjJgQFEuvq6INZ1OraKPRAWkBq5vlKLOUMpmNYG1JoN3oDPUQ9m3Q==", + "dev": true + }, + "figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-type": { + "version": "16.5.3", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-16.5.3.tgz", + "integrity": "sha512-uVsl7iFhHSOY4bEONLlTK47iAHtNsFHWP5YE4xJfZ4rnX7S1Q3wce09XgqSC7E/xh8Ncv/be1lNoyprlUH/x6A==", + "dev": true, + "requires": { + "readable-web-to-node-stream": "^3.0.0", + "strtok3": "^6.2.4", + "token-types": "^4.1.1" + } + }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "dev": true, + "optional": true + }, + "filename-reserved-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", + "integrity": "sha1-q/c9+rc10EVECr/qLZHzieu/oik=", + "dev": true + }, + "filenamify": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-4.3.0.tgz", + "integrity": "sha512-hcFKyUG57yWGAzu1CMt/dPzYZuv+jAJUT85bL8mrXvNe6hWj6yEHEc4EdcgiA6Z3oi1/9wXJdZPXF2dZNgwgOg==", + "dev": true, + "requires": { + "filename-reserved-regex": "^2.0.0", + "strip-outer": "^1.0.1", + "trim-repeated": "^1.0.0" + } + }, + "filesize": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.3.tgz", + "integrity": "sha512-UrhwVdUWmP0Jo9uLhVro8U36D4Yp3uT6pfXeNJHVRwyQrZjsqfnypOLthfnuB/bk1glUu7aIY947kyfoOfXuog==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-requires": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/find-requires/-/find-requires-1.0.0.tgz", + "integrity": "sha512-UME7hNwBfzeISSFQcBEDemEEskpOjI/shPrpJM5PI4DSdn6hX0dmz+2dL70blZER2z8tSnTRL+2rfzlYgtbBoQ==", + "dev": true, + "requires": { + "es5-ext": "^0.10.49", + "esniff": "^1.1.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "optional": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "follow-redirects": { + "version": "1.14.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.4.tgz", + "integrity": "sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g==", + "dev": true + }, + "for-in": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", + "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", + "dev": true, + "optional": true + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.2.tgz", + "integrity": "sha512-V8gLm+41I/8kguQ4/o1D3RIHRmhYFG4pnNyonvua+40rqcEmT4+V71yaZ3B457xbbgCsCfjSPi65u/W6vK1U5Q==", + "dev": true + }, + "fragment-cache": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", + "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", + "dev": true, + "optional": true, + "requires": { + "map-cache": "^0.2.2" + } + }, + "fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, + "fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "requires": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "dependencies": { + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", + "dev": true + } + } + }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fs2": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/fs2/-/fs2-0.3.9.tgz", + "integrity": "sha512-WsOqncODWRlkjwll+73bAxVW3JPChDgaPX3DT4iTTm73UmG4VgALa7LaFblP232/DN60itkOrPZ8kaP1feksGQ==", + "dev": true, + "requires": { + "d": "^1.0.1", + "deferred": "^0.7.11", + "es5-ext": "^0.10.53", + "event-emitter": "^0.3.5", + "ignore": "^5.1.8", + "memoizee": "^0.4.14", + "type": "^2.1.0" + } + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "dev": true, + "optional": true, + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true, + "optional": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "optional": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true, + "optional": true + }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-stdin": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", + "integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==", + "dev": true + }, + "get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "get-value": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", + "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", + "dev": true, + "optional": true + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=", + "dev": true, + "optional": true + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "globby": { + "version": "11.0.4", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.4.tgz", + "integrity": "sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==", + "dev": true, + "requires": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.1.1", + "ignore": "^5.1.4", + "merge2": "^1.3.0", + "slash": "^3.0.0" + } + }, + "got": { + "version": "11.8.2", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.2.tgz", + "integrity": "sha512-D0QywKgIe30ODs+fm8wMZiAcZjypcCodPNuMz5H9Mny7RJ+IjJ10BdmGW7OM7fHXP+O7r6ZwapQ/YQmMSvB0UQ==", + "dev": true, + "requires": { + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.1", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" + }, + "dependencies": { + "@sindresorhus/is": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.2.0.tgz", + "integrity": "sha512-VkE3KLBmJwcCaVARtQpfuKcKv8gcBmUubrfHGF84dXuuW6jgsRYxPtzcIhPyK9WAPpRt2/xY6zkD9MnRaJzSyw==", + "dev": true + }, + "@szmarczak/http-timer": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", + "dev": true, + "requires": { + "defer-to-connect": "^2.0.0" + } + }, + "cacheable-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz", + "integrity": "sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==", + "dev": true, + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^4.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" + } + }, + "decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "requires": { + "mimic-response": "^3.1.0" + } + }, + "defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true + }, + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "keyv": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.3.tgz", + "integrity": "sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA==", + "dev": true, + "requires": { + "json-buffer": "3.0.1" + } + }, + "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true + }, + "mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true + }, + "normalize-url": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", + "dev": true + }, + "p-cancelable": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", + "dev": true + }, + "responselike": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz", + "integrity": "sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==", + "dev": true, + "requires": { + "lowercase-keys": "^2.0.0" + } + } + } + }, + "graceful-fs": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", + "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", + "dev": true + }, + "graphlib": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", + "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "dev": true, + "requires": { + "lodash": "^4.17.15" + } + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true + }, + "har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "dev": true, + "requires": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-4.0.1.tgz", + "integrity": "sha512-Qr4RtTm30xvEdqUXbSBVWDu+PrTokJOwe/FU+VdfJPk+MXAPoeOzKpRyrDTnZIJwAkQ4oBLTU53nu0HrkF/Z2A==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "has-binary2": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-binary2/-/has-binary2-1.0.3.tgz", + "integrity": "sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw==", + "dev": true, + "requires": { + "isarray": "2.0.1" + }, + "dependencies": { + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=", + "dev": true + } + } + }, + "has-cors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz", + "integrity": "sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk=", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true + }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", + "dev": true, + "optional": true + }, + "has-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", + "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", + "dev": true, + "optional": true, + "requires": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + } + }, + "has-values": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", + "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", + "dev": true, + "optional": true, + "requires": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true, + "optional": true + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", + "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "optional": true + } + } + }, + "hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "dev": true, + "optional": true, + "requires": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "homedir-polyfill": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", + "integrity": "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==", + "dev": true, + "optional": true, + "requires": { + "parse-passwd": "^1.0.0" + } + }, + "hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true, + "optional": true + }, + "http-cache-semantics": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", + "dev": true + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, + "http2-wrapper": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "dev": true, + "requires": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" + } + }, + "https-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", + "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=", + "dev": true, + "optional": true + }, + "https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, + "ignore": { + "version": "5.1.8", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", + "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "dev": true + }, + "immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=", + "dev": true + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "inquirer": { + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", + "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.19", + "mute-stream": "0.0.8", + "run-async": "^2.4.0", + "rxjs": "^6.6.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } + } + }, + "inquirer-autocomplete-prompt": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/inquirer-autocomplete-prompt/-/inquirer-autocomplete-prompt-1.4.0.tgz", + "integrity": "sha512-qHgHyJmbULt4hI+kCmwX92MnSxDs/Yhdt4wPA30qnoa01OF6uTXV8yvH4hKXgdaTNmkZ9D01MHjqKYEuJN+ONw==", + "dev": true, + "requires": { + "ansi-escapes": "^4.3.1", + "chalk": "^4.0.0", + "figures": "^3.2.0", + "run-async": "^2.4.0", + "rxjs": "^6.6.2" + } + }, + "interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true, + "optional": true + }, + "invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=", + "dev": true, + "optional": true + }, + "ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", + "dev": true + }, + "is-accessor-descriptor": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", + "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-buffer": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "dev": true + }, + "is-builtin-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", + "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", + "dev": true, + "optional": true, + "requires": { + "builtin-modules": "^1.0.0" + }, + "dependencies": { + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true, + "optional": true + } + } + }, + "is-core-module": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", + "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", + "dev": true, + "optional": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-data-descriptor": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", + "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-descriptor": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", + "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "dev": true, + "optional": true, + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "dependencies": { + "kind-of": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", + "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", + "dev": true, + "optional": true + } + } + }, + "is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true + }, + "is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", + "dev": true, + "optional": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "optional": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-natural-number": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", + "integrity": "sha1-q5124dtM7VHjXeDHLr7PCfc0zeg=", + "dev": true + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true + }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "optional": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "is-promise": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", + "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==", + "dev": true + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true, + "optional": true + }, + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "requires": { + "is-docker": "^2.0.0" + } + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "dev": true, + "optional": true + }, + "isomorphic-ws": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", + "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true + }, + "jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + }, + "dependencies": { + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + } + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true + }, + "json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=", + "dev": true + }, + "json-cycle": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/json-cycle/-/json-cycle-1.3.0.tgz", + "integrity": "sha512-FD/SedD78LCdSvJaOUQAXseT8oQBb5z6IVYaQaCrVUlu9zOAr1BDdKyVYQaSD/GDsAMrXpKcOyBD4LIl8nfjHw==", + "dev": true + }, + "json-loader": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/json-loader/-/json-loader-0.5.7.tgz", + "integrity": "sha512-QLPs8Dj7lnf3e3QYS1zkCo+4ZwqOiF9d/nZnYozTISxXWCfNs9yuky5rJw4/W34s7POaNlbZmQGaB5NiXCbP4w==", + "dev": true, + "optional": true + }, + "json-refs": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", + "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", + "dev": true, + "requires": { + "commander": "~4.1.1", + "graphlib": "^2.1.8", + "js-yaml": "^3.13.1", + "lodash": "^4.17.15", + "native-promise-only": "^0.8.1", + "path-loader": "^1.0.10", + "slash": "^3.0.0", + "uri-js": "^4.2.2" + }, + "dependencies": { + "commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + } + } + }, + "json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true + }, + "json5": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", + "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=", + "dev": true, + "optional": true + }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "jsonpath": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz", + "integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==", + "dev": true, + "requires": { + "esprima": "1.2.2", + "static-eval": "2.0.2", + "underscore": "1.12.1" + }, + "dependencies": { + "esprima": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", + "integrity": "sha1-dqD9Zvz+FU/SkmZ9wmQBl1CxZXs=", + "dev": true + }, + "underscore": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==", + "dev": true + } + } + }, + "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "jszip": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.7.1.tgz", + "integrity": "sha512-ghL0tz1XG9ZEmRMcEN2vt7xabrDdqHHeykgARpmZ0BiIctWxM47Vt63ZO2dnp4QYt/xJVLLy5Zv1l/xRdh2byg==", + "dev": true, + "requires": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "set-immediate-shim": "~1.0.1" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dev": true, + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dev": true, + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "jwt-decode": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-2.2.0.tgz", + "integrity": "sha1-fYa9VmefWM5qhHBKZX3TkruoGnk=", + "dev": true + }, + "kafka-node": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/kafka-node/-/kafka-node-5.0.0.tgz", + "integrity": "sha512-dD2ga5gLcQhsq1yNoQdy1MU4x4z7YnXM5bcG9SdQuiNr5KKuAmXixH1Mggwdah5o7EfholFbcNDPSVA6BIfaug==", + "dev": true, + "requires": { + "async": "^2.6.2", + "binary": "~0.3.0", + "bl": "^2.2.0", + "buffer-crc32": "~0.2.5", + "buffermaker": "~1.2.0", + "debug": "^2.1.3", + "denque": "^1.3.0", + "lodash": "^4.17.4", + "minimatch": "^3.0.2", + "nested-error-stacks": "^2.0.0", + "optional": "^0.1.3", + "retry": "^0.10.1", + "snappy": "^6.0.1", + "uuid": "^3.0.0" + }, + "dependencies": { + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dev": true, + "requires": { + "lodash": "^4.17.14" + } + }, + "bl": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", + "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", + "dev": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + } + } + }, + "keyv": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", + "dev": true, + "requires": { + "json-buffer": "3.0.0" + } + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "^1.1.5" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true, + "optional": true + } + } + }, + "kuler": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-1.0.1.tgz", + "integrity": "sha512-J9nVUucG1p/skKul6DU3PUZrhs0LPulNaeUOox0IyXDi8S4CztTHs1gQphhuZmzXG7VOQSf6NJfKuzteQLv9gQ==", + "dev": true, + "requires": { + "colornames": "^1.1.1" + } + }, + "lazy-cache": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", + "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", + "dev": true, + "optional": true + }, + "lazystream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz", + "integrity": "sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=", + "dev": true, + "requires": { + "readable-stream": "^2.0.5" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "dev": true, + "optional": true, + "requires": { + "invert-kv": "^1.0.0" + } + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "requires": { + "immediate": "~3.0.5" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "loader-runner": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz", + "integrity": "sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==", + "dev": true, + "optional": true + }, + "loader-utils": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", + "dev": true, + "optional": true, + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" + }, + "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "optional": true, + "requires": { + "minimist": "^1.2.0" + } + } + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "optional": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "lodash.defaults": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=", + "dev": true + }, + "lodash.difference": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz", + "integrity": "sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=", + "dev": true + }, + "lodash.flatten": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=", + "dev": true + }, + "lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=", + "dev": true + }, + "lodash.union": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz", + "integrity": "sha1-SLtQiECfFvGCFmZkHETdGqrjzYg=", + "dev": true + }, + "log": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/log/-/log-6.3.1.tgz", + "integrity": "sha512-McG47rJEWOkXTDioZzQNydAVvZNeEkSyLJ1VWkFwfW+o1knW+QSi8D1KjPn/TnctV+q99lkvJNe1f0E1IjfY2A==", + "dev": true, + "requires": { + "d": "^1.0.1", + "duration": "^0.2.2", + "es5-ext": "^0.10.53", + "event-emitter": "^0.3.5", + "sprintf-kit": "^2.0.1", + "type": "^2.5.0", + "uni-global": "^1.0.0" + } + }, + "log-node": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/log-node/-/log-node-8.0.1.tgz", + "integrity": "sha512-w6ii8zZo+O4Os9EBB0+ruaeVU6CysNgYj/cUDOtobBxnNPRHynjMjzyqjEuNKGT/AD89sZzGh0pS3/0ZPRR1iQ==", + "dev": true, + "requires": { + "cli-color": "^2.0.0", + "cli-sprintf-format": "^1.1.0", + "d": "^1.0.1", + "es5-ext": "^0.10.53", + "has-ansi": "^4.0.1", + "sprintf-kit": "^2.0.1", + "supports-color": "^8.1.1", + "type": "^2.5.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "logform": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.3.0.tgz", + "integrity": "sha512-graeoWUH2knKbGthMtuG1EfaSPMZFZBIrhuJHhkS5ZseFBrc7DupCzihOQAzsK/qIKPQaPJ/lFQFctILUY5ARQ==", + "dev": true, + "requires": { + "colors": "^1.2.1", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "safe-stable-stringify": "^1.1.0", + "triple-beam": "^1.3.0" + } + }, + "long": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/long/-/long-1.1.2.tgz", + "integrity": "sha1-6u9ZUcp1UdlpJrgtokLbnWso+1M=", + "dev": true + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", + "dev": true, + "optional": true + }, + "lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "dev": true + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "lru-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz", + "integrity": "sha1-Jzi9nw089PhEkMVzbEhpmsYyzaM=", + "dev": true, + "requires": { + "es5-ext": "~0.10.2" + } + }, + "make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "optional": true + }, + "map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", + "dev": true, + "optional": true + }, + "map-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", + "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", + "dev": true, + "optional": true, + "requires": { + "object-visit": "^1.0.0" + } + }, + "md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "dev": true, + "requires": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true + } + } + }, + "md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "dev": true, + "optional": true, + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", + "dev": true + }, + "mem": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", + "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "dev": true, + "optional": true, + "requires": { + "mimic-fn": "^1.0.0" + }, + "dependencies": { + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true, + "optional": true + } + } + }, + "memoizee": { + "version": "0.4.15", + "resolved": "https://registry.npmjs.org/memoizee/-/memoizee-0.4.15.tgz", + "integrity": "sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ==", + "dev": true, + "requires": { + "d": "^1.0.1", + "es5-ext": "^0.10.53", + "es6-weak-map": "^2.0.3", + "event-emitter": "^0.3.5", + "is-promise": "^2.2.2", + "lru-queue": "^0.1.0", + "next-tick": "^1.1.0", + "timers-ext": "^0.1.7" + } + }, + "memory-fs": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz", + "integrity": "sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=", + "dev": true, + "optional": true, + "requires": { + "errno": "^0.1.3", + "readable-stream": "^2.0.1" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "dev": true + }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, + "miller-rabin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true + }, + "mime-db": { + "version": "1.50.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.50.0.tgz", + "integrity": "sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A==", + "dev": true + }, + "mime-types": { + "version": "2.1.33", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.33.tgz", + "integrity": "sha512-plLElXp7pRDd0bNZHw+nMd52vRYjLwQjygaNg7ddJ2uJtTlmnTCjWuPKxVu6//AdaRuME84SvLW91sIkBqGT0g==", + "dev": true, + "requires": { + "mime-db": "1.50.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true + }, + "minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", + "dev": true, + "optional": true + }, + "minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=", + "dev": true, + "optional": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "minipass": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.5.tgz", + "integrity": "sha512-+8NzxD82XQoNKNrl1d/FSi+X8wAEWR+sbYAfIvub4Nz0d22plFG72CEVVaufV8PNf4qSslFTD8VMOxNVhHCjTw==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "requires": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + } + }, + "mixin-deep": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", + "dev": true, + "optional": true, + "requires": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dev": true, + "optional": true, + "requires": { + "is-plain-object": "^2.0.4" + } + } + } + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "nan": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", + "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==", + "dev": true, + "optional": true + }, + "nanoid": { + "version": "2.1.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-2.1.11.tgz", + "integrity": "sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA==", + "dev": true + }, + "nanomatch": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", + "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", + "dev": true, + "optional": true, + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + } + } + }, + "napi-build-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", + "dev": true, + "optional": true + }, + "native-promise-only": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", + "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=", + "dev": true + }, + "ncjsm": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ncjsm/-/ncjsm-4.2.0.tgz", + "integrity": "sha512-L2Qij4PTy7Bs4TB24zs7FLIAYJTaR5JPvSig5hIcO059LnMCNgy6MfHHNyg8s/aekPKrTqKX90gBGt3NNGvhdw==", + "dev": true, + "requires": { + "builtin-modules": "^3.2.0", + "deferred": "^0.7.11", + "es5-ext": "^0.10.53", + "es6-set": "^0.1.5", + "find-requires": "^1.0.0", + "fs2": "^0.3.9", + "type": "^2.5.0" + } + }, + "neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "optional": true + }, + "nested-error-stacks": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", + "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", + "dev": true + }, + "next-tick": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", + "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node-abi": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.30.1.tgz", + "integrity": "sha512-/2D0wOQPgaUWzVSVgRMx+trKJRC2UG4SUc4oCJoXx9Uxjtp0Vy3/kt7zcbxHF8+Z/pK3UloLWzBISg72brfy1w==", + "dev": true, + "optional": true, + "requires": { + "semver": "^5.4.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "optional": true + } + } + }, + "node-dir": { + "version": "0.1.17", + "resolved": "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz", + "integrity": "sha1-X1Zl2TNRM1yqvvjxxVRRbPXx5OU=", + "dev": true, + "requires": { + "minimatch": "^3.0.2" + } + }, + "node-fetch": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz", + "integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "node-libs-browser": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.2.1.tgz", + "integrity": "sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==", + "dev": true, + "optional": true, + "requires": { + "assert": "^1.1.1", + "browserify-zlib": "^0.2.0", + "buffer": "^4.3.0", + "console-browserify": "^1.1.0", + "constants-browserify": "^1.0.0", + "crypto-browserify": "^3.11.0", + "domain-browser": "^1.1.1", + "events": "^3.0.0", + "https-browserify": "^1.0.0", + "os-browserify": "^0.3.0", + "path-browserify": "0.0.1", + "process": "^0.11.10", + "punycode": "^1.2.4", + "querystring-es3": "^0.2.0", + "readable-stream": "^2.3.3", + "stream-browserify": "^2.0.1", + "stream-http": "^2.7.2", + "string_decoder": "^1.0.0", + "timers-browserify": "^2.0.4", + "tty-browserify": "0.0.0", + "url": "^0.11.0", + "util": "^0.11.0", + "vm-browserify": "^1.0.1" + }, + "dependencies": { + "buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "dev": true, + "optional": true, + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "optional": true + }, + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true, + "optional": true + }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "dev": true, + "optional": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "url": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "dev": true, + "optional": true, + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "dependencies": { + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true, + "optional": true + } + } + } + } + }, + "noop-logger": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/noop-logger/-/noop-logger-0.1.1.tgz", + "integrity": "sha1-lKKxYzxPExdVMAfYlm/Q6EG2pMI=", + "dev": true, + "optional": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "optional": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "optional": true + } + } + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "normalize-url": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", + "dev": true + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "dev": true, + "optional": true, + "requires": { + "path-key": "^2.0.0" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "dev": true, + "optional": true, + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true, + "optional": true + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, + "object-copy": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", + "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", + "dev": true, + "optional": true, + "requires": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "object-hash": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", + "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", + "dev": true + }, + "object-is": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object-visit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", + "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", + "dev": true, + "optional": true, + "requires": { + "isobject": "^3.0.0" + } + }, + "object.pick": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", + "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", + "dev": true, + "optional": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "one-time": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-0.0.4.tgz", + "integrity": "sha1-+M33eISCb+Tf+T46nMN7HkSAdC4=", + "dev": true + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "open": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz", + "integrity": "sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==", + "dev": true, + "requires": { + "is-docker": "^2.0.0", + "is-wsl": "^2.1.1" + } + }, + "optional": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/optional/-/optional-0.1.4.tgz", + "integrity": "sha512-gtvrrCfkE08wKcgXaVwQVgwEQ8vel2dc5DDBn9RLQZ3YtmtkBss6A2HY6BnJH4N/4Ku97Ri/SF8sNWE2225WJw==", + "dev": true + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "os-browserify": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", + "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=", + "dev": true, + "optional": true + }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", + "dev": true, + "optional": true + }, + "os-locale": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", + "integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "dev": true, + "optional": true, + "requires": { + "execa": "^0.7.0", + "lcid": "^1.0.0", + "mem": "^1.1.0" + } + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, + "p-cancelable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", + "dev": true + }, + "p-event": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", + "integrity": "sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==", + "dev": true, + "requires": { + "p-timeout": "^3.1.0" + } + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "dev": true + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "optional": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "optional": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-timeout": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", + "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", + "dev": true, + "requires": { + "p-finally": "^1.0.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true, + "optional": true + }, + "package-json": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", + "dev": true, + "requires": { + "got": "^9.6.0", + "registry-auth-token": "^4.0.0", + "registry-url": "^5.0.0", + "semver": "^6.2.0" + }, + "dependencies": { + "got": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "dev": true, + "requires": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, + "parse-asn1": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", + "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", + "dev": true, + "optional": true, + "requires": { + "asn1.js": "^5.2.0", + "browserify-aes": "^1.0.0", + "evp_bytestokey": "^1.0.0", + "pbkdf2": "^3.0.3", + "safe-buffer": "^5.1.1" + } + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "optional": true, + "requires": { + "error-ex": "^1.2.0" + } + }, + "parse-passwd": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", + "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=", + "dev": true, + "optional": true + }, + "parseqs": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.6.tgz", + "integrity": "sha512-jeAGzMDbfSHHA091hr0r31eYfTig+29g3GKKE/PPbEQ65X0lmMwlEoqmhzu0iztID5uJpZsFlUPDP8ThPL7M8w==", + "dev": true + }, + "parseuri": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.6.tgz", + "integrity": "sha512-AUjen8sAkGgao7UyCX6Ahv0gIK2fABKmYjvP4xmy5JaKvcbTRueIqIPHLAfq30xJddqSE033IOMUSOMCcK3Sow==", + "dev": true + }, + "pascalcase": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", + "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", + "dev": true, + "optional": true + }, + "path-browserify": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", + "integrity": "sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==", + "dev": true, + "optional": true + }, + "path-dirname": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", + "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", + "dev": true, + "optional": true + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true, + "optional": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, + "path-loader": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", + "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", + "dev": true, + "requires": { + "native-promise-only": "^0.8.1", + "superagent": "^3.8.3" + } + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "optional": true + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + }, + "path2": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/path2/-/path2-0.1.0.tgz", + "integrity": "sha1-Y5golCzb2kSkGkWwdK6Ic0g7Tvo=", + "dev": true + }, + "pbkdf2": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "dev": true, + "optional": true, + "requires": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "peek-readable": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-4.0.1.tgz", + "integrity": "sha512-7qmhptnR0WMSpxT5rMHG9bW/mYSR1uqaPFj2MHvT+y/aOUu6msJijpKt5SkTDKySwg65OWG2JwTMBlgcbwMHrQ==", + "dev": true + }, + "pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", + "dev": true + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true + }, + "picomatch": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "dev": true + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "requires": { + "pinkie": "^2.0.0" + } + }, + "posix-character-classes": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", + "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", + "dev": true, + "optional": true + }, + "prebuild-install": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-5.3.0.tgz", + "integrity": "sha512-aaLVANlj4HgZweKttFNUVNRxDukytuIuxeK2boIMHjagNJCiVKWFsKF4tCE3ql3GbrD2tExPQ7/pwtEJcHNZeg==", + "dev": true, + "optional": true, + "requires": { + "detect-libc": "^1.0.3", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "napi-build-utils": "^1.0.1", + "node-abi": "^2.7.0", + "noop-logger": "^0.1.1", + "npmlog": "^4.0.1", + "os-homedir": "^1.0.1", + "pump": "^2.0.1", + "rc": "^1.2.7", + "simple-get": "^2.7.0", + "tar-fs": "^1.13.0", + "tunnel-agent": "^0.6.0", + "which-pm-runs": "^1.0.0" + }, + "dependencies": { + "pump": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", + "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "dev": true, + "optional": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + } + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", + "dev": true + }, + "prettyoutput": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/prettyoutput/-/prettyoutput-1.2.0.tgz", + "integrity": "sha512-G2gJwLzLcYS+2m6bTAe+CcDpwak9YpcvpScI0tE4WYb2O3lEZD/YywkMNpGqsSx5wttGvh2UXaKROTKKCyM2dw==", + "dev": true, + "requires": { + "colors": "1.3.x", + "commander": "2.19.x", + "lodash": "4.17.x" + } + }, + "printj": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", + "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==", + "dev": true + }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "dev": true, + "optional": true + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "process-utils": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/process-utils/-/process-utils-4.0.0.tgz", + "integrity": "sha512-fMyMQbKCxX51YxR7YGCzPjLsU3yDzXFkP4oi1/Mt5Ixnk7GO/7uUTj8mrCHUwuvozWzI+V7QSJR9cZYnwNOZPg==", + "dev": true, + "requires": { + "ext": "^1.4.0", + "fs2": "^0.3.9", + "memoizee": "^0.4.14", + "type": "^2.1.0" + } + }, + "promise-queue": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/promise-queue/-/promise-queue-2.2.5.tgz", + "integrity": "sha1-L29ffA9tCBCelnZZx5uIqe1ek7Q=", + "dev": true + }, + "protobufjs": { + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.2.tgz", + "integrity": "sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==", + "dev": true, + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": ">=13.7.0", + "long": "^4.0.0" + }, + "dependencies": { + "long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==", + "dev": true + } + } + }, + "prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=", + "dev": true, + "optional": true + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", + "dev": true, + "optional": true + }, + "psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true + }, + "public-encrypt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", + "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", + "dev": true, + "optional": true, + "requires": { + "bn.js": "^4.1.0", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "parse-asn1": "^5.0.0", + "randombytes": "^2.0.1", + "safe-buffer": "^5.1.2" + }, + "dependencies": { + "bn.js": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", + "dev": true, + "optional": true + } + } + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "qrcode-terminal": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz", + "integrity": "sha512-EXtzRZmC+YGmGlDFbXKxQiMZNwCLEO6BANKXG4iCtSIM0yqc/pappSx3RIKr4r0uh5JsBckOXeKrB3Iz7mdQpQ==", + "dev": true + }, + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "dev": true + }, + "querystring": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "dev": true + }, + "querystring-es3": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=", + "dev": true, + "optional": true + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, + "quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true + }, + "ramda": { + "version": "0.26.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz", + "integrity": "sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "randomfill": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", + "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", + "dev": true, + "optional": true, + "requires": { + "randombytes": "^2.0.5", + "safe-buffer": "^5.1.0" + } + }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + } + } + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "optional": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + }, + "dependencies": { + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "optional": true, + "requires": { + "pify": "^2.0.0" + } + } + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "optional": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "readable-web-to-node-stream": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", + "integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", + "dev": true, + "requires": { + "readable-stream": "^3.6.0" + } + }, + "readdir-glob": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.1.tgz", + "integrity": "sha512-91/k1EzZwDx6HbERR+zucygRFfiPl2zkIYZtv3Jjr6Mn7SkKcVct8aVO+sSRiGMc6fLf72du3d92/uY63YPdEA==", + "dev": true, + "requires": { + "minimatch": "^3.0.4" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "regex-not": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", + "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "dev": true, + "optional": true, + "requires": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + } + }, + "regexp.prototype.flags": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz", + "integrity": "sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } + }, + "registry-auth-token": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", + "dev": true, + "requires": { + "rc": "^1.2.8" + } + }, + "registry-url": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", + "dev": true, + "requires": { + "rc": "^1.2.8" + } + }, + "remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", + "dev": true, + "optional": true + }, + "repeat-element": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", + "integrity": "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==", + "dev": true, + "optional": true + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true, + "optional": true + }, + "replaceall": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/replaceall/-/replaceall-0.1.6.tgz", + "integrity": "sha1-gdgax663LX9cSUKt8ml6MiBojY4=", + "dev": true + }, + "request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "dev": true, + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "dependencies": { + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + } + } + }, + "request-promise-core": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.4.tgz", + "integrity": "sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==", + "dev": true, + "requires": { + "lodash": "^4.17.19" + } + }, + "request-promise-native": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.9.tgz", + "integrity": "sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==", + "dev": true, + "requires": { + "request-promise-core": "1.1.4", + "stealthy-require": "^1.1.1", + "tough-cookie": "^2.3.3" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "optional": true + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", + "dev": true, + "optional": true + }, + "resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "optional": true, + "requires": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + } + }, + "resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": true + }, + "resolve-url": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", + "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", + "dev": true, + "optional": true + }, + "responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", + "dev": true, + "requires": { + "lowercase-keys": "^1.0.0" + } + }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, + "ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "dev": true, + "optional": true + }, + "retry": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.10.1.tgz", + "integrity": "sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q=", + "dev": true + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "right-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", + "dev": true, + "optional": true, + "requires": { + "align-text": "^0.1.1" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "dev": true, + "optional": true, + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "run-parallel-limit": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz", + "integrity": "sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "rxjs": { + "version": "6.6.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", + "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "safe-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", + "dev": true, + "optional": true, + "requires": { + "ret": "~0.1.10" + } + }, + "safe-stable-stringify": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-1.1.1.tgz", + "integrity": "sha512-ERq4hUjKDbJfE4+XtZLFPCDi8Vb1JqaxAPTxWFLBx8XcAlf9Bda/ZJdVezs/NAfsMQScyIlUMx+Yeu7P7rx5jw==", + "dev": true + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=", + "dev": true + }, + "seek-bzip": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", + "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", + "dev": true, + "requires": { + "commander": "^2.8.1" + } + }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "serverless": { + "version": "2.64.1", + "resolved": "https://registry.npmjs.org/serverless/-/serverless-2.64.1.tgz", + "integrity": "sha512-9DErsV4ACg/2UkRoX2EYRkcDyRi3NBld/gexCeVnkmUunadSwnmtSBeVU8spvg+zc61b1vbusTHE4woqcd2gvw==", + "dev": true, + "requires": { + "@serverless/cli": "^1.5.2", + "@serverless/components": "^3.17.1", + "@serverless/dashboard-plugin": "^5.5.0", + "@serverless/platform-client": "^4.3.0", + "@serverless/utils": "^5.19.0", + "ajv": "^6.12.6", + "ajv-keywords": "^3.5.2", + "archiver": "^5.3.0", + "aws-sdk": "^2.1011.0", + "bluebird": "^3.7.2", + "boxen": "^5.1.2", + "cachedir": "^2.3.0", + "chalk": "^4.1.2", + "child-process-ext": "^2.1.1", + "ci-info": "^3.2.0", + "cli-progress-footer": "^2.1.1", + "d": "^1.0.1", + "dayjs": "^1.10.7", + "decompress": "^4.2.1", + "dotenv": "^10.0.0", + "dotenv-expand": "^5.1.0", + "essentials": "^1.1.1", + "ext": "^1.6.0", + "fastest-levenshtein": "^1.0.12", + "filesize": "^8.0.3", + "fs-extra": "^9.1.0", + "get-stdin": "^8.0.0", + "globby": "^11.0.4", + "got": "^11.8.2", + "graceful-fs": "^4.2.8", + "https-proxy-agent": "^5.0.0", + "is-docker": "^2.2.1", + "is-wsl": "^2.2.0", + "js-yaml": "^4.1.0", + "json-cycle": "^1.3.0", + "json-refs": "^3.0.15", + "lodash": "^4.17.21", + "memoizee": "^0.4.15", + "micromatch": "^4.0.4", + "ncjsm": "^4.2.0", + "node-fetch": "^2.6.5", + "object-hash": "^2.2.0", + "path2": "^0.1.0", + "process-utils": "^4.0.0", + "promise-queue": "^2.2.5", + "replaceall": "^0.1.6", + "semver": "^7.3.5", + "signal-exit": "^3.0.5", + "strip-ansi": "^6.0.1", + "tabtab": "^3.0.2", + "tar": "^6.1.11", + "timers-ext": "^0.1.7", + "type": "^2.5.0", + "untildify": "^4.0.0", + "uuid": "^8.3.2", + "yaml-ast-parser": "0.0.43" + }, + "dependencies": { + "@serverless/components": { + "version": "3.17.1", + "resolved": "https://registry.npmjs.org/@serverless/components/-/components-3.17.1.tgz", + "integrity": "sha512-Ra0VVpivEWB816ZAca4UCNzOxQqxveEp4h+RzUX5vaAsZrxpotPUFZi96w9yZGQk3OTxxscRqrsBLxGDtOu8SA==", + "dev": true, + "requires": { + "@serverless/platform-client": "^4.2.2", + "@serverless/platform-client-china": "^2.2.0", + "@serverless/utils": "^4.0.0", + "adm-zip": "^0.5.4", + "ansi-escapes": "^4.3.1", + "chalk": "^4.1.0", + "child-process-ext": "^2.1.1", + "chokidar": "^3.5.1", + "ci-info": "^3.2.0", + "dayjs": "^1.10.4", + "dotenv": "^8.2.0", + "fastest-levenshtein": "^1.0.12", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "got": "^11.8.2", + "graphlib": "^2.1.8", + "https-proxy-agent": "^5.0.0", + "inquirer-autocomplete-prompt": "^1.3.0", + "js-yaml": "^3.14.1", + "memoizee": "^0.4.14", + "minimist": "^1.2.5", + "open": "^7.3.1", + "prettyoutput": "^1.2.0", + "ramda": "^0.27.1", + "semver": "^7.3.4", + "strip-ansi": "^6.0.0", + "tencent-serverless-http": "^1.3.1", + "traverse": "^0.6.6", + "uuid": "^8.3.2" + }, + "dependencies": { + "@serverless/utils": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@serverless/utils/-/utils-4.1.0.tgz", + "integrity": "sha512-cl5uPaGg72z0sCUpF0zsOhwYYUV72Gxc1FwFfxltO8hSvMeFDvwD7JrNE4kHcIcKRjwPGbSH0fdVPUpErZ8Mog==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "ci-info": "^3.1.1", + "inquirer": "^7.3.3", + "js-yaml": "^4.1.0", + "jwt-decode": "^3.1.2", + "lodash": "^4.17.21", + "ncjsm": "^4.1.0", + "type": "^2.5.0", + "uuid": "^8.3.2", + "write-file-atomic": "^3.0.3" + }, + "dependencies": { + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + } + } + }, + "dotenv": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", + "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "dependencies": { + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + } + } + } + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", + "dev": true + }, + "ramda": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz", + "integrity": "sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==", + "dev": true + }, + "write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + } + } + }, + "serverless-azure-functions": { + "version": "1.0.2-22", + "resolved": "https://registry.npmjs.org/serverless-azure-functions/-/serverless-azure-functions-1.0.2-22.tgz", + "integrity": "sha512-diKlCH2DN2OjP/j7sfPHdB+DSwJgX5/L175jKvA4ttUbefWmyK9M3Ul4QohOPixnPWhKEmR6Y2AOt69DOP2rIw==", + "dev": true, + "requires": { + "@azure/arm-apimanagement": "^5.1.0", + "@azure/arm-appservice": "^5.7.0", + "@azure/arm-keyvault": "^1.2.1", + "@azure/arm-resources": "^1.0.1", + "@azure/arm-storage": "^9.0.1", + "@azure/ms-rest-nodeauth": "^1.0.1", + "@azure/storage-blob": "^10.3.0", + "acorn": "^7.0.0", + "axios": "^0.18.0", + "azure-functions-core-tools": "^2.7.1575", + "deep-equal": "^1.0.1", + "js-yaml": "^3.13.1", + "jsonpath": "^1.0.1", + "lodash": "^4.16.6", + "md5": "^2.2.1", + "open": "^6.3.0", + "request": "^2.81.0", + "rimraf": "^2.7.1", + "semver": "^6.3.0", + "serverless-webpack": "^4.2.0", + "webpack": "^3.10.0", + "xml2js": "^0.4.22" + }, + "dependencies": { + "axios": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", + "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", + "dev": true, + "requires": { + "follow-redirects": "1.5.10", + "is-buffer": "^2.0.2" + } + }, + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "follow-redirects": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", + "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", + "dev": true, + "requires": { + "debug": "=3.1.0" + } + }, + "is-wsl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=", + "dev": true + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "open": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/open/-/open-6.4.0.tgz", + "integrity": "sha512-IFenVPgF70fSm1keSd2iDBIDIBZkroLeuffXq+wKTzTJlBpesFWojV9lb8mzOfaAzM1sr7HQHuO0vtV0zYekGg==", + "dev": true, + "requires": { + "is-wsl": "^1.1.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dev": true, + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true + } + } + }, + "serverless-webpack": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/serverless-webpack/-/serverless-webpack-4.4.0.tgz", + "integrity": "sha512-yezTtL5on2QQfv1wG2tWLEu1UOjV8mLco4HfE9nO3tUsHPgT3nyL0HXWMpdf21KhIEFnDbgrbu4UYZm4lPrSyA==", + "dev": true, + "optional": true, + "requires": { + "archiver": "^2.0.0", + "bluebird": "^3.5.0", + "fs-extra": "^4.0.2", + "glob": "^7.1.2", + "is-builtin-module": "^1.0.0", + "lodash": "^4.17.4", + "semver": "^5.4.1", + "ts-node": "^3.2.0" + }, + "dependencies": { + "archiver": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-2.1.1.tgz", + "integrity": "sha1-/2YrSnggFJSj7lRNOjP+dJZQnrw=", + "dev": true, + "optional": true, + "requires": { + "archiver-utils": "^1.3.0", + "async": "^2.0.0", + "buffer-crc32": "^0.2.1", + "glob": "^7.0.0", + "lodash": "^4.8.0", + "readable-stream": "^2.0.0", + "tar-stream": "^1.5.0", + "zip-stream": "^1.2.0" + } + }, + "archiver-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-1.3.0.tgz", + "integrity": "sha1-5QtMCccL89aA4y/xt5lOn52JUXQ=", + "dev": true, + "optional": true, + "requires": { + "glob": "^7.0.0", + "graceful-fs": "^4.1.0", + "lazystream": "^1.0.0", + "lodash": "^4.8.0", + "normalize-path": "^2.0.0", + "readable-stream": "^2.0.0" + } + }, + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dev": true, + "optional": true, + "requires": { + "lodash": "^4.17.14" + } + }, + "bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "optional": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "compress-commons": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-1.2.2.tgz", + "integrity": "sha1-UkqfEJA/OoEzibAiXSfEi7dRiQ8=", + "dev": true, + "optional": true, + "requires": { + "buffer-crc32": "^0.2.1", + "crc32-stream": "^2.0.0", + "normalize-path": "^2.0.0", + "readable-stream": "^2.0.0" + } + }, + "crc32-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-2.0.0.tgz", + "integrity": "sha1-483TtN8xaN10494/u8t7KX/pCPQ=", + "dev": true, + "optional": true, + "requires": { + "crc": "^3.4.4", + "readable-stream": "^2.0.0" + } + }, + "fs-extra": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz", + "integrity": "sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "optional": true, + "requires": { + "remove-trailing-separator": "^1.0.1" + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "optional": true + }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "optional": true, + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } + }, + "zip-stream": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-1.2.0.tgz", + "integrity": "sha1-qLxF9MG0lpnGuQGYuqyqzbzUugQ=", + "dev": true, + "optional": true, + "requires": { + "archiver-utils": "^1.3.0", + "compress-commons": "^1.2.0", + "lodash": "^4.8.0", + "readable-stream": "^2.0.0" + } + } + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true, + "optional": true + }, + "set-immediate-shim": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz", + "integrity": "sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E=", + "dev": true + }, + "set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "dev": true, + "optional": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=", + "dev": true, + "optional": true + }, + "sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dev": true, + "optional": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, + "shortid": { + "version": "2.2.16", + "resolved": "https://registry.npmjs.org/shortid/-/shortid-2.2.16.tgz", + "integrity": "sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g==", + "dev": true, + "requires": { + "nanoid": "^2.1.0" + } + }, + "signal-exit": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.5.tgz", + "integrity": "sha512-KWcOiKeQj6ZyXx7zq4YxSMgHRlod4czeBQZrPb8OKcohcqAXShm7E20kEMle9WBt26hFcAf0qLOcp5zmY7kOqQ==", + "dev": true + }, + "simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "dev": true, + "optional": true + }, + "simple-get": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-2.8.1.tgz", + "integrity": "sha512-lSSHRSw3mQNUGPAYRqo7xy9dhKmxFXIjLjp4KHpf99GEH2VH7C3AM+Qfx6du6jhfUi6Vm7XnbEVEf7Wb6N8jRw==", + "dev": true, + "optional": true, + "requires": { + "decompress-response": "^3.3.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "simple-git": { + "version": "2.47.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-2.47.0.tgz", + "integrity": "sha512-+HfCpqPBEZTPWiW9fPdbiPJDslM22MLqrktfzNKyI2pWaJa6DhfNVx4Mds04KZzVv5vjC9/ksw3y5gVf8ECWDg==", + "dev": true, + "requires": { + "@kwsites/file-exists": "^1.1.1", + "@kwsites/promise-deferred": "^1.1.1", + "debug": "^4.3.2" + } + }, + "simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "dev": true, + "requires": { + "is-arrayish": "^0.3.1" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "snapdragon": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", + "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", + "dev": true, + "optional": true, + "requires": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "optional": true, + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true, + "optional": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + } + } + }, + "snapdragon-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", + "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", + "dev": true, + "optional": true, + "requires": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "optional": true, + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + } + } + }, + "snapdragon-util": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", + "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.2.0" + } + }, + "snappy": { + "version": "6.3.5", + "resolved": "https://registry.npmjs.org/snappy/-/snappy-6.3.5.tgz", + "integrity": "sha512-lonrUtdp1b1uDn1dbwgQbBsb5BbaiLeKq+AGwOk2No+en+VvJThwmtztwulEQsLinRF681pBqib0NUZaizKLIA==", + "dev": true, + "optional": true, + "requires": { + "bindings": "^1.3.1", + "nan": "^2.14.1", + "prebuild-install": "5.3.0" + } + }, + "socket.io-client": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-2.4.0.tgz", + "integrity": "sha512-M6xhnKQHuuZd4Ba9vltCLT9oa+YvTsP8j9NcEiLElfIg8KeYPyhWOes6x4t+LTAC8enQbE/995AdTem2uNyKKQ==", + "dev": true, + "requires": { + "backo2": "1.0.2", + "component-bind": "1.0.0", + "component-emitter": "~1.3.0", + "debug": "~3.1.0", + "engine.io-client": "~3.5.0", + "has-binary2": "~1.0.2", + "indexof": "0.0.1", + "parseqs": "0.0.6", + "parseuri": "0.0.6", + "socket.io-parser": "~3.3.0", + "to-array": "0.1.4" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + } + } + }, + "socket.io-parser": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.3.2.tgz", + "integrity": "sha512-FJvDBuOALxdCI9qwRrO/Rfp9yfndRtc1jSgVgV8FDraihmSP/MLGD5PEuJrNfjALvcQ+vMDM/33AWOYP/JSjDg==", + "dev": true, + "requires": { + "component-emitter": "~1.3.0", + "debug": "~3.1.0", + "isarray": "2.0.1" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=", + "dev": true + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + } + } + }, + "sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=", + "dev": true, + "requires": { + "is-plain-obj": "^1.0.0" + } + }, + "sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha1-nLb09OnkgVWmqgZx7dM2/xR5oYg=", + "dev": true, + "requires": { + "sort-keys": "^1.0.0" + } + }, + "source-list-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==", + "dev": true, + "optional": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + }, + "source-map-resolve": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", + "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", + "dev": true, + "optional": true, + "requires": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, + "source-map-support": { + "version": "0.4.18", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.4.18.tgz", + "integrity": "sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA==", + "dev": true, + "optional": true, + "requires": { + "source-map": "^0.5.6" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + } + } + }, + "source-map-url": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz", + "integrity": "sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==", + "dev": true, + "optional": true + }, + "spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "optional": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true, + "optional": true + }, + "spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "optional": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz", + "integrity": "sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA==", + "dev": true, + "optional": true + }, + "split-string": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", + "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", + "dev": true, + "optional": true, + "requires": { + "extend-shallow": "^3.0.0" + } + }, + "split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dev": true, + "requires": { + "readable-stream": "^3.0.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "sprintf-kit": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/sprintf-kit/-/sprintf-kit-2.0.1.tgz", + "integrity": "sha512-2PNlcs3j5JflQKcg4wpdqpZ+AjhQJ2OZEo34NXDtlB0tIPG84xaaXhpA8XFacFiwjKA4m49UOYG83y3hbMn/gQ==", + "dev": true, + "requires": { + "es5-ext": "^0.10.53" + } + }, + "sshpk": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "dev": true, + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", + "dev": true + }, + "static-eval": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz", + "integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==", + "dev": true, + "requires": { + "escodegen": "^1.8.1" + } + }, + "static-extend": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", + "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", + "dev": true, + "optional": true, + "requires": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "optional": true, + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "stealthy-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", + "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", + "dev": true + }, + "stream-browserify": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.2.tgz", + "integrity": "sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==", + "dev": true, + "optional": true, + "requires": { + "inherits": "~2.0.1", + "readable-stream": "^2.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "stream-http": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-2.8.3.tgz", + "integrity": "sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==", + "dev": true, + "optional": true, + "requires": { + "builtin-status-codes": "^3.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.3.6", + "to-arraybuffer": "^1.0.0", + "xtend": "^4.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "stream-promise": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/stream-promise/-/stream-promise-3.2.0.tgz", + "integrity": "sha512-P+7muTGs2C8yRcgJw/PPt61q7O517tDHiwYEzMWo1GSBCcZedUMT/clz7vUNsSxFphIlJ6QUL4GexQKlfJoVtA==", + "dev": true, + "requires": { + "2-thenable": "^1.0.0", + "es5-ext": "^0.10.49", + "is-stream": "^1.1.0" + } + }, + "stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==", + "dev": true + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "optional": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true, + "optional": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "optional": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + } + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true, + "optional": true + }, + "strip-dirs": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", + "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", + "dev": true, + "requires": { + "is-natural-number": "^4.0.1" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", + "dev": true, + "optional": true + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, + "strip-outer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", + "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, + "strtok3": { + "version": "6.2.4", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-6.2.4.tgz", + "integrity": "sha512-GO8IcFF9GmFDvqduIspUBwCzCbqzegyVKIsSymcMgiZKeCfrN9SowtUoi8+b59WZMAjIzVZic/Ft97+pynR3Iw==", + "dev": true, + "requires": { + "@tokenizer/token": "^0.3.0", + "peek-readable": "^4.0.1" + } + }, + "superagent": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", + "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", + "dev": true, + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.2.0", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.3.5" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "tabtab": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tabtab/-/tabtab-3.0.2.tgz", + "integrity": "sha512-jANKmUe0sIQc/zTALTBy186PoM/k6aPrh3A7p6AaAfF6WPSbTx1JYeGIGH162btpH+mmVEXln+UxwViZHO2Jhg==", + "dev": true, + "requires": { + "debug": "^4.0.1", + "es6-promisify": "^6.0.0", + "inquirer": "^6.0.0", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "untildify": "^3.0.3" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cli-width": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==", + "dev": true + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + } + } + }, + "untildify": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-3.0.3.tgz", + "integrity": "sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA==", + "dev": true + } + } + }, + "tapable": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.2.9.tgz", + "integrity": "sha512-2wsvQ+4GwBvLPLWsNfLCDYGsW6xb7aeC6utq2Qh0PFwgEy7K7dsma9Jsmb2zSQj7GvYAyUGSntLtsv++GmgL1A==", + "dev": true, + "optional": true + }, + "tar": { + "version": "6.1.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", + "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", + "dev": true, + "requires": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^3.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "dependencies": { + "chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true + }, + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true + } + } + }, + "tar-fs": { + "version": "1.16.3", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.16.3.tgz", + "integrity": "sha512-NvCeXpYx7OsmOh8zIOP/ebG55zZmxLE0etfWRbWok+q2Qo8x/vOR/IJT1taADXPe+jsiu9axDb3X4B+iIgNlKw==", + "dev": true, + "optional": true, + "requires": { + "chownr": "^1.0.1", + "mkdirp": "^0.5.1", + "pump": "^1.0.0", + "tar-stream": "^1.1.2" + }, + "dependencies": { + "bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "optional": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "pump": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-1.0.3.tgz", + "integrity": "sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw==", + "dev": true, + "optional": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "optional": true, + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } + } + } + }, + "tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dev": true, + "requires": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + } + }, + "tencent-serverless-http": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/tencent-serverless-http/-/tencent-serverless-http-1.3.2.tgz", + "integrity": "sha512-HgIu9HuBdY0lx3jLKuicOSOrjmieklPh55x8ZmtuTnrZ5v1buAPUfLKBhTeBSz6e90ggyW+dPr5PWdz179kUkw==", + "dev": true, + "requires": { + "type-is": "^1.6.16" + } + }, + "text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==", + "dev": true + }, + "throat": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/throat/-/throat-5.0.0.tgz", + "integrity": "sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==", + "dev": true + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, + "timers-browserify": { + "version": "2.0.12", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", + "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", + "dev": true, + "optional": true, + "requires": { + "setimmediate": "^1.0.4" + } + }, + "timers-ext": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.7.tgz", + "integrity": "sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==", + "dev": true, + "requires": { + "es5-ext": "~0.10.46", + "next-tick": "1" + } + }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, + "to-array": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz", + "integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA=", + "dev": true + }, + "to-arraybuffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz", + "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", + "dev": true, + "optional": true + }, + "to-buffer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", + "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==", + "dev": true + }, + "to-object-path": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", + "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "to-readable-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", + "dev": true + }, + "to-regex": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", + "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "dev": true, + "optional": true, + "requires": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "token-types": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-4.1.1.tgz", + "integrity": "sha512-hD+QyuUAyI2spzsI0B7gf/jJ2ggR4RjkAo37j3StuePhApJUwcWDjnHDOFdIWYSwNR28H14hpwm4EI+V1Ted1w==", + "dev": true, + "requires": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + } + }, + "tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dev": true, + "requires": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "traverse": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.6.tgz", + "integrity": "sha1-y99WD9e5r2MlAv7UD5GMFX6pcTc=", + "dev": true + }, + "trim-repeated": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", + "integrity": "sha1-42RqLqTokTEr9+rObPsFOAvAHCE=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, + "triple-beam": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", + "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==", + "dev": true + }, + "ts-node": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-3.3.0.tgz", + "integrity": "sha1-wTxqMCTjC+EYDdUwOPwgkonUv2k=", + "dev": true, + "optional": true, + "requires": { + "arrify": "^1.0.0", + "chalk": "^2.0.0", + "diff": "^3.1.0", + "make-error": "^1.1.1", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "source-map-support": "^0.4.0", + "tsconfig": "^6.0.0", + "v8flags": "^3.0.0", + "yn": "^2.0.0" + }, + "dependencies": { + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "optional": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + } + } + }, + "tsconfig": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tsconfig/-/tsconfig-6.0.0.tgz", + "integrity": "sha1-aw6DdgA9evGGT434+J3QBZ/80DI=", + "dev": true, + "optional": true, + "requires": { + "strip-bom": "^3.0.0", + "strip-json-comments": "^2.0.0" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tty-browserify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=", + "dev": true, + "optional": true + }, + "tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "dev": true + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true + }, + "type": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/type/-/type-2.5.0.tgz", + "integrity": "sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw==", + "dev": true + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + }, + "type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dev": true, + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + } + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "requires": { + "is-typedarray": "^1.0.0" + } + }, + "uglify-js": { + "version": "2.8.29", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", + "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", + "dev": true, + "optional": true, + "requires": { + "source-map": "~0.5.1", + "uglify-to-browserify": "~1.0.0", + "yargs": "~3.10.0" + }, + "dependencies": { + "camelcase": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=", + "dev": true, + "optional": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + }, + "yargs": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", + "dev": true, + "optional": true, + "requires": { + "camelcase": "^1.0.2", + "cliui": "^2.1.0", + "decamelize": "^1.0.0", + "window-size": "0.1.0" + } + } + } + }, + "uglify-to-browserify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", + "dev": true, + "optional": true + }, + "uglifyjs-webpack-plugin": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz", + "integrity": "sha1-uVH0q7a9YX5m9j64kUmOORdj4wk=", + "dev": true, + "optional": true, + "requires": { + "source-map": "^0.5.6", + "uglify-js": "^2.8.29", + "webpack-sources": "^1.0.1" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + } + } + }, + "unbzip2-stream": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", + "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", + "dev": true, + "requires": { + "buffer": "^5.2.1", + "through": "^2.3.8" + } + }, + "underscore": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.1.tgz", + "integrity": "sha512-hzSoAVtJF+3ZtiFX0VgfFPHEDRm7Y/QPjGyNo4TVdnDTdft3tr8hEkD25a1jC+TjTuE7tkHGKkhwCgs9dgBB2g==", + "dev": true + }, + "uni-global": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/uni-global/-/uni-global-1.0.0.tgz", + "integrity": "sha512-WWM3HP+siTxzIWPNUg7hZ4XO8clKi6NoCAJJWnuRL+BAqyFXF8gC03WNyTefGoUXYc47uYgXxpKLIEvo65PEHw==", + "dev": true, + "requires": { + "type": "^2.5.0" + } + }, + "union-value": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", + "dev": true, + "optional": true, + "requires": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + } + }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true + }, + "unset-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", + "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", + "dev": true, + "optional": true, + "requires": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "dependencies": { + "has-value": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", + "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", + "dev": true, + "optional": true, + "requires": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "dependencies": { + "isobject": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", + "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", + "dev": true, + "optional": true, + "requires": { + "isarray": "1.0.0" + } + } + } + }, + "has-values": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", + "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", + "dev": true, + "optional": true + } + } + }, + "untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true + }, + "upath": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", + "dev": true, + "optional": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "urix": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", + "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", + "dev": true, + "optional": true + }, + "url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "dev": true, + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "dependencies": { + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true + }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "dev": true + } + } + }, + "url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", + "dev": true, + "requires": { + "prepend-http": "^2.0.0" + } + }, + "urlencode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/urlencode/-/urlencode-1.1.0.tgz", + "integrity": "sha1-HyuibwE8hfATP3o61v8nMK33y7c=", + "dev": true, + "requires": { + "iconv-lite": "~0.4.11" + } + }, + "use": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", + "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", + "dev": true, + "optional": true + }, + "util": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/util/-/util-0.11.1.tgz", + "integrity": "sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==", + "dev": true, + "optional": true, + "requires": { + "inherits": "2.0.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true, + "optional": true + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true + }, + "v8flags": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/v8flags/-/v8flags-3.2.0.tgz", + "integrity": "sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==", + "dev": true, + "optional": true, + "requires": { + "homedir-polyfill": "^1.0.1" + } + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "optional": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + }, + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + } + } + }, + "vm-browserify": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", + "dev": true, + "optional": true + }, + "watchpack": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", + "integrity": "sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==", + "dev": true, + "optional": true, + "requires": { + "chokidar": "^3.4.1", + "graceful-fs": "^4.1.2", + "neo-async": "^2.5.0", + "watchpack-chokidar2": "^2.0.1" + } + }, + "watchpack-chokidar2": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz", + "integrity": "sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==", + "dev": true, + "optional": true, + "requires": { + "chokidar": "^2.1.8" + }, + "dependencies": { + "anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, + "optional": true, + "requires": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + }, + "dependencies": { + "normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "optional": true, + "requires": { + "remove-trailing-separator": "^1.0.1" + } + } + } + }, + "binary-extensions": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", + "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "dev": true, + "optional": true + }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dev": true, + "optional": true, + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "dev": true, + "optional": true, + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", + "dev": true, + "optional": true, + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fsevents": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", + "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", + "dev": true, + "optional": true, + "requires": { + "bindings": "^1.5.0", + "nan": "^2.12.1" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "optional": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "optional": true, + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "is-binary-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", + "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "dev": true, + "optional": true, + "requires": { + "binary-extensions": "^1.0.0" + } + }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true, + "optional": true + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", + "dev": true, + "optional": true, + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "optional": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "optional": true + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dev": true, + "optional": true, + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "readdirp": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", + "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "dev": true, + "optional": true, + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + } + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "webpack": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-3.12.0.tgz", + "integrity": "sha512-Sw7MdIIOv/nkzPzee4o0EdvCuPmxT98+vVpIvwtcwcF1Q4SDSNp92vwcKc4REe7NItH9f1S4ra9FuQ7yuYZ8bQ==", + "dev": true, + "optional": true, + "requires": { + "acorn": "^5.0.0", + "acorn-dynamic-import": "^2.0.0", + "ajv": "^6.1.0", + "ajv-keywords": "^3.1.0", + "async": "^2.1.2", + "enhanced-resolve": "^3.4.0", + "escope": "^3.6.0", + "interpret": "^1.0.0", + "json-loader": "^0.5.4", + "json5": "^0.5.1", + "loader-runner": "^2.3.0", + "loader-utils": "^1.1.0", + "memory-fs": "~0.4.1", + "mkdirp": "~0.5.0", + "node-libs-browser": "^2.0.0", + "source-map": "^0.5.3", + "supports-color": "^4.2.1", + "tapable": "^0.2.7", + "uglifyjs-webpack-plugin": "^0.4.6", + "watchpack": "^1.4.0", + "webpack-sources": "^1.0.1", + "yargs": "^8.0.2" + }, + "dependencies": { + "acorn": { + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", + "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", + "dev": true, + "optional": true + }, + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dev": true, + "optional": true, + "requires": { + "lodash": "^4.17.14" + } + }, + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", + "dev": true, + "optional": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "optional": true + }, + "supports-color": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", + "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", + "dev": true, + "optional": true, + "requires": { + "has-flag": "^2.0.0" + } + } + } + }, + "webpack-sources": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", + "integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", + "dev": true, + "optional": true, + "requires": { + "source-list-map": "^2.0.0", + "source-map": "~0.6.1" + } + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true, + "optional": true + }, + "which-pm-runs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", + "integrity": "sha1-Zws6+8VS4LVd9rd4DKdGFfI60cs=", + "dev": true, + "optional": true + }, + "wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "optional": true, + "requires": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "dev": true, + "requires": { + "string-width": "^4.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } + } + }, + "window-size": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=", + "dev": true, + "optional": true + }, + "winston": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.2.1.tgz", + "integrity": "sha512-zU6vgnS9dAWCEKg/QYigd6cgMVVNwyTzKs81XZtTFuRwJOcDdBg7AU0mXVyNbs7O5RH2zdv+BdNZUlx7mXPuOw==", + "dev": true, + "requires": { + "async": "^2.6.1", + "diagnostics": "^1.1.1", + "is-stream": "^1.1.0", + "logform": "^2.1.1", + "one-time": "0.0.4", + "readable-stream": "^3.1.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.3.0" + }, + "dependencies": { + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dev": true, + "requires": { + "lodash": "^4.17.14" + } + } + } + }, + "winston-transport": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.4.0.tgz", + "integrity": "sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw==", + "dev": true, + "requires": { + "readable-stream": "^2.3.7", + "triple-beam": "^1.2.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + } + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wordwrap": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=", + "dev": true, + "optional": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "ws": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.5.tgz", + "integrity": "sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w==", + "dev": true + }, + "xml2js": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", + "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", + "dev": true, + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~9.0.1" + } + }, + "xmlbuilder": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", + "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=", + "dev": true + }, + "xmldom": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/xmldom/-/xmldom-0.6.0.tgz", + "integrity": "sha512-iAcin401y58LckRZ0TkI4k0VSM1Qg0KGSc3i8rU+xrxe19A/BN1zHyVSJY7uoutVlaTSzYyk/v5AmkewAP7jtg==", + "dev": true + }, + "xmlhttprequest-ssl": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.6.3.tgz", + "integrity": "sha512-3XfeQE/wNkvrIktn2Kf0869fC0BN6UpydVasGIeSm2B1Llihf7/0UfZM+eCkOw3P7bP4+qPgqhm7ZoxuJtFU0Q==", + "dev": true + }, + "xpath.js": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/xpath.js/-/xpath.js-1.1.0.tgz", + "integrity": "sha512-jg+qkfS4K8E7965sqaUl8mRngXiKb3WZGfONgE18pr03FUQiuSV6G+Ej4tS55B+rIQSFEIw3phdVAQ4pPqNWfQ==", + "dev": true + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true + }, + "y18n": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz", + "integrity": "sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ==", + "dev": true, + "optional": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "yaml-ast-parser": { + "version": "0.0.43", + "resolved": "https://registry.npmjs.org/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz", + "integrity": "sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==", + "dev": true + }, + "yamljs": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/yamljs/-/yamljs-0.3.0.tgz", + "integrity": "sha512-C/FsVVhht4iPQYXOInoxUM/1ELSf9EsgKH34FofQOp6hwCPrW4vG4w5++TED3xRUo8gD7l0P1J1dLlDYzODsTQ==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "glob": "^7.0.5" + } + }, + "yargs": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-8.0.2.tgz", + "integrity": "sha1-YpmpBVsc78lp/355wdkY3Osiw2A=", + "dev": true, + "optional": true, + "requires": { + "camelcase": "^4.1.0", + "cliui": "^3.2.0", + "decamelize": "^1.1.1", + "get-caller-file": "^1.0.1", + "os-locale": "^2.0.0", + "read-pkg-up": "^2.0.0", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^2.0.0", + "which-module": "^2.0.0", + "y18n": "^3.2.1", + "yargs-parser": "^7.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true, + "optional": true + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "dev": true, + "optional": true + }, + "cliui": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", + "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "dev": true, + "optional": true, + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wrap-ansi": "^2.0.0" + }, + "dependencies": { + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "optional": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + } + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "optional": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true, + "optional": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "optional": true + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "optional": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "optional": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, + "optional": true, + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + }, + "dependencies": { + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "optional": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + } + } + } + } + }, + "yargs-parser": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-7.0.0.tgz", + "integrity": "sha1-jQrELxbqVd69MyyvTEA4s+P139k=", + "dev": true, + "optional": true, + "requires": { + "camelcase": "^4.1.0" + }, + "dependencies": { + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "dev": true, + "optional": true + } + } + }, + "yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", + "dev": true, + "requires": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "yeast": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz", + "integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=", + "dev": true + }, + "yn": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz", + "integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=", + "dev": true, + "optional": true + }, + "zip-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.0.tgz", + "integrity": "sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==", + "dev": true, + "requires": { + "archiver-utils": "^2.1.0", + "compress-commons": "^4.1.0", + "readable-stream": "^3.6.0" + } + } + } +} diff --git a/V2/package.json b/V2/package.json new file mode 100644 index 00000000..e77bd582 --- /dev/null +++ b/V2/package.json @@ -0,0 +1,17 @@ +{ + "name": "time-tracker-backend", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "offline": "serverless offline", + "deploy": "serverless deploy" + }, + "author": "", + "license": "ISC", + "devDependencies": { + "serverless": "^2.64.1", + "serverless-azure-functions": "1.0.2-22" + } +} diff --git a/V2/requirements.txt b/V2/requirements.txt new file mode 100644 index 00000000..41acba1d --- /dev/null +++ b/V2/requirements.txt @@ -0,0 +1,12 @@ +#Azure +azure.functions +azure-functions-worker + +# Tests +pytest + +# Mocking +pytest-mock + +# To create sample content in tests and API documentation +Faker==4.0.2 \ No newline at end of file diff --git a/V2/serverless.yml b/V2/serverless.yml new file mode 100644 index 00000000..16914dc4 --- /dev/null +++ b/V2/serverless.yml @@ -0,0 +1,48 @@ +service: azure-time-tracker + +frameworkVersion: "2" + +provider: + name: azure + region: westus2 + runtime: python3.8 + os: linux + resourceGroup: time-tracker-wus2-dev-nodejs-functions-rg + stage: dev + functionApp: + name: sls-time-tracker + + appInsights: + name: sls-time-tracker-ai + + storageAccount: + name: slstimetracker + +plugins: + - serverless-azure-functions + +package: + patterns: + - "!env/**" + - "!.env/**" + - "!local.settings.json" + - "!.vscode/**" + - "!__pycache__/**" + - "!node_modules/**" + - "!.python_packages/**" + - "!.funcignore" + - "!package.json" + - "!package-lock.json" + - "!.gitignore" + - "!.git/**" + +functions: + get_activities: + handler: time_entries/interface.get_activities + events: + - http: true + x-azure-settings: + methods: + - GET + route: activities/{id:?} + authLevel: anonymous diff --git a/V2/source/entry_points/flask_api/__init__.py b/V2/source/entry_points/flask_api/__init__.py deleted file mode 100644 index 65fbcb89..00000000 --- a/V2/source/entry_points/flask_api/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -from flask import Flask -from flask_wtf.csrf import CSRFProtect -from flask_restplus import Namespace, Resource, Api -from http import HTTPStatus -from . import activities_endpoints - -csrf = CSRFProtect() - - -def create_app(test_config=None): - app = Flask(__name__) - csrf.init_app(app) - - api = Api( - app, - version='1.0', - title='Time Tracker API', - description='API for the TimeTracker project', - ) - - if test_config is not None: - app.config.from_mapping(test_config) - - activities_namespace = Namespace('activities', description='Endpoint for activities') - activities_namespace.route('/')(activities_endpoints.Activities) - activities_namespace.route('/')(activities_endpoints.Activity) - - api.add_namespace(activities_namespace) - - return app diff --git a/V2/source/entry_points/flask_api/activities_endpoints.py b/V2/source/entry_points/flask_api/activities_endpoints.py deleted file mode 100644 index 3dce2a6a..00000000 --- a/V2/source/entry_points/flask_api/activities_endpoints.py +++ /dev/null @@ -1,31 +0,0 @@ -from V2.source.daos.activities_json_dao import ActivitiesJsonDao -from V2.source.services.activity_service import ActivityService -from V2.source import use_cases -from flask_restplus import Resource -from http import HTTPStatus - -JSON_PATH = './V2/source/activities_data.json' - - -class Activities(Resource): - def get(self): - activities = use_cases.GetActivitiesUseCase( - create_activity_service(JSON_PATH) - ) - return [activity.__dict__ for activity in activities.get_activities()] - - -class Activity(Resource): - def get(self, activity_id: str): - try: - activity = use_cases.GetActivityUseCase( - create_activity_service(JSON_PATH) - ) - return activity.get_activity_by_id(activity_id).__dict__ - except AttributeError: - return {'message': 'Activity not found'}, HTTPStatus.NOT_FOUND - - -def create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) diff --git a/V2/tests/api/api_fixtures.py b/V2/tests/api/api_fixtures.py new file mode 100644 index 00000000..21b58021 --- /dev/null +++ b/V2/tests/api/api_fixtures.py @@ -0,0 +1,41 @@ +import json +import pytest +import shutil + + +@pytest.fixture +def create_temp_activities(tmpdir_factory): + temporary_directory = tmpdir_factory.mktemp("tmp") + json_file = temporary_directory.join("activities.json") + activities = [ + { + 'id': 'c61a4a49-3364-49a3-a7f7-0c5f2d15072b', + 'name': 'Development', + 'description': 'Development', + 'deleted': 'b4327ba6-9f96-49ee-a9ac-3c1edf525172', + 'status': 'active', + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + { + 'id': '94ec92e2-a500-4700-a9f6-e41eb7b5507c', + 'name': 'Management', + 'description': 'Description of management', + 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', + 'status': 'active', + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + { + 'id': 'd45c770a-b1a0-4bd8-a713-22c01a23e41b', + 'name': 'Operations', + 'description': 'Operation activities performed.', + 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', + 'status': 'active', + 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', + }, + ] + + with open(json_file, 'w') as outfile: + json.dump(activities, outfile) + + yield activities, json_file + shutil.rmtree(temporary_directory) diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py new file mode 100644 index 00000000..99b74020 --- /dev/null +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -0,0 +1,37 @@ +from time_entries._application._activities import _get_activities as activities +import azure.functions as func +import json +import typing + + +def test__activity_azure_endpoint__returns_all_activities( + create_temp_activities, +): + activities_json, tmp_directory = create_temp_activities + activities.JSON_PATH = tmp_directory + req = func.HttpRequest(method='GET', body=None, url='/api/activities') + + response = activities.get_activities(req) + activities_json_data = response.get_body().decode("utf-8") + + assert response.status_code == 200 + assert activities_json_data == json.dumps(activities_json) + + +def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( + create_temp_activities, +): + activities_json, tmp_directory = create_temp_activities + activities.JSON_PATH = tmp_directory + req = func.HttpRequest( + method='GET', + body=None, + url='/api/activities/', + route_params={"id": activities_json[0]['id']}, + ) + + response = activities.get_activities(req) + activitiy_json_data = response.get_body().decode("utf-8") + + assert response.status_code == 200 + assert activitiy_json_data == json.dumps(activities_json[0]) diff --git a/V2/tests/api/flask/activity_endpoints_test.py b/V2/tests/api/flask/activity_endpoints_test.py deleted file mode 100644 index 9ead6c98..00000000 --- a/V2/tests/api/flask/activity_endpoints_test.py +++ /dev/null @@ -1,86 +0,0 @@ -from V2.source.entry_points.flask_api import create_app -import json -import pytest -import typing -from flask.testing import FlaskClient -from http import HTTPStatus -from faker import Faker -import shutil - - -@pytest.fixture -def client(): - app = create_app({'TESTING': True}) - with app.test_client() as client: - yield client - - -@pytest.fixture -def activities_json(tmpdir_factory): - temporary_directory = tmpdir_factory.mktemp("tmp") - json_file = temporary_directory.join("activities.json") - activities = [ - { - 'id': 'c61a4a49-3364-49a3-a7f7-0c5f2d15072b', - 'name': 'Development', - 'description': 'Development', - 'deleted': 'b4327ba6-9f96-49ee-a9ac-3c1edf525172', - 'status': None, - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': '94ec92e2-a500-4700-a9f6-e41eb7b5507c', - 'name': 'Management', - 'description': None, - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': None, - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': 'd45c770a-b1a0-4bd8-a713-22c01a23e41b', - 'name': 'Operations', - 'description': 'Operation activities performed.', - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - ] - - with open(json_file, 'w') as outfile: - json.dump(activities, outfile) - - with open(json_file) as outfile: - activities_json = json.load(outfile) - - yield activities_json - shutil.rmtree(temporary_directory) - - -def test_test__activity_endpoint__returns_all_activities( - client: FlaskClient, activities_json: typing.List[dict] -): - response = client.get("/activities/") - json_data = json.loads(response.data) - - assert response.status_code == HTTPStatus.OK - assert json_data == activities_json - - -def test__activity_endpoint__returns_an_activity__when_activity_matches_its_id( - client: FlaskClient, activities_json: typing.List[dict] -): - response = client.get("/activities/%s" % activities_json[0]['id']) - json_data = json.loads(response.data) - - assert response.status_code == HTTPStatus.OK - assert json_data == activities_json[0] - - -def test__activity_endpoint__returns_a_not_found_status__when_no_activity_matches_its_id( - client: FlaskClient, -): - response = client.get("/activities/%s" % Faker().uuid4()) - json_data = json.loads(response.data) - - assert response.status_code == HTTPStatus.NOT_FOUND - assert json_data['message'] == 'Activity not found' diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py new file mode 100644 index 00000000..59065296 --- /dev/null +++ b/V2/tests/conftest.py @@ -0,0 +1 @@ +from tests.api.api_fixtures import create_temp_activities diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index d4f87b96..c24c8718 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -1,5 +1,5 @@ -from V2.source.daos.activities_json_dao import ActivitiesJsonDao -from V2.source.dtos.activity import Activity +from time_entries._infrastructure import ActivitiesJsonDao +from time_entries._domain import Activity from faker import Faker import json import pytest diff --git a/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py b/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py deleted file mode 100644 index 1ed41eeb..00000000 --- a/V2/tests/unit/entry_points/flask/activity_class_endpoint_test.py +++ /dev/null @@ -1,55 +0,0 @@ -from V2.source.entry_points.flask_api.activities_endpoints import ( - Activities, - Activity, -) -from V2.source import use_cases -from V2.source.dtos.activity import Activity as ActivityDTO -from pytest_mock import MockFixture -from faker import Faker -from werkzeug.exceptions import NotFound - -fake = Faker() - -valid_id = fake.uuid4() - -fake_activity = { - "name": fake.company(), - "description": fake.paragraph(), - "tenant_id": fake.uuid4(), - "id": valid_id, - "deleted": fake.date(), - "status": fake.boolean(), -} -fake_activity_dto = ActivityDTO(**fake_activity) - - -def test__activities_class__uses_the_get_activities_use_case__to_retrieve_activities( - mocker: MockFixture, -): - mocker.patch.object( - use_cases.GetActivitiesUseCase, - 'get_activities', - return_value=[], - ) - - activities_class_endpoint = Activities() - activities = activities_class_endpoint.get() - - assert use_cases.GetActivitiesUseCase.get_activities.called - assert [] == activities - - -def test__activity_class__uses_the_get_activity_by_id_use_case__to_retrieve__an_activity( - mocker: MockFixture, -): - mocker.patch.object( - use_cases.GetActivityUseCase, - 'get_activity_by_id', - return_value=fake_activity_dto, - ) - - activity_class_endpoint = Activity() - activity = activity_class_endpoint.get(valid_id) - - assert use_cases.GetActivityUseCase.get_activity_by_id.called - assert fake_activity == activity diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index e2e62b04..e21d09e5 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -1,4 +1,4 @@ -from V2.source.services.activity_service import ActivityService +from time_entries._domain import ActivityService from faker import Faker diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index 3cb5b664..a74cd862 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -1,5 +1,5 @@ -from V2.source.services.activity_service import ActivityService -from V2.source import use_cases +from time_entries._domain import ActivityService +from time_entries._domain import _use_cases from pytest_mock import MockFixture from faker import Faker @@ -14,7 +14,7 @@ def test__get_list_activities_function__uses_the_activities_service__to_retrieve get_all=mocker.Mock(return_value=expected_activities) ) - activities_use_case = use_cases.GetActivitiesUseCase(activity_service) + activities_use_case = _use_cases.GetActivitiesUseCase(activity_service) actual_activities = activities_use_case.get_activities() assert activity_service.get_all.called @@ -29,7 +29,7 @@ def test__get_activity_by_id_function__uses_the_activities_service__to_retrieve_ get_by_id=mocker.Mock(return_value=expected_activity) ) - activity_use_case = use_cases.GetActivityUseCase(activity_service) + activity_use_case = _use_cases.GetActivityUseCase(activity_service) actual_activity = activity_use_case.get_activity_by_id(fake.uuid4()) assert activity_service.get_by_id.called diff --git a/V2/time_entries/_application/__init__.py b/V2/time_entries/_application/__init__.py new file mode 100644 index 00000000..16b3ae9e --- /dev/null +++ b/V2/time_entries/_application/__init__.py @@ -0,0 +1 @@ +from ._activities import get_activities diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_entries/_application/_activities/__init__.py new file mode 100644 index 00000000..c2a4a84c --- /dev/null +++ b/V2/time_entries/_application/_activities/__init__.py @@ -0,0 +1 @@ +from ._get_activities import get_activities diff --git a/V2/time_entries/_application/_activities/_get_activities.py b/V2/time_entries/_application/_activities/_get_activities.py new file mode 100644 index 00000000..fc929e77 --- /dev/null +++ b/V2/time_entries/_application/_activities/_get_activities.py @@ -0,0 +1,55 @@ +from time_entries._infrastructure import ActivitiesJsonDao +from time_entries._domain import ActivityService, _use_cases + +import azure.functions as func +import json +import logging + +JSON_PATH = ( + 'time_entries/_infrastructure/_data_persistence/activities_data.json' +) + + +def get_activities(req: func.HttpRequest) -> func.HttpResponse: + logging.info( + 'Python HTTP trigger function processed a request to get an activity.' + ) + activity_id = req.route_params.get('id') + status_code = 200 + + if activity_id: + response = _get_by_id(activity_id) + if response == b'Not Found': + status_code = 404 + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _get_by_id(activity_id: str) -> str: + activity_use_case = _use_cases.GetActivityUseCase( + _create_activity_service(JSON_PATH) + ) + activity = activity_use_case.get_activity_by_id(activity_id) + + return json.dumps(activity.__dict__) if activity else b'Not Found' + + +def _get_all() -> str: + activities_use_case = _use_cases.GetActivitiesUseCase( + _create_activity_service(JSON_PATH) + ) + return json.dumps( + [ + activity.__dict__ + for activity in activities_use_case.get_activities() + ] + ) + + +def _create_activity_service(path: str): + activity_json = ActivitiesJsonDao(path) + return ActivityService(activity_json) diff --git a/V2/time_entries/_domain/__init__.py b/V2/time_entries/_domain/__init__.py new file mode 100644 index 00000000..69cc80f9 --- /dev/null +++ b/V2/time_entries/_domain/__init__.py @@ -0,0 +1,4 @@ +from ._entities import Activity +from ._persistence_contracts import ActivitiesDao +from ._services import ActivityService +from ._use_cases import GetActivitiesUseCase, GetActivityUseCase diff --git a/V2/time_entries/_domain/_entities/__init__.py b/V2/time_entries/_domain/_entities/__init__.py new file mode 100644 index 00000000..bf3eb08e --- /dev/null +++ b/V2/time_entries/_domain/_entities/__init__.py @@ -0,0 +1 @@ +from ._activity import Activity diff --git a/V2/source/dtos/activity.py b/V2/time_entries/_domain/_entities/_activity.py similarity index 100% rename from V2/source/dtos/activity.py rename to V2/time_entries/_domain/_entities/_activity.py diff --git a/V2/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_entries/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..3495445e --- /dev/null +++ b/V2/time_entries/_domain/_persistence_contracts/__init__.py @@ -0,0 +1 @@ +from ._activities_dao import ActivitiesDao diff --git a/V2/source/daos/activities_dao.py b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py similarity index 83% rename from V2/source/daos/activities_dao.py rename to V2/time_entries/_domain/_persistence_contracts/_activities_dao.py index 11cfb0f9..4c19fb42 100644 --- a/V2/source/daos/activities_dao.py +++ b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py @@ -1,4 +1,4 @@ -from V2.source.dtos.activity import Activity +from time_entries._domain import Activity import abc import typing diff --git a/V2/time_entries/_domain/_services/__init__.py b/V2/time_entries/_domain/_services/__init__.py new file mode 100644 index 00000000..1a9befa8 --- /dev/null +++ b/V2/time_entries/_domain/_services/__init__.py @@ -0,0 +1 @@ +from ._activity import ActivityService diff --git a/V2/source/services/activity_service.py b/V2/time_entries/_domain/_services/_activity.py similarity index 77% rename from V2/source/services/activity_service.py rename to V2/time_entries/_domain/_services/_activity.py index fdba3390..104a150c 100644 --- a/V2/source/services/activity_service.py +++ b/V2/time_entries/_domain/_services/_activity.py @@ -1,5 +1,4 @@ -from V2.source.daos.activities_dao import ActivitiesDao -from V2.source.dtos.activity import Activity +from time_entries._domain import ActivitiesDao, Activity import typing diff --git a/V2/source/use_cases/__init__.py b/V2/time_entries/_domain/_use_cases/__init__.py similarity index 100% rename from V2/source/use_cases/__init__.py rename to V2/time_entries/_domain/_use_cases/__init__.py diff --git a/V2/source/use_cases/_get_activities_use_case.py b/V2/time_entries/_domain/_use_cases/_get_activities_use_case.py similarity index 70% rename from V2/source/use_cases/_get_activities_use_case.py rename to V2/time_entries/_domain/_use_cases/_get_activities_use_case.py index 16bd937b..1262ff14 100644 --- a/V2/source/use_cases/_get_activities_use_case.py +++ b/V2/time_entries/_domain/_use_cases/_get_activities_use_case.py @@ -1,5 +1,4 @@ -from V2.source.services.activity_service import ActivityService -from V2.source.dtos.activity import Activity +from time_entries._domain import ActivityService, Activity import typing diff --git a/V2/source/use_cases/_get_activity_by_id_use_case.py b/V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py similarity index 69% rename from V2/source/use_cases/_get_activity_by_id_use_case.py rename to V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py index 3f63b9df..65ce104f 100644 --- a/V2/source/use_cases/_get_activity_by_id_use_case.py +++ b/V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py @@ -1,5 +1,4 @@ -from V2.source.services.activity_service import ActivityService -from V2.source.dtos.activity import Activity +from time_entries._domain import ActivityService, Activity class GetActivityUseCase: diff --git a/V2/time_entries/_infrastructure/__init__.py b/V2/time_entries/_infrastructure/__init__.py new file mode 100644 index 00000000..df144da6 --- /dev/null +++ b/V2/time_entries/_infrastructure/__init__.py @@ -0,0 +1 @@ +from ._data_persistence import ActivitiesJsonDao diff --git a/V2/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_entries/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..802f35f4 --- /dev/null +++ b/V2/time_entries/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1 @@ +from ._activities_json_dao import ActivitiesJsonDao diff --git a/V2/source/daos/activities_json_dao.py b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py similarity index 92% rename from V2/source/daos/activities_json_dao.py rename to V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py index c86e2ec0..b6fa9010 100644 --- a/V2/source/daos/activities_json_dao.py +++ b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py @@ -1,5 +1,4 @@ -from V2.source.daos.activities_dao import ActivitiesDao -from V2.source.dtos.activity import Activity +from time_entries._domain import ActivitiesDao, Activity import dataclasses import json import typing diff --git a/V2/source/activities_data.json b/V2/time_entries/_infrastructure/_data_persistence/activities_data.json similarity index 100% rename from V2/source/activities_data.json rename to V2/time_entries/_infrastructure/_data_persistence/activities_data.json diff --git a/V2/time_entries/interface.py b/V2/time_entries/interface.py new file mode 100644 index 00000000..d82d680e --- /dev/null +++ b/V2/time_entries/interface.py @@ -0,0 +1 @@ +from ._application import get_activities diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index cba1f715..6fd17f94 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -38,6 +38,8 @@ PyJWT==1.7.1 #Azure msal==1.3.0 +azure-functions==1.7.2 +azure-functions-worker==1.1.9 # Time utils pytz==2019.3 From 6ba8320c6ddd6599679dfbbbaf9ac1dba9addb8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josu=C3=A9=20Ricardo=20Cando=20Obaco?= Date: Tue, 26 Oct 2021 16:16:04 -0500 Subject: [PATCH 26/74] feat: TT-367 V2 - Delete Activity (#330) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-356 Read activities with an azure endpoint * refactor: TT-356 Solving code smells from Sonarcloud * refactor: TT-356 change directory from files in source to azure_time_tracker * test: TT-356 Adding azure endpoint api test * feat: TT-358 Use serverless to create Azure endpoint * refactor: TT-358 Changing time tracker backend app skeleton * refactor: TT-358 Change name to the domain partitioning * refactor: TT-358 Change route of activities data json file for azure functions * refactor: TT-358 Change folder structure according to new app skeleton * feat: TT-358 Add Makefile to install time tracker backend * refactor: TT-358 Change api test to use create temp activities fixture * feat: TT-367 creation of the functionality to change the status of an activity * test: TT-367 unit test for activity service * test: TT-367 unit test for delete activity use case * test: TT-367 integration test for activities json dao * test: TT-367 api test for endpoint to delete an activity Co-authored-by: Andrés Soto --- V2/serverless.yml | 10 ++++++ .../azure/activity_azure_endpoints_test.py | 32 ++++++++++++++--- .../daos/activities_json_dao_test.py | 36 ++++++++++++++++++- .../unit/services/activity_service_test.py | 15 ++++++++ .../use_cases/activities_use_case_test.py | 19 ++++++++-- V2/time_entries/_application/__init__.py | 1 + .../_application/_activities/__init__.py | 1 + .../_activities/_delete_activity.py | 36 +++++++++++++++++++ .../_persistence_contracts/_activities_dao.py | 4 +++ .../_domain/_services/_activity.py | 3 ++ .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_delete_activity_use_case.py | 9 +++++ .../_data_persistence/_activities_json_dao.py | 26 ++++++++++++++ V2/time_entries/interface.py | 1 + 14 files changed, 186 insertions(+), 8 deletions(-) create mode 100644 V2/time_entries/_application/_activities/_delete_activity.py create mode 100644 V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index 16914dc4..5c08f749 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -46,3 +46,13 @@ functions: - GET route: activities/{id:?} authLevel: anonymous + + delete_activity: + handler: time_entries/interface.delete_activity + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: activities/{id} + authLevel: anonymous \ No newline at end of file diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 99b74020..824a52d4 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,4 +1,7 @@ -from time_entries._application._activities import _get_activities as activities +from time_entries._application._activities import ( + _get_activities, + _delete_activity, +) import azure.functions as func import json import typing @@ -8,10 +11,10 @@ def test__activity_azure_endpoint__returns_all_activities( create_temp_activities, ): activities_json, tmp_directory = create_temp_activities - activities.JSON_PATH = tmp_directory + _get_activities.JSON_PATH = tmp_directory req = func.HttpRequest(method='GET', body=None, url='/api/activities') - response = activities.get_activities(req) + response = _get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 @@ -22,7 +25,7 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it create_temp_activities, ): activities_json, tmp_directory = create_temp_activities - activities.JSON_PATH = tmp_directory + _get_activities.JSON_PATH = tmp_directory req = func.HttpRequest( method='GET', body=None, @@ -30,8 +33,27 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it route_params={"id": activities_json[0]['id']}, ) - response = activities.get_activities(req) + response = _get_activities.get_activities(req) activitiy_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 assert activitiy_json_data == json.dumps(activities_json[0]) + + +def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( + create_temp_activities, +): + activities_json, tmp_directory = create_temp_activities + _delete_activity.JSON_PATH = tmp_directory + req = func.HttpRequest( + method='DELETE', + body=None, + url='/api/activities/', + route_params={"id": activities_json[0]['id']}, + ) + + response = _delete_activity.delete_activity(req) + activity_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert activity_json_data['status'] == 'inactive' diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index c24c8718..0022900a 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -47,7 +47,7 @@ def test__get_by_id__returns_none__when_no_activity_matches_its_id( result = activities_json_dao.get_by_id(Faker().uuid4()) - assert result == None + assert result is None def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found( @@ -83,3 +83,37 @@ def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities( result = activities_json_dao.get_all() assert result == activities + + +def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activities = create_fake_activities( + [ + { + "name": "test_name", + "description": "test_description", + "tenant_id": "test_tenant_id", + "id": "test_id", + "deleted": "test_deleted", + "status": "test_status", + } + ] + ) + + activity_dto = activities.pop() + result = activities_json_dao.delete(activity_dto.id) + + assert result.status == 'inactive' + + +def test_delete__returns_none__when_no_activity_matching_its_id_is_found( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + create_fake_activities([]) + + result = activities_json_dao.delete(Faker().uuid4()) + + assert result is None diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index e21d09e5..5a400b4e 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -26,3 +26,18 @@ def test__get_by_id__uses_the_activity_dao__to_retrieve_one_activity(mocker): assert activity_dao.get_by_id.called assert expected_activity == actual_activity + + +def test__delete_activity__uses_the_activity_dao__to_change_activity_status( + mocker, +): + expected_activity = mocker.Mock() + activity_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_activity) + ) + + activity_service = ActivityService(activity_dao) + deleted_activity = activity_service.delete(Faker().uuid4()) + + assert activity_dao.delete.called + assert expected_activity == deleted_activity diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index a74cd862..dfdfcf2b 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -6,7 +6,7 @@ fake = Faker() -def test__get_list_activities_function__uses_the_activities_service__to_retrieve_activities( +def test__get_list_activities_function__uses_the_activity_service__to_retrieve_activities( mocker: MockFixture, ): expected_activities = mocker.Mock() @@ -21,7 +21,7 @@ def test__get_list_activities_function__uses_the_activities_service__to_retrieve assert expected_activities == actual_activities -def test__get_activity_by_id_function__uses_the_activities_service__to_retrieve_activity( +def test__get_activity_by_id_function__uses_the_activity_service__to_retrieve_activity( mocker: MockFixture, ): expected_activity = mocker.Mock() @@ -34,3 +34,18 @@ def test__get_activity_by_id_function__uses_the_activities_service__to_retrieve_ assert activity_service.get_by_id.called assert expected_activity == actual_activity + + +def test__delete_activity_function__uses_the_activity_service__to_change_activity_status( + mocker: MockFixture, +): + expected_activity = mocker.Mock() + activity_service = mocker.Mock( + delete=mocker.Mock(return_value=expected_activity) + ) + + activity_use_case = _use_cases.DeleteActivityUseCase(activity_service) + deleted_activity = activity_use_case.delete_activity(fake.uuid4()) + + assert activity_service.delete.called + assert expected_activity == deleted_activity diff --git a/V2/time_entries/_application/__init__.py b/V2/time_entries/_application/__init__.py index 16b3ae9e..cb958a05 100644 --- a/V2/time_entries/_application/__init__.py +++ b/V2/time_entries/_application/__init__.py @@ -1 +1,2 @@ from ._activities import get_activities +from ._activities import delete_activity diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_entries/_application/_activities/__init__.py index c2a4a84c..e42c3a12 100644 --- a/V2/time_entries/_application/_activities/__init__.py +++ b/V2/time_entries/_application/_activities/__init__.py @@ -1 +1,2 @@ from ._get_activities import get_activities +from ._delete_activity import delete_activity diff --git a/V2/time_entries/_application/_activities/_delete_activity.py b/V2/time_entries/_application/_activities/_delete_activity.py new file mode 100644 index 00000000..896c8a92 --- /dev/null +++ b/V2/time_entries/_application/_activities/_delete_activity.py @@ -0,0 +1,36 @@ +from time_entries._infrastructure import ActivitiesJsonDao +from time_entries._domain import ActivityService, _use_cases + +import azure.functions as func +import json +import logging + +JSON_PATH = ( + 'time_entries/_infrastructure/_data_persistence/activities_data.json' +) + + +def delete_activity(req: func.HttpRequest) -> func.HttpResponse: + logging.info( + 'Python HTTP trigger function processed a request to delete an activity.' + ) + activity_id = req.route_params.get('id') + response = _delete(activity_id) + status_code = 200 if response != b'Not found' else 404 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _delete(activity_id: str) -> str: + activity_use_case = _use_cases.DeleteActivityUseCase( + _create_activity_service(JSON_PATH) + ) + activity = activity_use_case.delete_activity(activity_id) + return json.dumps(activity.__dict__) if activity else b'Not found' + + +def _create_activity_service(path: str): + activity_json = ActivitiesJsonDao(path) + return ActivityService(activity_json) diff --git a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py index 4c19fb42..09de92c0 100644 --- a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py @@ -11,3 +11,7 @@ def get_by_id(self, id: str) -> Activity: @abc.abstractmethod def get_all(self) -> typing.List[Activity]: pass + + @abc.abstractmethod + def delete(self, id: str) -> Activity: + pass diff --git a/V2/time_entries/_domain/_services/_activity.py b/V2/time_entries/_domain/_services/_activity.py index 104a150c..b2294d9a 100644 --- a/V2/time_entries/_domain/_services/_activity.py +++ b/V2/time_entries/_domain/_services/_activity.py @@ -11,3 +11,6 @@ def get_by_id(self, activity_id: str) -> Activity: def get_all(self) -> typing.List[Activity]: return self.activities_dao.get_all() + + def delete(self, activity_id: str) -> Activity: + return self.activities_dao.delete(activity_id) diff --git a/V2/time_entries/_domain/_use_cases/__init__.py b/V2/time_entries/_domain/_use_cases/__init__.py index a937b03d..476930d0 100644 --- a/V2/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_entries/_domain/_use_cases/__init__.py @@ -1,2 +1,3 @@ from ._get_activities_use_case import GetActivitiesUseCase from ._get_activity_by_id_use_case import GetActivityUseCase +from ._delete_activity_use_case import DeleteActivityUseCase diff --git a/V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py b/V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py new file mode 100644 index 00000000..e13acaa5 --- /dev/null +++ b/V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py @@ -0,0 +1,9 @@ +from time_entries._domain import ActivityService, Activity + + +class DeleteActivityUseCase: + def __init__(self, activity_service: ActivityService): + self.activity_service = activity_service + + def delete_activity(self, id: str) -> Activity: + return self.activity_service.delete(id) diff --git a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py index b6fa9010..caa89179 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py +++ b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py @@ -25,6 +25,32 @@ def get_all(self) -> typing.List[Activity]: for activity in self.__get_activities_from_file() ] + def delete(self, activity_id: str) -> Activity: + activity = self.get_by_id(activity_id) + if activity: + activity_deleted = {**activity.__dict__, 'status': 'inactive'} + activities_updated = list( + map( + lambda activity: activity + if activity.get('id') != activity_id + else activity_deleted, + self.__get_activities_from_file(), + ) + ) + + try: + file = open(self.json_data_file_path, 'w') + json.dump(activities_updated, file) + file.close() + + return self.__create_activity_dto(activity_deleted) + + except FileNotFoundError: + return None + + else: + return None + def __get_activities_from_file(self) -> typing.List[dict]: try: file = open(self.json_data_file_path) diff --git a/V2/time_entries/interface.py b/V2/time_entries/interface.py index d82d680e..f1500529 100644 --- a/V2/time_entries/interface.py +++ b/V2/time_entries/interface.py @@ -1 +1,2 @@ from ._application import get_activities +from ._application import delete_activity From 22de108ef0bd7de0f130a697e5068cad308f76e2 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 27 Oct 2021 10:47:12 -0500 Subject: [PATCH 27/74] fix: TT-385 adding library Flask-SQLAlchemy (#332) --- requirements/time_tracker_api/dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index 302acb78..2e5aee81 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -7,6 +7,7 @@ # Tests pytest==5.2.0 +Flask_sqlalchemy # Mocking pytest-mock==2.0.0 From abec3f42adaf0c7a2916ba4b0ff8942f410092e8 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 27 Oct 2021 16:09:56 +0000 Subject: [PATCH 28/74] 0.40.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 13 +++++++++++++ time_tracker_api/version.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1aec3228..6b29b069 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,19 @@ +## v0.40.0 (2021-10-27) +### Feature +* TT-367 V2 - Delete Activity ([#330](https://github.com/ioet/time-tracker-backend/issues/330)) ([`6ba8320`](https://github.com/ioet/time-tracker-backend/commit/6ba8320c6ddd6599679dfbbbaf9ac1dba9addb8d)) +* TT-358 Use serverless to create Azure endpoint ([#328](https://github.com/ioet/time-tracker-backend/issues/328)) ([`464f281`](https://github.com/ioet/time-tracker-backend/commit/464f28193d986f12ccea6c785eee1f818b5989fb)) + +### Fix +* TT-385 adding library Flask-SQLAlchemy ([#332](https://github.com/ioet/time-tracker-backend/issues/332)) ([`22de108`](https://github.com/ioet/time-tracker-backend/commit/22de108ef0bd7de0f130a697e5068cad308f76e2)) +* TT-001 commit to bring the changes to production ([#327](https://github.com/ioet/time-tracker-backend/issues/327)) ([`1d65c1d`](https://github.com/ioet/time-tracker-backend/commit/1d65c1d65c5a29bb6330dc8d52ae1bd5c38003be)) +* TT-335 patch to give admin permissions to certain users ([#323](https://github.com/ioet/time-tracker-backend/issues/323)) ([`c0b51c9`](https://github.com/ioet/time-tracker-backend/commit/c0b51c9b3127c7d231448e038a713fcc6126c093)) + +### Documentation +* TT-000 test 2 release ([#326](https://github.com/ioet/time-tracker-backend/issues/326)) ([`7294e2e`](https://github.com/ioet/time-tracker-backend/commit/7294e2e14641ee45f408c593e768cc7f2e07e742)) + ## v0.39.1 (2021-10-06) ### Fix * TT-339 skip users with azureioet.onmicrosoft.com extension from user search ([#322](https://github.com/ioet/time-tracker-backend/issues/322)) ([`8b37d4a`](https://github.com/ioet/time-tracker-backend/commit/8b37d4a7a890b9e4880efedd19dc733e60c5e7cf)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index fd7ffa6b..eb9b6f12 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.39.1' +__version__ = '0.40.0' From 500a5d0261497ce9aa9a9040342fea94dbe70704 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Soto?= <41339889+EdansRocks@users.noreply.github.com> Date: Thu, 28 Oct 2021 15:11:26 -0500 Subject: [PATCH 29/74] feat: TT-366 V2 - PUT update activity (#331) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-366 Add update activities method * refactor: TT-366 Solving SonalCloud code smells Co-authored-by: Andrés Soto --- .gitignore | 4 -- V2/serverless.yml | 12 +++- .../azure/activity_azure_endpoints_test.py | 49 ++++++++++---- .../daos/activities_json_dao_test.py | 65 ++++++++++++------- .../unit/services/activity_service_test.py | 17 +++++ .../use_cases/activities_use_case_test.py | 17 +++++ V2/time_entries/_application/__init__.py | 1 + .../_application/_activities/__init__.py | 1 + .../_activities/_update_activity.py | 44 +++++++++++++ V2/time_entries/_domain/__init__.py | 6 +- .../_persistence_contracts/_activities_dao.py | 4 ++ .../_domain/_services/_activity.py | 3 + .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_update_activity_use_case.py | 11 ++++ .../_data_persistence/_activities_json_dao.py | 26 ++++++++ V2/time_entries/interface.py | 1 + 16 files changed, 218 insertions(+), 44 deletions(-) create mode 100644 V2/time_entries/_application/_activities/_update_activity.py create mode 100644 V2/time_entries/_domain/_use_cases/_update_activity_use_case.py diff --git a/.gitignore b/.gitignore index f754bfdf..a9edb626 100644 --- a/.gitignore +++ b/.gitignore @@ -29,10 +29,6 @@ node_modules # Serverless directories .serverless/ -# Azure Functions json config -host.json -local.settings.json - # Files generated for development .env timetracker-api-postman-collection.json diff --git a/V2/serverless.yml b/V2/serverless.yml index 5c08f749..223c8a33 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -55,4 +55,14 @@ functions: methods: - DELETE route: activities/{id} - authLevel: anonymous \ No newline at end of file + authLevel: anonymous + + update_activity: + handler: time_entries/interface.update_activity + events: + - http: true + x-azure-settings: + methods: + - PUT + route: activities/{id} + authLevel: anonymous diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 824a52d4..ef5ba3b0 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,20 +1,22 @@ -from time_entries._application._activities import ( - _get_activities, - _delete_activity, -) +from time_entries._application import _activities as activities +from faker import Faker + import azure.functions as func import json import typing +ACTIVITY_URL = '/api/activities/' + + def test__activity_azure_endpoint__returns_all_activities( create_temp_activities, ): activities_json, tmp_directory = create_temp_activities - _get_activities.JSON_PATH = tmp_directory - req = func.HttpRequest(method='GET', body=None, url='/api/activities') + activities._get_activities.JSON_PATH = tmp_directory + req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) - response = _get_activities.get_activities(req) + response = activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 @@ -25,15 +27,15 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it create_temp_activities, ): activities_json, tmp_directory = create_temp_activities - _get_activities.JSON_PATH = tmp_directory + activities._get_activities.JSON_PATH = tmp_directory req = func.HttpRequest( method='GET', body=None, - url='/api/activities/', + url=ACTIVITY_URL, route_params={"id": activities_json[0]['id']}, ) - response = _get_activities.get_activities(req) + response = activities.get_activities(req) activitiy_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 @@ -44,16 +46,37 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe create_temp_activities, ): activities_json, tmp_directory = create_temp_activities - _delete_activity.JSON_PATH = tmp_directory + activities._delete_activity.JSON_PATH = tmp_directory req = func.HttpRequest( method='DELETE', body=None, - url='/api/activities/', + url=ACTIVITY_URL, route_params={"id": activities_json[0]['id']}, ) - response = _delete_activity.delete_activity(req) + response = activities.delete_activity(req) activity_json_data = json.loads(response.get_body().decode("utf-8")) assert response.status_code == 200 assert activity_json_data['status'] == 'inactive' + + +def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( + create_temp_activities, +): + activities_json, tmp_directory = create_temp_activities + activities._update_activity.JSON_PATH = tmp_directory + activity_data = {"description": Faker().sentence()} + req = func.HttpRequest( + method='PUT', + body=json.dumps(activity_data).encode("utf-8"), + url=ACTIVITY_URL, + route_params={"id": activities_json[0]['id']}, + ) + + response = activities.update_activity(req) + activitiy_json_data = response.get_body().decode("utf-8") + new_activity = {**activities_json[0], **activity_data} + + assert response.status_code == 200 + assert activitiy_json_data == json.dumps(new_activity) diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index 0022900a..00ce99cf 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -6,6 +6,18 @@ import typing +fake_activities = [ + { + 'id': Faker().uuid4(), + 'name': Faker().user_name(), + 'description': Faker().sentence(), + 'deleted': Faker().uuid4(), + 'status': 'active', + 'tenant_id': Faker().uuid4(), + } +] + + @pytest.fixture(name='create_fake_activities') def _create_fake_activities(mocker) -> typing.List[Activity]: def _creator(activities): @@ -20,18 +32,7 @@ def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matche create_fake_activities, ): activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities( - [ - { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - ] - ) + activities = create_fake_activities(fake_activities) activity_dto = activities.pop() result = activities_json_dao.get_by_id(activity_dto.id) @@ -55,19 +56,7 @@ def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_acti ): activities_json_dao = ActivitiesJsonDao(Faker().file_path()) number_of_activities = 3 - activities = create_fake_activities( - [ - { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - ] - * number_of_activities - ) + activities = create_fake_activities(fake_activities * number_of_activities) result = activities_json_dao.get_all() @@ -117,3 +106,29 @@ def test_delete__returns_none__when_no_activity_matching_its_id_is_found( result = activities_json_dao.delete(Faker().uuid4()) assert result is None + + +def test_update__returns_an_activity_dto__when_found_one_activity_to_update( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activities = create_fake_activities(fake_activities) + activity_dto = activities.pop() + activity_data = {"description": Faker().sentence()} + + result = activities_json_dao.update(activity_dto.id, activity_data) + new_activity = {**activity_dto.__dict__, **activity_data} + + assert result == Activity(**new_activity) + + +def test_update__returns_none__when_doesnt_found_one_activity_to_update( + create_fake_activities, +): + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + create_fake_activities([]) + activity_data = {"description": Faker().sentence()} + + result = activities_json_dao.update('', activity_data) + + assert result == None diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index 5a400b4e..772b3e15 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -41,3 +41,20 @@ def test__delete_activity__uses_the_activity_dao__to_change_activity_status( assert activity_dao.delete.called assert expected_activity == deleted_activity + + +def test__update_activity__uses_the_activity_dao__to_update_one_activity( + mocker, +): + expected_activity = mocker.Mock() + activity_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_activity) + ) + activity_service = ActivityService(activity_dao) + + updated_activity = activity_service.update( + Faker().uuid4(), Faker().pydict() + ) + + assert activity_dao.update.called + assert expected_activity == updated_activity diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index dfdfcf2b..f3e9a38b 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -49,3 +49,20 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit assert activity_service.delete.called assert expected_activity == deleted_activity + + +def test__update_activity_function__uses_the_activities_service__to_update_an_activity( + mocker: MockFixture, +): + expected_activity = mocker.Mock() + activity_service = mocker.Mock( + update=mocker.Mock(return_value=expected_activity) + ) + + activity_use_case = _use_cases.UpdateActivityUseCase(activity_service) + updated_activity = activity_use_case.update_activity( + fake.uuid4(), fake.pydict() + ) + + assert activity_service.update.called + assert expected_activity == updated_activity diff --git a/V2/time_entries/_application/__init__.py b/V2/time_entries/_application/__init__.py index cb958a05..faa68527 100644 --- a/V2/time_entries/_application/__init__.py +++ b/V2/time_entries/_application/__init__.py @@ -1,2 +1,3 @@ from ._activities import get_activities from ._activities import delete_activity +from ._activities import update_activity diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_entries/_application/_activities/__init__.py index e42c3a12..3482a9c6 100644 --- a/V2/time_entries/_application/_activities/__init__.py +++ b/V2/time_entries/_application/_activities/__init__.py @@ -1,2 +1,3 @@ from ._get_activities import get_activities from ._delete_activity import delete_activity +from ._update_activity import update_activity diff --git a/V2/time_entries/_application/_activities/_update_activity.py b/V2/time_entries/_application/_activities/_update_activity.py new file mode 100644 index 00000000..2aff2881 --- /dev/null +++ b/V2/time_entries/_application/_activities/_update_activity.py @@ -0,0 +1,44 @@ +from time_entries._infrastructure import ActivitiesJsonDao +from time_entries._domain import ActivityService, Activity, _use_cases + +import azure.functions as func +import dataclasses +import json +import logging + +JSON_PATH = ( + 'time_entries/_infrastructure/_data_persistence/activities_data.json' +) + + +def update_activity(req: func.HttpRequest) -> func.HttpResponse: + logging.info( + 'Python HTTP trigger function processed a request to update an activity.' + ) + activity_id = req.route_params.get('id') + activity_data = req.get_json() if req.get_body() else {} + activity_keys = [field.name for field in dataclasses.fields(Activity)] + + if all(key in activity_keys for key in activity_data.keys()): + response = _update(activity_id, activity_data) + status_code = 200 + else: + response = b'Incorrect activity body' + status_code = 400 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _update(activity_id: str, activity_data: dict) -> str: + activity_use_case = _use_cases.UpdateActivityUseCase( + _create_activity_service(JSON_PATH) + ) + activity = activity_use_case.update_activity(activity_id, activity_data) + return json.dumps(activity.__dict__) if activity else b'Not Found' + + +def _create_activity_service(path: str): + activity_json = ActivitiesJsonDao(path) + return ActivityService(activity_json) diff --git a/V2/time_entries/_domain/__init__.py b/V2/time_entries/_domain/__init__.py index 69cc80f9..f1a97246 100644 --- a/V2/time_entries/_domain/__init__.py +++ b/V2/time_entries/_domain/__init__.py @@ -1,4 +1,8 @@ from ._entities import Activity from ._persistence_contracts import ActivitiesDao from ._services import ActivityService -from ._use_cases import GetActivitiesUseCase, GetActivityUseCase +from ._use_cases import ( + GetActivitiesUseCase, + GetActivityUseCase, + UpdateActivityUseCase, +) diff --git a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py index 09de92c0..d2f9e4c7 100644 --- a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py @@ -15,3 +15,7 @@ def get_all(self) -> typing.List[Activity]: @abc.abstractmethod def delete(self, id: str) -> Activity: pass + + @abc.abstractmethod + def update(self, id: str, new_activity: dict) -> Activity: + pass diff --git a/V2/time_entries/_domain/_services/_activity.py b/V2/time_entries/_domain/_services/_activity.py index b2294d9a..f4be7836 100644 --- a/V2/time_entries/_domain/_services/_activity.py +++ b/V2/time_entries/_domain/_services/_activity.py @@ -14,3 +14,6 @@ def get_all(self) -> typing.List[Activity]: def delete(self, activity_id: str) -> Activity: return self.activities_dao.delete(activity_id) + + def update(self, activity_id: str, new_activity: dict) -> Activity: + return self.activities_dao.update(activity_id, new_activity) diff --git a/V2/time_entries/_domain/_use_cases/__init__.py b/V2/time_entries/_domain/_use_cases/__init__.py index 476930d0..64c9bb6b 100644 --- a/V2/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_entries/_domain/_use_cases/__init__.py @@ -1,3 +1,4 @@ from ._get_activities_use_case import GetActivitiesUseCase from ._get_activity_by_id_use_case import GetActivityUseCase from ._delete_activity_use_case import DeleteActivityUseCase +from ._update_activity_use_case import UpdateActivityUseCase diff --git a/V2/time_entries/_domain/_use_cases/_update_activity_use_case.py b/V2/time_entries/_domain/_use_cases/_update_activity_use_case.py new file mode 100644 index 00000000..ea0bc3c5 --- /dev/null +++ b/V2/time_entries/_domain/_use_cases/_update_activity_use_case.py @@ -0,0 +1,11 @@ +from time_entries._domain import ActivityService, Activity + + +class UpdateActivityUseCase: + def __init__(self, activity_service: ActivityService): + self.activity_service = activity_service + + def update_activity( + self, activity_id: str, new_activity: dict + ) -> Activity: + return self.activity_service.update(activity_id, new_activity) diff --git a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py index caa89179..dfc41d04 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py +++ b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py @@ -51,6 +51,32 @@ def delete(self, activity_id: str) -> Activity: else: return None + def update(self, activity_id: str, new_activity: dict) -> Activity: + activity = self.get_by_id(activity_id) + if not activity: + return None + + new_activity = {**activity.__dict__, **new_activity} + + activities_updated = list( + map( + lambda activity: activity + if activity.get('id') != activity_id + else new_activity, + self.__get_activities_from_file(), + ) + ) + + try: + file = open(self.json_data_file_path, 'w') + json.dump(activities_updated, file) + file.close() + + return self.__create_activity_dto(new_activity) + + except FileNotFoundError: + return None + def __get_activities_from_file(self) -> typing.List[dict]: try: file = open(self.json_data_file_path) diff --git a/V2/time_entries/interface.py b/V2/time_entries/interface.py index f1500529..ffe31e51 100644 --- a/V2/time_entries/interface.py +++ b/V2/time_entries/interface.py @@ -1,2 +1,3 @@ from ._application import get_activities from ._application import delete_activity +from ._application import update_activity From 3c9ac26c7fac2b1da1687f2cc86c87c78f9f6528 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 28 Oct 2021 20:32:05 +0000 Subject: [PATCH 30/74] 0.41.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b29b069..8cd38258 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.41.0 (2021-10-28) +### Feature +* TT-366 V2 - PUT update activity ([#331](https://github.com/ioet/time-tracker-backend/issues/331)) ([`500a5d0`](https://github.com/ioet/time-tracker-backend/commit/500a5d0261497ce9aa9a9040342fea94dbe70704)) + ## v0.40.0 (2021-10-27) ### Feature * TT-367 V2 - Delete Activity ([#330](https://github.com/ioet/time-tracker-backend/issues/330)) ([`6ba8320`](https://github.com/ioet/time-tracker-backend/commit/6ba8320c6ddd6599679dfbbbaf9ac1dba9addb8d)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index eb9b6f12..9f86a39e 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.40.0' +__version__ = '0.41.0' From 6c3687b5ca60893be23b6ad663de3081af964272 Mon Sep 17 00:00:00 2001 From: Daniela Garcia <70675131+dsgarcia8@users.noreply.github.com> Date: Wed, 3 Nov 2021 12:46:13 -0500 Subject: [PATCH 31/74] feat: TT-365 / TT-369 POST V2 Activity (#329) * feat: TT-365 Method POST activity and create function serverless * fix: TT-365 resolve comments * fix: TT-365 format code Co-authored-by: Sandro Castillo --- V2/serverless.yml | 12 +++- .../azure/activity_azure_endpoints_test.py | 19 ++++++ .../daos/activities_json_dao_test.py | 15 +++++ .../unit/services/activity_service_test.py | 12 ++++ .../use_cases/activities_use_case_test.py | 15 ++++- V2/time_entries/_application/__init__.py | 1 + .../_application/_activities/__init__.py | 1 + .../_activities/_create_activity.py | 62 +++++++++++++++++++ .../_persistence_contracts/_activities_dao.py | 8 +++ .../_domain/_services/_activity.py | 3 + .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_create_activity_use_case.py | 11 ++++ .../_data_persistence/_activities_json_dao.py | 14 ++++- .../_data_persistence/activities_data.json | 1 - V2/time_entries/interface.py | 1 + requirements/time_tracker_api/dev.txt | 1 + requirements/time_tracker_api/prod.txt | 3 +- 17 files changed, 175 insertions(+), 5 deletions(-) create mode 100644 V2/time_entries/_application/_activities/_create_activity.py create mode 100644 V2/time_entries/_domain/_use_cases/_create_activity_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index 223c8a33..0d085c36 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -63,6 +63,16 @@ functions: - http: true x-azure-settings: methods: - - PUT + - PUT route: activities/{id} + authLevel: anonymous + + create_activity: + handler: time_entries/interface.create_activity + events: + - http: true + x-azure-settings: + methods: + - POST + route: activities/ authLevel: anonymous diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index ef5ba3b0..05b22801 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -80,3 +80,22 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act assert response.status_code == 200 assert activitiy_json_data == json.dumps(new_activity) + +def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( + create_temp_activities, + ): + activities_json, tmp_directory = create_temp_activities + activities._create_activity._JSON_PATH = tmp_directory + + activity_body = {'id': None, 'name': Faker().user_name(), 'description': Faker().sentence(),'deleted': Faker().uuid4() ,'status': 'active', 'tenant_id': Faker().uuid4()} + body = json.dumps(activity_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body= body, + url=ACTIVITY_URL, + ) + + response = activities.create_activity(req) + activitiy_json_data = response.get_body() + assert response.status_code == 201 + assert activitiy_json_data == body \ No newline at end of file diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index 00ce99cf..f2b0dacd 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -132,3 +132,18 @@ def test_update__returns_none__when_doesnt_found_one_activity_to_update( result = activities_json_dao.update('', activity_data) assert result == None + +def test_create_activity__returns_an_activity_dto__when_create_an_activity_that_matches_attributes(create_fake_activities): + create_fake_activities([]) + + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activity_data = { + "name": "test_name", + "description": "test_description", + "tenant_id": "test_tenant_id", + "id": "test_id", + "deleted": "test_deleted", + "status": "test_status", + } + result = activities_json_dao.create_activity(activity_data) + assert result == Activity(**activity_data) \ No newline at end of file diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index 772b3e15..9315d24f 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -58,3 +58,15 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( assert activity_dao.update.called assert expected_activity == updated_activity + +def test__create_activity__uses_the_activity_dao__to_create_an_activity(mocker): + expected_activity = mocker.Mock() + activity_dao = mocker.Mock( + create_activity=mocker.Mock(return_value=expected_activity) + ) + activity_service = ActivityService(activity_dao) + + actual_activity = activity_service.create_activity(Faker().pydict()) + + assert activity_dao.create_activity.called + assert expected_activity == actual_activity diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index f3e9a38b..793c32d6 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -36,6 +36,20 @@ def test__get_activity_by_id_function__uses_the_activity_service__to_retrieve_ac assert expected_activity == actual_activity +def test__create_activity_function__uses_the_activities_service__to_create_activity( + mocker: MockFixture, + ): + expected_activity = mocker.Mock() + activity_service = mocker.Mock( + create_activity=mocker.Mock(return_value=expected_activity) + ) + + activity_use_case = _use_cases.CreateActivityUseCase(activity_service) + actual_activity = activity_use_case.create_activity(fake.pydict()) + + assert activity_service.create_activity.called + assert expected_activity == actual_activity + def test__delete_activity_function__uses_the_activity_service__to_change_activity_status( mocker: MockFixture, ): @@ -50,7 +64,6 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit assert activity_service.delete.called assert expected_activity == deleted_activity - def test__update_activity_function__uses_the_activities_service__to_update_an_activity( mocker: MockFixture, ): diff --git a/V2/time_entries/_application/__init__.py b/V2/time_entries/_application/__init__.py index faa68527..c8f26492 100644 --- a/V2/time_entries/_application/__init__.py +++ b/V2/time_entries/_application/__init__.py @@ -1,3 +1,4 @@ from ._activities import get_activities from ._activities import delete_activity from ._activities import update_activity +from ._activities import create_activity diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_entries/_application/_activities/__init__.py index 3482a9c6..ab7d3844 100644 --- a/V2/time_entries/_application/_activities/__init__.py +++ b/V2/time_entries/_application/_activities/__init__.py @@ -1,3 +1,4 @@ from ._get_activities import get_activities from ._delete_activity import delete_activity from ._update_activity import update_activity +from ._create_activity import create_activity diff --git a/V2/time_entries/_application/_activities/_create_activity.py b/V2/time_entries/_application/_activities/_create_activity.py new file mode 100644 index 00000000..cb5f5472 --- /dev/null +++ b/V2/time_entries/_application/_activities/_create_activity.py @@ -0,0 +1,62 @@ +import json +import logging +import dataclasses +import typing + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure + +_JSON_PATH = ( + 'time_entries/_infrastructure/_data_persistence/activities_data.json' +) + + +def create_activity(req: func.HttpRequest) -> func.HttpResponse: + activity_dao = _infrastructure.ActivitiesJsonDao(_JSON_PATH) + activity_service = _domain.ActivityService(activity_dao) + use_case = _domain._use_cases.CreateActivityUseCase(activity_service) + + + activity_data = req.get_json() + + + validation_errors = _validate_activity(activity_data) + if validation_errors: + return func.HttpResponse( + body=validation_errors, status_code=400, mimetype="application/json" + ) + + + activity_to_create = _domain.Activity( + id= None, + name=activity_data['name'], + description=activity_data['description'], + status=activity_data['status'], + deleted=activity_data['deleted'], + tenant_id=activity_data['tenant_id'] + ) + + + created_activity = use_case.create_activity(activity_to_create.__dict__) + if not create_activity: + return func.HttpResponse( + body={'error': 'activity could not be created'}, + status_code=500, + mimetype="application/json", + ) + return func.HttpResponse( + body=json.dumps(created_activity.__dict__), + status_code=201, + mimetype="application/json" + ) + + +def _validate_activity(activity_data: dict) -> typing.List[str]: + activity_fields = [field.name for field in dataclasses.fields(_domain.Activity)] + missing_keys = [field for field in activity_fields if field not in activity_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] \ No newline at end of file diff --git a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py index d2f9e4c7..2037841d 100644 --- a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py @@ -19,3 +19,11 @@ def delete(self, id: str) -> Activity: @abc.abstractmethod def update(self, id: str, new_activity: dict) -> Activity: pass + + @abc.abstractmethod + def create_activity(self, activity_data: dict) -> Activity: + pass + + @abc.abstractmethod + def delete(self, id: str) -> Activity: + pass diff --git a/V2/time_entries/_domain/_services/_activity.py b/V2/time_entries/_domain/_services/_activity.py index f4be7836..8d29a7ab 100644 --- a/V2/time_entries/_domain/_services/_activity.py +++ b/V2/time_entries/_domain/_services/_activity.py @@ -17,3 +17,6 @@ def delete(self, activity_id: str) -> Activity: def update(self, activity_id: str, new_activity: dict) -> Activity: return self.activities_dao.update(activity_id, new_activity) + + def create_activity(self, activity_data: dict) -> Activity: + return self.activities_dao.create_activity(activity_data) diff --git a/V2/time_entries/_domain/_use_cases/__init__.py b/V2/time_entries/_domain/_use_cases/__init__.py index 64c9bb6b..642d2425 100644 --- a/V2/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_entries/_domain/_use_cases/__init__.py @@ -2,3 +2,4 @@ from ._get_activity_by_id_use_case import GetActivityUseCase from ._delete_activity_use_case import DeleteActivityUseCase from ._update_activity_use_case import UpdateActivityUseCase +from ._create_activity_use_case import CreateActivityUseCase diff --git a/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py b/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py new file mode 100644 index 00000000..a7f7a66e --- /dev/null +++ b/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py @@ -0,0 +1,11 @@ +from time_entries._domain import ActivityService, Activity +import typing + + +class CreateActivityUseCase: + def __init__(self, activity_service: ActivityService): + self.activity_service = activity_service + + def create_activity(self, activity_data: dict ) -> Activity: + return self.activity_service.create_activity(activity_data) + diff --git a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py index dfc41d04..ab8f5765 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py +++ b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py @@ -3,7 +3,6 @@ import json import typing - class ActivitiesJsonDao(ActivitiesDao): def __init__(self, json_data_file_path: str): self.json_data_file_path = json_data_file_path @@ -77,6 +76,19 @@ def update(self, activity_id: str, new_activity: dict) -> Activity: except FileNotFoundError: return None + def create_activity(self, activity_data: dict) -> Activity: + activities = self.__get_activities_from_file() + activities.append(activity_data) + + try: + with open(self.json_data_file_path, 'w') as outfile: + json.dump(activities, outfile) + + return self.__create_activity_dto(activity_data) + except FileNotFoundError: + print("Can not create activity") + + def __get_activities_from_file(self) -> typing.List[dict]: try: file = open(self.json_data_file_path) diff --git a/V2/time_entries/_infrastructure/_data_persistence/activities_data.json b/V2/time_entries/_infrastructure/_data_persistence/activities_data.json index 0d949902..961251db 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/activities_data.json +++ b/V2/time_entries/_infrastructure/_data_persistence/activities_data.json @@ -63,4 +63,3 @@ "_ts": 1632331515 } ] - diff --git a/V2/time_entries/interface.py b/V2/time_entries/interface.py index ffe31e51..1f1fc805 100644 --- a/V2/time_entries/interface.py +++ b/V2/time_entries/interface.py @@ -1,3 +1,4 @@ from ._application import get_activities from ._application import delete_activity from ._application import update_activity +from ._application import create_activity \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index 2e5aee81..9657c071 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -6,6 +6,7 @@ # For development # Tests +Faker==4.0.2 pytest==5.2.0 Flask_sqlalchemy diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index 6fd17f94..77ed3a0a 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -3,11 +3,12 @@ # Dependencies -r ../commons.txt -r ../azure_cosmos.txt --r ../sql_db.txt +# -r ../sql_db.txt # For production releases #Required by Flask +Faker==4.0.2 Flask==1.1.1 Flask-WTF==0.15.1 flake8==3.7.9 From 5b4763e64368a460dad9dd4f070f9f9b479d85a5 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 3 Nov 2021 18:02:38 +0000 Subject: [PATCH 32/74] 0.42.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8cd38258..b2b24d10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.42.0 (2021-11-03) +### Feature +* TT-365 / TT-369 POST V2 Activity ([#329](https://github.com/ioet/time-tracker-backend/issues/329)) ([`6c3687b`](https://github.com/ioet/time-tracker-backend/commit/6c3687b5ca60893be23b6ad663de3081af964272)) + ## v0.41.0 (2021-10-28) ### Feature * TT-366 V2 - PUT update activity ([#331](https://github.com/ioet/time-tracker-backend/issues/331)) ([`500a5d0`](https://github.com/ioet/time-tracker-backend/commit/500a5d0261497ce9aa9a9040342fea94dbe70704)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 9f86a39e..ccd8b38e 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.41.0' +__version__ = '0.42.0' From cb892c338c1139640a5527772b398b3b34ff68a7 Mon Sep 17 00:00:00 2001 From: Daniela Garcia <70675131+dsgarcia8@users.noreply.github.com> Date: Thu, 4 Nov 2021 10:34:20 -0500 Subject: [PATCH 33/74] fix: TT-365 v2 post method fix (#333) * feat: TT-365 Method POST activity and create function serverless * fix: TT-365 Resolve validation error Co-authored-by: Sandro Castillo --- V2/time_entries/_application/_activities/_create_activity.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/V2/time_entries/_application/_activities/_create_activity.py b/V2/time_entries/_application/_activities/_create_activity.py index cb5f5472..c745a1cd 100644 --- a/V2/time_entries/_application/_activities/_create_activity.py +++ b/V2/time_entries/_application/_activities/_create_activity.py @@ -25,7 +25,7 @@ def create_activity(req: func.HttpRequest) -> func.HttpResponse: validation_errors = _validate_activity(activity_data) if validation_errors: return func.HttpResponse( - body=validation_errors, status_code=400, mimetype="application/json" + body=json.dumps(validation_errors), status_code=400, mimetype="application/json" ) @@ -59,4 +59,4 @@ def _validate_activity(activity_data: dict) -> typing.List[str]: return [ f'The {missing_key} key is missing in the input data' for missing_key in missing_keys - ] \ No newline at end of file + ] From d536b4c3f09d2751b69db8982f39f7530b87132c Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 4 Nov 2021 15:57:11 +0000 Subject: [PATCH 34/74] 0.42.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b2b24d10..5cfb3855 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.42.1 (2021-11-04) +### Fix +* TT-365 v2 post method fix ([#333](https://github.com/ioet/time-tracker-backend/issues/333)) ([`cb892c3`](https://github.com/ioet/time-tracker-backend/commit/cb892c338c1139640a5527772b398b3b34ff68a7)) + ## v0.42.0 (2021-11-03) ### Feature * TT-365 / TT-369 POST V2 Activity ([#329](https://github.com/ioet/time-tracker-backend/issues/329)) ([`6c3687b`](https://github.com/ioet/time-tracker-backend/commit/6c3687b5ca60893be23b6ad663de3081af964272)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index ccd8b38e..3861aea9 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.42.0' +__version__ = '0.42.1' From 9598ad452936d4e1c662d293da5cf60cc7ec61d1 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Mon, 8 Nov 2021 10:02:09 -0500 Subject: [PATCH 35/74] ci: TT-394 create file python-package.yml (#336) * ci: TT-394 create file python-package.yml * ci: TT-394 change branches to master --- .github/workflows/python-package.yml | 39 ++++++++++++ V2/.flake8 | 4 ++ V2/requirements.txt | 1 + .../azure/activity_azure_endpoints_test.py | 61 +++++++++++-------- V2/tests/conftest.py | 1 + .../daos/activities_json_dao_test.py | 51 ++++++++-------- .../unit/services/activity_service_test.py | 1 + .../use_cases/activities_use_case_test.py | 35 +++++------ V2/time_entries/_application/__init__.py | 1 + .../_application/_activities/__init__.py | 1 + .../_activities/_create_activity.py | 7 +-- V2/time_entries/_domain/__init__.py | 1 + V2/time_entries/_domain/_entities/__init__.py | 1 + .../_persistence_contracts/__init__.py | 1 + .../_persistence_contracts/_activities_dao.py | 4 -- V2/time_entries/_domain/_services/__init__.py | 1 + .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_create_activity_use_case.py | 4 +- V2/time_entries/_infrastructure/__init__.py | 1 + .../_data_persistence/__init__.py | 1 + .../_data_persistence/_activities_json_dao.py | 2 +- V2/time_entries/interface.py | 1 + 22 files changed, 135 insertions(+), 85 deletions(-) create mode 100644 .github/workflows/python-package.yml create mode 100644 V2/.flake8 diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml new file mode 100644 index 00000000..36bff27a --- /dev/null +++ b/.github/workflows/python-package.yml @@ -0,0 +1,39 @@ +name: Time Tacker V2 CI + +on: + push: + branches: + - master + + pull_request: + branches: + - master + +jobs: + build-ci-time-tracker: + runs-on: ubuntu-latest + strategy: + max-parallel: 5 + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.10.0 + uses: actions/setup-python@v2 + with: + python-version: 3.10.0 + + - name: Add conda to system path + run: | + echo $CONDA/bin >> $GITHUB_PATH + + - name: Install dependencies + run: | + pip install -r V2/requirements.txt + + - name: Lint with flake8 + run: | + cd V2 + flake8 . --show-source --statistics + - name: Test with pytest + run: | + cd V2 + python -m pytest -v \ No newline at end of file diff --git a/V2/.flake8 b/V2/.flake8 new file mode 100644 index 00000000..cb282cae --- /dev/null +++ b/V2/.flake8 @@ -0,0 +1,4 @@ +[flake8] +exclude = .git,__pycache__,./node_modules, +max-complexity = 10 +max_line_length = 120 \ No newline at end of file diff --git a/V2/requirements.txt b/V2/requirements.txt index 41acba1d..c651bb35 100644 --- a/V2/requirements.txt +++ b/V2/requirements.txt @@ -4,6 +4,7 @@ azure-functions-worker # Tests pytest +flake8==4.0.1 # Mocking pytest-mock diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 05b22801..de52712e 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -3,10 +3,9 @@ import azure.functions as func import json -import typing -ACTIVITY_URL = '/api/activities/' +ACTIVITY_URL = "/api/activities/" def test__activity_azure_endpoint__returns_all_activities( @@ -14,7 +13,7 @@ def test__activity_azure_endpoint__returns_all_activities( ): activities_json, tmp_directory = create_temp_activities activities._get_activities.JSON_PATH = tmp_directory - req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) + req = func.HttpRequest(method="GET", body=None, url=ACTIVITY_URL) response = activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") @@ -29,10 +28,10 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it activities_json, tmp_directory = create_temp_activities activities._get_activities.JSON_PATH = tmp_directory req = func.HttpRequest( - method='GET', + method="GET", body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]['id']}, + route_params={"id": activities_json[0]["id"]}, ) response = activities.get_activities(req) @@ -48,17 +47,17 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe activities_json, tmp_directory = create_temp_activities activities._delete_activity.JSON_PATH = tmp_directory req = func.HttpRequest( - method='DELETE', + method="DELETE", body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]['id']}, + route_params={"id": activities_json[0]["id"]}, ) response = activities.delete_activity(req) activity_json_data = json.loads(response.get_body().decode("utf-8")) assert response.status_code == 200 - assert activity_json_data['status'] == 'inactive' + assert activity_json_data["status"] == "inactive" def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( @@ -68,10 +67,10 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act activities._update_activity.JSON_PATH = tmp_directory activity_data = {"description": Faker().sentence()} req = func.HttpRequest( - method='PUT', + method="PUT", body=json.dumps(activity_data).encode("utf-8"), url=ACTIVITY_URL, - route_params={"id": activities_json[0]['id']}, + route_params={"id": activities_json[0]["id"]}, ) response = activities.update_activity(req) @@ -81,21 +80,29 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act assert response.status_code == 200 assert activitiy_json_data == json.dumps(new_activity) + def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_temp_activities, - ): - activities_json, tmp_directory = create_temp_activities - activities._create_activity._JSON_PATH = tmp_directory - - activity_body = {'id': None, 'name': Faker().user_name(), 'description': Faker().sentence(),'deleted': Faker().uuid4() ,'status': 'active', 'tenant_id': Faker().uuid4()} - body = json.dumps(activity_body).encode("utf-8") - req = func.HttpRequest( - method='POST', - body= body, - url=ACTIVITY_URL, - ) - - response = activities.create_activity(req) - activitiy_json_data = response.get_body() - assert response.status_code == 201 - assert activitiy_json_data == body \ No newline at end of file + create_temp_activities, +): + activities_json, tmp_directory = create_temp_activities + activities._create_activity._JSON_PATH = tmp_directory + + activity_body = { + "id": None, + "name": Faker().user_name(), + "description": Faker().sentence(), + "deleted": Faker().uuid4(), + "status": "active", + "tenant_id": Faker().uuid4(), + } + body = json.dumps(activity_body).encode("utf-8") + req = func.HttpRequest( + method="POST", + body=body, + url=ACTIVITY_URL, + ) + + response = activities.create_activity(req) + activitiy_json_data = response.get_body() + assert response.status_code == 201 + assert activitiy_json_data == body diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index 59065296..2741ce95 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1 +1,2 @@ +# flake8: noqa from tests.api.api_fixtures import create_temp_activities diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index f2b0dacd..d5d3a180 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -8,21 +8,21 @@ fake_activities = [ { - 'id': Faker().uuid4(), - 'name': Faker().user_name(), - 'description': Faker().sentence(), - 'deleted': Faker().uuid4(), - 'status': 'active', - 'tenant_id': Faker().uuid4(), + "id": Faker().uuid4(), + "name": Faker().user_name(), + "description": Faker().sentence(), + "deleted": Faker().uuid4(), + "status": "active", + "tenant_id": Faker().uuid4(), } ] -@pytest.fixture(name='create_fake_activities') +@pytest.fixture(name="create_fake_activities") def _create_fake_activities(mocker) -> typing.List[Activity]: def _creator(activities): read_data = json.dumps(activities) - mocker.patch('builtins.open', mocker.mock_open(read_data=read_data)) + mocker.patch("builtins.open", mocker.mock_open(read_data=read_data)) return [Activity(**activity) for activity in activities] return _creator @@ -94,7 +94,7 @@ def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matc activity_dto = activities.pop() result = activities_json_dao.delete(activity_dto.id) - assert result.status == 'inactive' + assert result.status == "inactive" def test_delete__returns_none__when_no_activity_matching_its_id_is_found( @@ -129,21 +129,24 @@ def test_update__returns_none__when_doesnt_found_one_activity_to_update( create_fake_activities([]) activity_data = {"description": Faker().sentence()} - result = activities_json_dao.update('', activity_data) + result = activities_json_dao.update("", activity_data) - assert result == None + assert result is None -def test_create_activity__returns_an_activity_dto__when_create_an_activity_that_matches_attributes(create_fake_activities): - create_fake_activities([]) - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activity_data = { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - result = activities_json_dao.create_activity(activity_data) - assert result == Activity(**activity_data) \ No newline at end of file +def test_create_activity__returns_an_activity_dto__when_create_an_activity_that_matches_attributes( + create_fake_activities, +): + create_fake_activities([]) + + activities_json_dao = ActivitiesJsonDao(Faker().file_path()) + activity_data = { + "name": "test_name", + "description": "test_description", + "tenant_id": "test_tenant_id", + "id": "test_id", + "deleted": "test_deleted", + "status": "test_status", + } + result = activities_json_dao.create_activity(activity_data) + assert result == Activity(**activity_data) diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index 9315d24f..9fbed763 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -59,6 +59,7 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( assert activity_dao.update.called assert expected_activity == updated_activity + def test__create_activity__uses_the_activity_dao__to_create_an_activity(mocker): expected_activity = mocker.Mock() activity_dao = mocker.Mock( diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index 793c32d6..3a8d1301 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -1,4 +1,3 @@ -from time_entries._domain import ActivityService from time_entries._domain import _use_cases from pytest_mock import MockFixture from faker import Faker @@ -37,26 +36,25 @@ def test__get_activity_by_id_function__uses_the_activity_service__to_retrieve_ac def test__create_activity_function__uses_the_activities_service__to_create_activity( - mocker: MockFixture, - ): - expected_activity = mocker.Mock() - activity_service = mocker.Mock( - create_activity=mocker.Mock(return_value=expected_activity) - ) + mocker: MockFixture, +): + expected_activity = mocker.Mock() + activity_service = mocker.Mock( + create_activity=mocker.Mock(return_value=expected_activity) + ) + + activity_use_case = _use_cases.CreateActivityUseCase(activity_service) + actual_activity = activity_use_case.create_activity(fake.pydict()) - activity_use_case = _use_cases.CreateActivityUseCase(activity_service) - actual_activity = activity_use_case.create_activity(fake.pydict()) + assert activity_service.create_activity.called + assert expected_activity == actual_activity - assert activity_service.create_activity.called - assert expected_activity == actual_activity def test__delete_activity_function__uses_the_activity_service__to_change_activity_status( mocker: MockFixture, ): expected_activity = mocker.Mock() - activity_service = mocker.Mock( - delete=mocker.Mock(return_value=expected_activity) - ) + activity_service = mocker.Mock(delete=mocker.Mock(return_value=expected_activity)) activity_use_case = _use_cases.DeleteActivityUseCase(activity_service) deleted_activity = activity_use_case.delete_activity(fake.uuid4()) @@ -64,18 +62,15 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit assert activity_service.delete.called assert expected_activity == deleted_activity + def test__update_activity_function__uses_the_activities_service__to_update_an_activity( mocker: MockFixture, ): expected_activity = mocker.Mock() - activity_service = mocker.Mock( - update=mocker.Mock(return_value=expected_activity) - ) + activity_service = mocker.Mock(update=mocker.Mock(return_value=expected_activity)) activity_use_case = _use_cases.UpdateActivityUseCase(activity_service) - updated_activity = activity_use_case.update_activity( - fake.uuid4(), fake.pydict() - ) + updated_activity = activity_use_case.update_activity(fake.uuid4(), fake.pydict()) assert activity_service.update.called assert expected_activity == updated_activity diff --git a/V2/time_entries/_application/__init__.py b/V2/time_entries/_application/__init__.py index c8f26492..6c34669a 100644 --- a/V2/time_entries/_application/__init__.py +++ b/V2/time_entries/_application/__init__.py @@ -1,3 +1,4 @@ +# flake8: noqa from ._activities import get_activities from ._activities import delete_activity from ._activities import update_activity diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_entries/_application/_activities/__init__.py index ab7d3844..ef38ae00 100644 --- a/V2/time_entries/_application/_activities/__init__.py +++ b/V2/time_entries/_application/_activities/__init__.py @@ -1,3 +1,4 @@ +# flake8: noqa from ._get_activities import get_activities from ._delete_activity import delete_activity from ._update_activity import update_activity diff --git a/V2/time_entries/_application/_activities/_create_activity.py b/V2/time_entries/_application/_activities/_create_activity.py index c745a1cd..2a2a622c 100644 --- a/V2/time_entries/_application/_activities/_create_activity.py +++ b/V2/time_entries/_application/_activities/_create_activity.py @@ -1,5 +1,4 @@ import json -import logging import dataclasses import typing @@ -18,19 +17,16 @@ def create_activity(req: func.HttpRequest) -> func.HttpResponse: activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) - activity_data = req.get_json() - validation_errors = _validate_activity(activity_data) if validation_errors: return func.HttpResponse( body=json.dumps(validation_errors), status_code=400, mimetype="application/json" ) - activity_to_create = _domain.Activity( - id= None, + id=None, name=activity_data['name'], description=activity_data['description'], status=activity_data['status'], @@ -38,7 +34,6 @@ def create_activity(req: func.HttpRequest) -> func.HttpResponse: tenant_id=activity_data['tenant_id'] ) - created_activity = use_case.create_activity(activity_to_create.__dict__) if not create_activity: return func.HttpResponse( diff --git a/V2/time_entries/_domain/__init__.py b/V2/time_entries/_domain/__init__.py index f1a97246..66eb2eec 100644 --- a/V2/time_entries/_domain/__init__.py +++ b/V2/time_entries/_domain/__init__.py @@ -1,3 +1,4 @@ +# flake8: noqa from ._entities import Activity from ._persistence_contracts import ActivitiesDao from ._services import ActivityService diff --git a/V2/time_entries/_domain/_entities/__init__.py b/V2/time_entries/_domain/_entities/__init__.py index bf3eb08e..a8cf9289 100644 --- a/V2/time_entries/_domain/_entities/__init__.py +++ b/V2/time_entries/_domain/_entities/__init__.py @@ -1 +1,2 @@ +# flake8: noqa from ._activity import Activity diff --git a/V2/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_entries/_domain/_persistence_contracts/__init__.py index 3495445e..2401254d 100644 --- a/V2/time_entries/_domain/_persistence_contracts/__init__.py +++ b/V2/time_entries/_domain/_persistence_contracts/__init__.py @@ -1 +1,2 @@ +# flake8: noqa from ._activities_dao import ActivitiesDao diff --git a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py index 2037841d..f7e7bac3 100644 --- a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py @@ -23,7 +23,3 @@ def update(self, id: str, new_activity: dict) -> Activity: @abc.abstractmethod def create_activity(self, activity_data: dict) -> Activity: pass - - @abc.abstractmethod - def delete(self, id: str) -> Activity: - pass diff --git a/V2/time_entries/_domain/_services/__init__.py b/V2/time_entries/_domain/_services/__init__.py index 1a9befa8..fb9a65cc 100644 --- a/V2/time_entries/_domain/_services/__init__.py +++ b/V2/time_entries/_domain/_services/__init__.py @@ -1 +1,2 @@ +# flake8: noqa from ._activity import ActivityService diff --git a/V2/time_entries/_domain/_use_cases/__init__.py b/V2/time_entries/_domain/_use_cases/__init__.py index 642d2425..6b330e07 100644 --- a/V2/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_entries/_domain/_use_cases/__init__.py @@ -1,3 +1,4 @@ +# flake8: noqa from ._get_activities_use_case import GetActivitiesUseCase from ._get_activity_by_id_use_case import GetActivityUseCase from ._delete_activity_use_case import DeleteActivityUseCase diff --git a/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py b/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py index a7f7a66e..fbd13aac 100644 --- a/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py +++ b/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py @@ -1,11 +1,9 @@ from time_entries._domain import ActivityService, Activity -import typing class CreateActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def create_activity(self, activity_data: dict ) -> Activity: + def create_activity(self, activity_data: dict) -> Activity: return self.activity_service.create_activity(activity_data) - diff --git a/V2/time_entries/_infrastructure/__init__.py b/V2/time_entries/_infrastructure/__init__.py index df144da6..1734e5b8 100644 --- a/V2/time_entries/_infrastructure/__init__.py +++ b/V2/time_entries/_infrastructure/__init__.py @@ -1 +1,2 @@ +# flake8: noqa from ._data_persistence import ActivitiesJsonDao diff --git a/V2/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_entries/_infrastructure/_data_persistence/__init__.py index 802f35f4..d2a77fc4 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_entries/_infrastructure/_data_persistence/__init__.py @@ -1 +1,2 @@ +# flake8: noqa from ._activities_json_dao import ActivitiesJsonDao diff --git a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py index ab8f5765..54418c45 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py +++ b/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py @@ -3,6 +3,7 @@ import json import typing + class ActivitiesJsonDao(ActivitiesDao): def __init__(self, json_data_file_path: str): self.json_data_file_path = json_data_file_path @@ -88,7 +89,6 @@ def create_activity(self, activity_data: dict) -> Activity: except FileNotFoundError: print("Can not create activity") - def __get_activities_from_file(self) -> typing.List[dict]: try: file = open(self.json_data_file_path) diff --git a/V2/time_entries/interface.py b/V2/time_entries/interface.py index 1f1fc805..877b631e 100644 --- a/V2/time_entries/interface.py +++ b/V2/time_entries/interface.py @@ -1,3 +1,4 @@ +# flake8: noqa from ._application import get_activities from ._application import delete_activity from ._application import update_activity From 73bb21a74bb690a9677cc1c8f4897b09fec316ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Wed, 10 Nov 2021 16:26:05 -0500 Subject: [PATCH 36/74] refactor: TT-401 Rename the time entries folder (#340) --- V2/serverless.yml | 8 +- .../azure/activity_azure_endpoints_test.py | 2 +- .../daos/activities_json_dao_test.py | 4 +- .../unit/services/activity_service_test.py | 2 +- .../use_cases/activities_use_case_test.py | 2 +- .../activities}/_application/__init__.py | 0 .../_application/_activities/__init__.py | 0 .../_activities/_create_activity.py | 2 +- .../_activities/_delete_activity.py | 6 +- .../_activities/_get_activities.py | 110 +++++++++--------- .../_activities/_update_activity.py | 6 +- .../activities}/_domain/__init__.py | 0 .../activities}/_domain/_entities/__init__.py | 0 .../_domain/_entities/_activity.py | 0 .../_persistence_contracts/__init__.py | 0 .../_persistence_contracts/_activities_dao.py | 2 +- .../activities}/_domain/_services/__init__.py | 0 .../_domain/_services/_activity.py | 2 +- .../_domain/_use_cases/__init__.py | 0 .../_use_cases/_create_activity_use_case.py | 2 +- .../_use_cases/_delete_activity_use_case.py | 2 +- .../_use_cases/_get_activities_use_case.py | 2 +- .../_get_activity_by_id_use_case.py | 2 +- .../_use_cases/_update_activity_use_case.py | 2 +- .../activities}/_infrastructure/__init__.py | 0 .../_data_persistence/__init__.py | 0 .../_data_persistence/_activities_json_dao.py | 2 +- .../_data_persistence/activities_data.json | 0 .../activities}/interface.py | 0 29 files changed, 79 insertions(+), 79 deletions(-) rename V2/{time_entries => time_tracker/activities}/_application/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_application/_activities/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_application/_activities/_create_activity.py (95%) rename V2/{time_entries => time_tracker/activities}/_application/_activities/_delete_activity.py (80%) rename V2/{time_entries => time_tracker/activities}/_application/_activities/_get_activities.py (83%) rename V2/{time_entries => time_tracker/activities}/_application/_activities/_update_activity.py (84%) rename V2/{time_entries => time_tracker/activities}/_domain/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_entities/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_entities/_activity.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_persistence_contracts/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_persistence_contracts/_activities_dao.py (90%) rename V2/{time_entries => time_tracker/activities}/_domain/_services/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_services/_activity.py (91%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/_create_activity_use_case.py (79%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/_delete_activity_use_case.py (77%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/_get_activities_use_case.py (78%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/_get_activity_by_id_use_case.py (77%) rename V2/{time_entries => time_tracker/activities}/_domain/_use_cases/_update_activity_use_case.py (81%) rename V2/{time_entries => time_tracker/activities}/_infrastructure/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_infrastructure/_data_persistence/__init__.py (100%) rename V2/{time_entries => time_tracker/activities}/_infrastructure/_data_persistence/_activities_json_dao.py (97%) rename V2/{time_entries => time_tracker/activities}/_infrastructure/_data_persistence/activities_data.json (100%) rename V2/{time_entries => time_tracker/activities}/interface.py (100%) diff --git a/V2/serverless.yml b/V2/serverless.yml index 0d085c36..0eb3f42f 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -38,7 +38,7 @@ package: functions: get_activities: - handler: time_entries/interface.get_activities + handler: time_tracker/activities/interface.get_activities events: - http: true x-azure-settings: @@ -48,7 +48,7 @@ functions: authLevel: anonymous delete_activity: - handler: time_entries/interface.delete_activity + handler: time_tracker/activities/interface.delete_activity events: - http: true x-azure-settings: @@ -58,7 +58,7 @@ functions: authLevel: anonymous update_activity: - handler: time_entries/interface.update_activity + handler: time_tracker/activities/interface.update_activity events: - http: true x-azure-settings: @@ -68,7 +68,7 @@ functions: authLevel: anonymous create_activity: - handler: time_entries/interface.create_activity + handler: time_tracker/activities/interface.create_activity events: - http: true x-azure-settings: diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index de52712e..e3bf4ffe 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,4 +1,4 @@ -from time_entries._application import _activities as activities +from time_tracker.activities._application import _activities as activities from faker import Faker import azure.functions as func diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py index d5d3a180..8eff9609 100644 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ b/V2/tests/integration/daos/activities_json_dao_test.py @@ -1,5 +1,5 @@ -from time_entries._infrastructure import ActivitiesJsonDao -from time_entries._domain import Activity +from time_tracker.activities._infrastructure import ActivitiesJsonDao +from time_tracker.activities._domain import Activity from faker import Faker import json import pytest diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index 9fbed763..befdb1fb 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService +from time_tracker.activities._domain import ActivityService from faker import Faker diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index 3a8d1301..334c7489 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -1,4 +1,4 @@ -from time_entries._domain import _use_cases +from time_tracker.activities._domain import _use_cases from pytest_mock import MockFixture from faker import Faker diff --git a/V2/time_entries/_application/__init__.py b/V2/time_tracker/activities/_application/__init__.py similarity index 100% rename from V2/time_entries/_application/__init__.py rename to V2/time_tracker/activities/_application/__init__.py diff --git a/V2/time_entries/_application/_activities/__init__.py b/V2/time_tracker/activities/_application/_activities/__init__.py similarity index 100% rename from V2/time_entries/_application/_activities/__init__.py rename to V2/time_tracker/activities/_application/_activities/__init__.py diff --git a/V2/time_entries/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py similarity index 95% rename from V2/time_entries/_application/_activities/_create_activity.py rename to V2/time_tracker/activities/_application/_activities/_create_activity.py index 2a2a622c..be53815a 100644 --- a/V2/time_entries/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -8,7 +8,7 @@ from ... import _infrastructure _JSON_PATH = ( - 'time_entries/_infrastructure/_data_persistence/activities_data.json' + 'activities/_infrastructure/_data_persistence/activities_data.json' ) diff --git a/V2/time_entries/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py similarity index 80% rename from V2/time_entries/_application/_activities/_delete_activity.py rename to V2/time_tracker/activities/_application/_activities/_delete_activity.py index 896c8a92..80d55446 100644 --- a/V2/time_entries/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -1,12 +1,12 @@ -from time_entries._infrastructure import ActivitiesJsonDao -from time_entries._domain import ActivityService, _use_cases +from time_tracker.activities._infrastructure import ActivitiesJsonDao +from time_tracker.activities._domain import ActivityService, _use_cases import azure.functions as func import json import logging JSON_PATH = ( - 'time_entries/_infrastructure/_data_persistence/activities_data.json' + 'activities/_infrastructure/_data_persistence/activities_data.json' ) diff --git a/V2/time_entries/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py similarity index 83% rename from V2/time_entries/_application/_activities/_get_activities.py rename to V2/time_tracker/activities/_application/_activities/_get_activities.py index fc929e77..9f52069d 100644 --- a/V2/time_entries/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -1,55 +1,55 @@ -from time_entries._infrastructure import ActivitiesJsonDao -from time_entries._domain import ActivityService, _use_cases - -import azure.functions as func -import json -import logging - -JSON_PATH = ( - 'time_entries/_infrastructure/_data_persistence/activities_data.json' -) - - -def get_activities(req: func.HttpRequest) -> func.HttpResponse: - logging.info( - 'Python HTTP trigger function processed a request to get an activity.' - ) - activity_id = req.route_params.get('id') - status_code = 200 - - if activity_id: - response = _get_by_id(activity_id) - if response == b'Not Found': - status_code = 404 - else: - response = _get_all() - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _get_by_id(activity_id: str) -> str: - activity_use_case = _use_cases.GetActivityUseCase( - _create_activity_service(JSON_PATH) - ) - activity = activity_use_case.get_activity_by_id(activity_id) - - return json.dumps(activity.__dict__) if activity else b'Not Found' - - -def _get_all() -> str: - activities_use_case = _use_cases.GetActivitiesUseCase( - _create_activity_service(JSON_PATH) - ) - return json.dumps( - [ - activity.__dict__ - for activity in activities_use_case.get_activities() - ] - ) - - -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +from time_tracker.activities._infrastructure import ActivitiesJsonDao +from time_tracker.activities._domain import ActivityService, _use_cases + +import azure.functions as func +import json +import logging + +JSON_PATH = ( + 'activities/_infrastructure/_data_persistence/activities_data.json' +) + + +def get_activities(req: func.HttpRequest) -> func.HttpResponse: + logging.info( + 'Python HTTP trigger function processed a request to get an activity.' + ) + activity_id = req.route_params.get('id') + status_code = 200 + + if activity_id: + response = _get_by_id(activity_id) + if response == b'Not Found': + status_code = 404 + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _get_by_id(activity_id: str) -> str: + activity_use_case = _use_cases.GetActivityUseCase( + _create_activity_service(JSON_PATH) + ) + activity = activity_use_case.get_activity_by_id(activity_id) + + return json.dumps(activity.__dict__) if activity else b'Not Found' + + +def _get_all() -> str: + activities_use_case = _use_cases.GetActivitiesUseCase( + _create_activity_service(JSON_PATH) + ) + return json.dumps( + [ + activity.__dict__ + for activity in activities_use_case.get_activities() + ] + ) + + +def _create_activity_service(path: str): + activity_json = ActivitiesJsonDao(path) + return ActivityService(activity_json) diff --git a/V2/time_entries/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py similarity index 84% rename from V2/time_entries/_application/_activities/_update_activity.py rename to V2/time_tracker/activities/_application/_activities/_update_activity.py index 2aff2881..1709f77a 100644 --- a/V2/time_entries/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -1,5 +1,5 @@ -from time_entries._infrastructure import ActivitiesJsonDao -from time_entries._domain import ActivityService, Activity, _use_cases +from time_tracker.activities._infrastructure import ActivitiesJsonDao +from time_tracker.activities._domain import ActivityService, Activity, _use_cases import azure.functions as func import dataclasses @@ -7,7 +7,7 @@ import logging JSON_PATH = ( - 'time_entries/_infrastructure/_data_persistence/activities_data.json' + 'activities/_infrastructure/_data_persistence/activities_data.json' ) diff --git a/V2/time_entries/_domain/__init__.py b/V2/time_tracker/activities/_domain/__init__.py similarity index 100% rename from V2/time_entries/_domain/__init__.py rename to V2/time_tracker/activities/_domain/__init__.py diff --git a/V2/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/activities/_domain/_entities/__init__.py similarity index 100% rename from V2/time_entries/_domain/_entities/__init__.py rename to V2/time_tracker/activities/_domain/_entities/__init__.py diff --git a/V2/time_entries/_domain/_entities/_activity.py b/V2/time_tracker/activities/_domain/_entities/_activity.py similarity index 100% rename from V2/time_entries/_domain/_entities/_activity.py rename to V2/time_tracker/activities/_domain/_entities/_activity.py diff --git a/V2/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/activities/_domain/_persistence_contracts/__init__.py similarity index 100% rename from V2/time_entries/_domain/_persistence_contracts/__init__.py rename to V2/time_tracker/activities/_domain/_persistence_contracts/__init__.py diff --git a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py similarity index 90% rename from V2/time_entries/_domain/_persistence_contracts/_activities_dao.py rename to V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py index f7e7bac3..80b8c711 100644 --- a/V2/time_entries/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py @@ -1,4 +1,4 @@ -from time_entries._domain import Activity +from time_tracker.activities._domain import Activity import abc import typing diff --git a/V2/time_entries/_domain/_services/__init__.py b/V2/time_tracker/activities/_domain/_services/__init__.py similarity index 100% rename from V2/time_entries/_domain/_services/__init__.py rename to V2/time_tracker/activities/_domain/_services/__init__.py diff --git a/V2/time_entries/_domain/_services/_activity.py b/V2/time_tracker/activities/_domain/_services/_activity.py similarity index 91% rename from V2/time_entries/_domain/_services/_activity.py rename to V2/time_tracker/activities/_domain/_services/_activity.py index 8d29a7ab..a564577a 100644 --- a/V2/time_entries/_domain/_services/_activity.py +++ b/V2/time_tracker/activities/_domain/_services/_activity.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivitiesDao, Activity +from time_tracker.activities._domain import ActivitiesDao, Activity import typing diff --git a/V2/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/activities/_domain/_use_cases/__init__.py similarity index 100% rename from V2/time_entries/_domain/_use_cases/__init__.py rename to V2/time_tracker/activities/_domain/_use_cases/__init__.py diff --git a/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py similarity index 79% rename from V2/time_entries/_domain/_use_cases/_create_activity_use_case.py rename to V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py index fbd13aac..241718db 100644 --- a/V2/time_entries/_domain/_use_cases/_create_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService, Activity +from time_tracker.activities._domain import ActivityService, Activity class CreateActivityUseCase: diff --git a/V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py similarity index 77% rename from V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py rename to V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py index e13acaa5..5af54ee8 100644 --- a/V2/time_entries/_domain/_use_cases/_delete_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService, Activity +from time_tracker.activities._domain import ActivityService, Activity class DeleteActivityUseCase: diff --git a/V2/time_entries/_domain/_use_cases/_get_activities_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_get_activities_use_case.py similarity index 78% rename from V2/time_entries/_domain/_use_cases/_get_activities_use_case.py rename to V2/time_tracker/activities/_domain/_use_cases/_get_activities_use_case.py index 1262ff14..0e42dd32 100644 --- a/V2/time_entries/_domain/_use_cases/_get_activities_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_get_activities_use_case.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService, Activity +from time_tracker.activities._domain import ActivityService, Activity import typing diff --git a/V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py similarity index 77% rename from V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py rename to V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py index 65ce104f..04ca442e 100644 --- a/V2/time_entries/_domain/_use_cases/_get_activity_by_id_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService, Activity +from time_tracker.activities._domain import ActivityService, Activity class GetActivityUseCase: diff --git a/V2/time_entries/_domain/_use_cases/_update_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py similarity index 81% rename from V2/time_entries/_domain/_use_cases/_update_activity_use_case.py rename to V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py index ea0bc3c5..a890d85f 100644 --- a/V2/time_entries/_domain/_use_cases/_update_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivityService, Activity +from time_tracker.activities._domain import ActivityService, Activity class UpdateActivityUseCase: diff --git a/V2/time_entries/_infrastructure/__init__.py b/V2/time_tracker/activities/_infrastructure/__init__.py similarity index 100% rename from V2/time_entries/_infrastructure/__init__.py rename to V2/time_tracker/activities/_infrastructure/__init__.py diff --git a/V2/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py similarity index 100% rename from V2/time_entries/_infrastructure/_data_persistence/__init__.py rename to V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py diff --git a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py similarity index 97% rename from V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py rename to V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py index 54418c45..60859a15 100644 --- a/V2/time_entries/_infrastructure/_data_persistence/_activities_json_dao.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py @@ -1,4 +1,4 @@ -from time_entries._domain import ActivitiesDao, Activity +from time_tracker.activities._domain import ActivitiesDao, Activity import dataclasses import json import typing diff --git a/V2/time_entries/_infrastructure/_data_persistence/activities_data.json b/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json similarity index 100% rename from V2/time_entries/_infrastructure/_data_persistence/activities_data.json rename to V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json diff --git a/V2/time_entries/interface.py b/V2/time_tracker/activities/interface.py similarity index 100% rename from V2/time_entries/interface.py rename to V2/time_tracker/activities/interface.py From 568e0479859b0a92cc2780fb5cb522da664c0a92 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Thu, 11 Nov 2021 15:40:31 -0500 Subject: [PATCH 37/74] feat: TT-384 Refactor Tables (#337) * feat: file stream from azure blob storage * refactor: add new python package in dev.txt * feat: implement new methods to read files from blob storage * feat: implemented the reading of the blob storage to the endpoint activity * fix: TT-384 Change blob storage connection input names * fix: TT-384 Add the file name as a parameter of the function * test: TT-384 Add a tests to obtain activities from blob storage, endpoint and repository * fix: TT-384 revert changes * test: TT-384 Change blob storage connection input names * feat: TT-384 implemented the reading of the storage blob to the endpoint and repository * test: TT-384 Add a tests to obtain activities from blob storage, endpoint and repository * test: TT-384 changed test name with correct formatting * refactor: TT-384 change import to global and name method --- commons/data_access_layer/file_stream.py | 27 +++++++++++++++++++ requirements/time_tracker_api/dev.txt | 5 +++- .../data_access_layer/file_stream_test.py | 15 +++++++++++ .../activities/activities_model_test.py | 24 +++++++++++++++++ .../activities/activities_namespace_test.py | 13 +++++++-- .../activities/activities_model.py | 24 +++++++++++++++-- 6 files changed, 103 insertions(+), 5 deletions(-) create mode 100644 commons/data_access_layer/file_stream.py create mode 100644 tests/commons/data_access_layer/file_stream_test.py diff --git a/commons/data_access_layer/file_stream.py b/commons/data_access_layer/file_stream.py new file mode 100644 index 00000000..a705c061 --- /dev/null +++ b/commons/data_access_layer/file_stream.py @@ -0,0 +1,27 @@ +import os +from azure.storage.blob.blockblobservice import BlockBlobService + +ACCOUNT_KEY = os.environ.get('AZURE_STORAGE_ACCOUNT_KEY') + +class FileStream: + def __init__(self, account_name:str, container_name:str): + """ + Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. + `account_name`: The name of the Azure Storage account. + `container_name`: The name of the Azure Storage container. + """ + self.account_name = account_name + self.container_name = container_name + self.blob_service = BlockBlobService(account_name=self.account_name, account_key=ACCOUNT_KEY) + + def get_file_stream(self, filename:str): + import tempfile + try: + local_file = tempfile.NamedTemporaryFile() + self.blob_service.get_blob_to_stream(self.container_name, filename, stream=local_file) + + local_file.seek(0) + return local_file + except Exception as e: + print(e) + return None \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index 9657c071..b7a6d667 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -19,4 +19,7 @@ coverage==4.5.1 # CLI tools PyInquirer==1.0.3 pyfiglet==0.7 -factory_boy==3.2.0 \ No newline at end of file +factory_boy==3.2.0 + +# azure blob storage +azure-storage-blob==2.1.0 \ No newline at end of file diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py new file mode 100644 index 00000000..a3119774 --- /dev/null +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -0,0 +1,15 @@ +import json + +from commons.data_access_layer.file_stream import FileStream + +fs = FileStream("storageaccounteystr82c5","tt-common-files") + +def test__get_file_stream__return_file_content__when_enter_file_name(): + result = fs.get_file_stream("activity_test.json") + + assert len(json.load(result)) == 15 + +def test__get_file_stream__return_None__when_not_enter_file_name_or_incorrect_name(): + result = fs.get_file_stream("") + + assert result == None \ No newline at end of file diff --git a/tests/time_tracker_api/activities/activities_model_test.py b/tests/time_tracker_api/activities/activities_model_test.py index c1a1b243..66e08ed7 100644 --- a/tests/time_tracker_api/activities/activities_model_test.py +++ b/tests/time_tracker_api/activities/activities_model_test.py @@ -64,3 +64,27 @@ def test_create_activity_should_add_active_status( activity_repository_create_mock.assert_called_with( data=expect_argument, event_context=ANY ) + +def test__find_all_from_blob_storage__return_list__when_send_event_context_and_correct_file_name( + event_context: EventContext, + activity_repository: ActivityCosmosDBRepository, +): + activity_repository.container = Mock() + + result = activity_repository.find_all_from_blob_storage( + event_context=event_context, + file_name="activity_test.json" + ) + assert len(result) == 15 + +def test__find_all_from_blob_storage__return_empty_list__when_send_event_context_and_incorrect_file_name( + event_context: EventContext, + activity_repository: ActivityCosmosDBRepository, +): + activity_repository.container = Mock() + + result = activity_repository.find_all_from_blob_storage( + event_context=event_context, + file_name="incorrect.json" + ) + assert result == [] \ No newline at end of file diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index a2b9ab20..86e34691 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -4,6 +4,7 @@ from flask import json from flask.testing import FlaskClient from flask_restplus._http import HTTPStatus +import pytest from pytest_mock import MockFixture from utils.enums.status import Status @@ -18,6 +19,14 @@ fake_activity = ({"id": fake.random_int(1, 9999)}).update(valid_activity_data) +def test__get_all_activities__return_response__when_send_activities_get_request( + client: FlaskClient, valid_header: dict +): + response = client.get( + "/activities", headers=valid_header, follow_redirects=True + ) + + assert HTTPStatus.OK == response.status_code def test_create_activity_should_succeed_with_valid_request( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -55,7 +64,7 @@ def test_create_activity_should_reject_bad_request( assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -81,7 +90,7 @@ def test_list_all_active( max_count=ANY, ) - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active_activities( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index cbfd0d20..ddb46411 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -1,5 +1,6 @@ from dataclasses import dataclass +import json from azure.cosmos import PartitionKey from commons.data_access_layer.cosmos_db import ( @@ -12,7 +13,7 @@ from commons.data_access_layer.database import EventContext from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder - +from commons.data_access_layer.file_stream import FileStream class ActivityDao(CRUDDao): pass @@ -113,6 +114,20 @@ def find_all( function_mapper = self.get_mapper_or_dict(mapper) return list(map(function_mapper, result)) + def find_all_from_blob_storage( + self, + event_context: EventContext, + mapper: Callable = None, + file_name: str = "activity.json", + ): + tenant_id_value = self.find_partition_key_value(event_context) + function_mapper = self.get_mapper_or_dict(mapper) + if tenant_id_value is None: + return [] + + fs = FileStream("storageaccounteystr82c5","tt-common-files") + result = fs.get_file_stream(file_name) + return list(map(function_mapper, json.load(result))) if result is not None else [] class ActivityCosmosDBDao(APICosmosDBDao, ActivityDao): def __init__(self, repository): @@ -128,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -147,6 +162,11 @@ def get_all( ) return activities + def get_all(self, conditions: dict = None) -> list: + event_ctx = self.create_event_context("read-many") + activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) + return activities + def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') activity_payload['status'] = Status.ACTIVE.value From 9be546f4e4c225795ae4deccf291183a0fb82557 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 11 Nov 2021 21:11:17 +0000 Subject: [PATCH 38/74] 0.43.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cfb3855..0aa521ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.43.0 (2021-11-11) +### Feature +* TT-384 Refactor Tables ([#337](https://github.com/ioet/time-tracker-backend/issues/337)) ([`568e047`](https://github.com/ioet/time-tracker-backend/commit/568e0479859b0a92cc2780fb5cb522da664c0a92)) + ## v0.42.1 (2021-11-04) ### Fix * TT-365 v2 post method fix ([#333](https://github.com/ioet/time-tracker-backend/issues/333)) ([`cb892c3`](https://github.com/ioet/time-tracker-backend/commit/cb892c338c1139640a5527772b398b3b34ff68a7)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 3861aea9..1e79165d 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.42.1' +__version__ = '0.43.0' From 6e2108ee03dcfd48fa9676a69591248a2467f27c Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Fri, 12 Nov 2021 11:15:14 -0500 Subject: [PATCH 39/74] fix: TT-393 userid convert to list (#339) * feat: TT-365 Method POST activity and create function serverless * fix: TT-393 change user id variable to list * TT-393 added list to userid * TT-393 added list to userid * TT-393 resolve comment Co-authored-by: Sandro Castillo Co-authored-by: Daniela Garcia --- package-lock.json | 6 ++++++ tests/utils/azure_users_test.py | 2 +- time-tracker.sh | 0 utils/azure_users.py | 3 ++- 4 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 package-lock.json mode change 100644 => 100755 time-tracker.sh diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..1231a8ae --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "time-tracker-backend", + "lockfileVersion": 2, + "requires": true, + "packages": {} +} diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 49d99f9d..22bd8965 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -141,7 +141,7 @@ def test_get_groups_and_users(get_mock): get_mock.return_value = response_mock expected_result = [ - ('test-group-1', ['user-id1', 'user-id2', MSConfig.USERID]), + ('test-group-1', ['user-id1', 'user-id2', *MSConfig.USERID.split(",")]), ('test-group-2', ['user-id3', 'user-id1']), ('test-group-3', []), ] diff --git a/time-tracker.sh b/time-tracker.sh old mode 100644 new mode 100755 diff --git a/utils/azure_users.py b/utils/azure_users.py index ba271a4d..45a1a0f3 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -263,7 +263,8 @@ def get_groups_and_users(self): [member['objectId'] for member in item['members']], ) result = list(map(parse_item, response.json()['value'])) - result[0][1].append(self.config.USERID) + users_id = self.config.USERID.split(",") + result[0][1].extend(users_id) return result From a20bfe0b9239cc7adac4cb569338da6ea3a20e21 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Fri, 12 Nov 2021 11:21:14 -0500 Subject: [PATCH 40/74] fix:TT-384 add package blob storage to prod.txt (#343) * feat: file stream from azure blob storage * refactor: add new python package in dev.txt * feat: implement new methods to read files from blob storage * feat: implemented the reading of the blob storage to the endpoint activity * fix: TT-384 Change blob storage connection input names * fix: TT-384 Add the file name as a parameter of the function * test: TT-384 Add a tests to obtain activities from blob storage, endpoint and repository * fix: TT-384 revert changes * test: TT-384 Change blob storage connection input names * feat: TT-384 implemented the reading of the storage blob to the endpoint and repository * test: TT-384 Add a tests to obtain activities from blob storage, endpoint and repository * test: TT-384 changed test name with correct formatting * refactor: TT-384 change import to global and name method * refactor: change import json to global * fix: TT-384 add package azure blob storage to prod.txt --- requirements/time_tracker_api/prod.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index 77ed3a0a..dd6df0df 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -44,4 +44,7 @@ azure-functions-worker==1.1.9 # Time utils pytz==2019.3 -python-dateutil==2.8.1 \ No newline at end of file +python-dateutil==2.8.1 + +# azure blob storage +azure-storage-blob==2.1.0 \ No newline at end of file From 2f1504146513316b60f74e5756f45e356919e591 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 12 Nov 2021 17:07:23 +0000 Subject: [PATCH 41/74] 0.43.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0aa521ef..de620596 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.43.1 (2021-11-12) +### Fix +* TT-393 userid convert to list ([#339](https://github.com/ioet/time-tracker-backend/issues/339)) ([`6e2108e`](https://github.com/ioet/time-tracker-backend/commit/6e2108ee03dcfd48fa9676a69591248a2467f27c)) + ## v0.43.0 (2021-11-11) ### Feature * TT-384 Refactor Tables ([#337](https://github.com/ioet/time-tracker-backend/issues/337)) ([`568e047`](https://github.com/ioet/time-tracker-backend/commit/568e0479859b0a92cc2780fb5cb522da664c0a92)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 1e79165d..d5f90b8c 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.43.0' +__version__ = '0.43.1' From 80c256ae554614ff1b13ed606b1e4598da2eed9d Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Sat, 13 Nov 2021 14:57:16 -0500 Subject: [PATCH 42/74] test: TT-384 revert to origin get_all (#345) --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index ddb46411..158c8053 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all_v1( + def get_all( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all_v1( ) return activities - def get_all(self, conditions: dict = None) -> list: + def get_all_test(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From 80f4ed136b81c14f4265384bdd888bff2b3c6206 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Mon, 15 Nov 2021 09:10:10 -0500 Subject: [PATCH 43/74] test: TT-384 get all activities from blob storage (#348) --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..ddb46411 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From 3a99add39a3130c540d86b02c5a69dbda8536e8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Mon, 15 Nov 2021 09:53:25 -0500 Subject: [PATCH 44/74] feat: TT-357 Create V2 Activities Azure DAO (#334) * feat: TT-357 Change Json Implementation for SQL * fix: TT-357 Resolution of comments * fix: TT-357 Update requirements * Refactor: TT-357 correction of FlakeV8 * fix: TT-357 change of an environment variable to a constant * refactor: TT-357 Refactor update and create activity Co-authored-by: Daniela Garcia --- V2/.flake8 | 2 +- V2/Makefile | 3 +- V2/create_activity/function.json | 22 +++ V2/delete_activity/function.json | 22 +++ V2/docker-compose.yml | 10 ++ V2/get_activities/function.json | 22 +++ V2/requirements.txt | 6 +- V2/tests/api/api_fixtures.py | 41 ----- .../azure/activity_azure_endpoints_test.py | 129 +++++++++------ V2/tests/conftest.py | 2 +- V2/tests/fixtures.py | 35 ++++ .../daos/activities_json_dao_test.py | 152 ------------------ .../daos/activities_sql_dao_test.py | 138 ++++++++++++++++ .../unit/services/activity_service_test.py | 8 +- .../use_cases/activities_use_case_test.py | 30 ++-- V2/time_tracker/_infrastructure/__init__.py | 3 + V2/time_tracker/_infrastructure/_config.py | 20 +++ V2/time_tracker/_infrastructure/_db.py | 20 +++ .../_activities/_create_activity.py | 12 +- .../_activities/_delete_activity.py | 49 +++--- .../_activities/_get_activities.py | 59 +++---- .../_activities/_update_activity.py | 70 ++++---- .../activities/_domain/_entities/_activity.py | 7 +- .../_persistence_contracts/_activities_dao.py | 8 +- .../activities/_domain/_services/_activity.py | 12 +- .../_use_cases/_create_activity_use_case.py | 4 +- .../_use_cases/_delete_activity_use_case.py | 2 +- .../_get_activity_by_id_use_case.py | 2 +- .../_use_cases/_update_activity_use_case.py | 4 +- .../activities/_infrastructure/__init__.py | 2 +- .../_data_persistence/__init__.py | 2 +- .../_data_persistence/_activities_json_dao.py | 105 ------------ .../_data_persistence/_activities_sql_dao.py | 67 ++++++++ .../_data_persistence/activities_data.json | 65 -------- V2/time_tracker/activities/interface.py | 2 +- V2/update_activity/function.json | 22 +++ 36 files changed, 617 insertions(+), 542 deletions(-) create mode 100644 V2/create_activity/function.json create mode 100644 V2/delete_activity/function.json create mode 100644 V2/docker-compose.yml create mode 100644 V2/get_activities/function.json delete mode 100644 V2/tests/api/api_fixtures.py create mode 100644 V2/tests/fixtures.py delete mode 100644 V2/tests/integration/daos/activities_json_dao_test.py create mode 100644 V2/tests/integration/daos/activities_sql_dao_test.py create mode 100644 V2/time_tracker/_infrastructure/__init__.py create mode 100644 V2/time_tracker/_infrastructure/_config.py create mode 100644 V2/time_tracker/_infrastructure/_db.py delete mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py create mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py delete mode 100644 V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json create mode 100644 V2/update_activity/function.json diff --git a/V2/.flake8 b/V2/.flake8 index cb282cae..ecba83ba 100644 --- a/V2/.flake8 +++ b/V2/.flake8 @@ -1,4 +1,4 @@ [flake8] -exclude = .git,__pycache__,./node_modules, +exclude = .git,__pycache__,./node_modules,.venv max-complexity = 10 max_line_length = 120 \ No newline at end of file diff --git a/V2/Makefile b/V2/Makefile index 9a0956ba..45080238 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -4,4 +4,5 @@ install: pip install --upgrade pip pip install -r requirements.txt @echo "Completed! " - +start-local: + docker compose up \ No newline at end of file diff --git a/V2/create_activity/function.json b/V2/create_activity/function.json new file mode 100644 index 00000000..ed3454a9 --- /dev/null +++ b/V2/create_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/", + "authLevel": "anonymous", + "methods": [ + "POST" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "create_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/delete_activity/function.json b/V2/delete_activity/function.json new file mode 100644 index 00000000..d51170fd --- /dev/null +++ b/V2/delete_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id}", + "authLevel": "anonymous", + "methods": [ + "DELETE" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "delete_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/docker-compose.yml b/V2/docker-compose.yml new file mode 100644 index 00000000..a89f5250 --- /dev/null +++ b/V2/docker-compose.yml @@ -0,0 +1,10 @@ +version: '3.9' +services: + database: + image: postgres:14 + ports: + - "5433:5432" + environment: + - POSTGRES_USER=${DB_USER} + - POSTGRES_PASSWORD=${DB_PASS} + - POSTGRES_DB=${DB_NAME} \ No newline at end of file diff --git a/V2/get_activities/function.json b/V2/get_activities/function.json new file mode 100644 index 00000000..ee1efe53 --- /dev/null +++ b/V2/get_activities/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id:?}", + "authLevel": "anonymous", + "methods": [ + "GET" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "get_activities", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file diff --git a/V2/requirements.txt b/V2/requirements.txt index c651bb35..8be0a2a8 100644 --- a/V2/requirements.txt +++ b/V2/requirements.txt @@ -10,4 +10,8 @@ flake8==4.0.1 pytest-mock # To create sample content in tests and API documentation -Faker==4.0.2 \ No newline at end of file +Faker==4.0.2 + +#SQL ALCHEMY +SQLAlchemy==1.4.24 +psycopg2==2.9.1 \ No newline at end of file diff --git a/V2/tests/api/api_fixtures.py b/V2/tests/api/api_fixtures.py deleted file mode 100644 index 21b58021..00000000 --- a/V2/tests/api/api_fixtures.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -import pytest -import shutil - - -@pytest.fixture -def create_temp_activities(tmpdir_factory): - temporary_directory = tmpdir_factory.mktemp("tmp") - json_file = temporary_directory.join("activities.json") - activities = [ - { - 'id': 'c61a4a49-3364-49a3-a7f7-0c5f2d15072b', - 'name': 'Development', - 'description': 'Development', - 'deleted': 'b4327ba6-9f96-49ee-a9ac-3c1edf525172', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': '94ec92e2-a500-4700-a9f6-e41eb7b5507c', - 'name': 'Management', - 'description': 'Description of management', - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - { - 'id': 'd45c770a-b1a0-4bd8-a713-22c01a23e41b', - 'name': 'Operations', - 'description': 'Operation activities performed.', - 'deleted': '7cf6efe5-a221-4fe4-b94f-8945127a489a', - 'status': 'active', - 'tenant_id': 'cc925a5d-9644-4a4f-8d99-0bee49aadd05', - }, - ] - - with open(json_file, 'w') as outfile: - json.dump(activities, outfile) - - yield activities, json_file - shutil.rmtree(temporary_directory) diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index e3bf4ffe..9b2618a8 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,108 +1,135 @@ -from time_tracker.activities._application import _activities as activities +import pytest +import json from faker import Faker import azure.functions as func -import json +import time_tracker.activities._application._activities as azure_activities +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB +from time_tracker.activities import _domain + +ACTIVITY_URL = '/api/activities/' -ACTIVITY_URL = "/api/activities/" + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> dict: + def _new_activity(activity: _domain.Activity, database: DB): + dao = infrastructure.ActivitiesSQLDao(database) + new_activity = dao.create(activity) + return new_activity.__dict__ + return _new_activity def test__activity_azure_endpoint__returns_all_activities( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._get_activities.JSON_PATH = tmp_directory - req = func.HttpRequest(method="GET", body=None, url=ACTIVITY_URL) - - response = activities.get_activities(req) + fake_database = create_fake_database + existent_activities = [activity_factory(), activity_factory()] + inserted_activities = [ + insert_activity(existent_activities[0], fake_database), + insert_activity(existent_activities[1], fake_database) + ] + + azure_activities._get_activities.DATABASE = fake_database + req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) + response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 - assert activities_json_data == json.dumps(activities_json) + assert activities_json_data == json.dumps(inserted_activities) def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._get_activities.JSON_PATH = tmp_directory + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._get_activities.DATABASE = fake_database req = func.HttpRequest( - method="GET", + method='GET', body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.get_activities(req) + response = azure_activities._get_activities.get_activities(req) activitiy_json_data = response.get_body().decode("utf-8") assert response.status_code == 200 - assert activitiy_json_data == json.dumps(activities_json[0]) + assert activitiy_json_data == json.dumps(inserted_activity) def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._delete_activity.JSON_PATH = tmp_directory + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._delete_activity.DATABASE = fake_database req = func.HttpRequest( - method="DELETE", + method='DELETE', body=None, url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.delete_activity(req) + response = azure_activities._delete_activity.delete_activity(req) activity_json_data = json.loads(response.get_body().decode("utf-8")) assert response.status_code == 200 - assert activity_json_data["status"] == "inactive" + assert activity_json_data['status'] == 0 + assert activity_json_data['deleted'] is True def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( - create_temp_activities, + create_fake_database, activity_factory, insert_activity ): - activities_json, tmp_directory = create_temp_activities - activities._update_activity.JSON_PATH = tmp_directory - activity_data = {"description": Faker().sentence()} + fake_database = create_fake_database + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, fake_database) + + azure_activities._update_activity.DATABASE = fake_database + activity_body = {"description": Faker().sentence()} req = func.HttpRequest( - method="PUT", - body=json.dumps(activity_data).encode("utf-8"), + method='PUT', + body=json.dumps(activity_body).encode("utf-8"), url=ACTIVITY_URL, - route_params={"id": activities_json[0]["id"]}, + route_params={"id": inserted_activity["id"]}, ) - response = activities.update_activity(req) + response = azure_activities._update_activity.update_activity(req) activitiy_json_data = response.get_body().decode("utf-8") - new_activity = {**activities_json[0], **activity_data} + inserted_activity.update(activity_body) assert response.status_code == 200 - assert activitiy_json_data == json.dumps(new_activity) + assert activitiy_json_data == json.dumps(inserted_activity) def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_temp_activities, -): - activities_json, tmp_directory = create_temp_activities - activities._create_activity._JSON_PATH = tmp_directory - + create_fake_database, + ): + azure_activities._create_activity.DATABASE = create_fake_database activity_body = { - "id": None, - "name": Faker().user_name(), - "description": Faker().sentence(), - "deleted": Faker().uuid4(), - "status": "active", - "tenant_id": Faker().uuid4(), + 'id': None, + 'name': Faker().user_name(), + 'description': Faker().sentence(), + 'deleted': False, + 'status': 1 } body = json.dumps(activity_body).encode("utf-8") req = func.HttpRequest( - method="POST", - body=body, - url=ACTIVITY_URL, + method='POST', + body=body, + url=ACTIVITY_URL, ) - response = activities.create_activity(req) - activitiy_json_data = response.get_body() + response = azure_activities._create_activity.create_activity(req) + activitiy_json_data = json.loads(response.get_body()) + activity_body['id'] = activitiy_json_data['id'] + assert response.status_code == 201 - assert activitiy_json_data == body + assert activitiy_json_data == activity_body diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index 2741ce95..d1c4928f 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,2 +1,2 @@ # flake8: noqa -from tests.api.api_fixtures import create_temp_activities +from fixtures import _activity_factory, _create_fake_dao, _create_fake_database \ No newline at end of file diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py new file mode 100644 index 00000000..d9539035 --- /dev/null +++ b/V2/tests/fixtures.py @@ -0,0 +1,35 @@ +import pytest + +import time_tracker.activities._domain as domain +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB +from faker import Faker + + +@pytest.fixture(name='activity_factory') +def _activity_factory() -> domain.Activity: + def _make_activity( + name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 + ): + activity = domain.Activity( + id=None, + name=name, + description=description, + deleted=deleted, + status=status + ) + return activity + return _make_activity + + +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + return dao + + +@pytest.fixture(name='create_fake_database') +def _create_fake_database() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + return db_fake diff --git a/V2/tests/integration/daos/activities_json_dao_test.py b/V2/tests/integration/daos/activities_json_dao_test.py deleted file mode 100644 index 8eff9609..00000000 --- a/V2/tests/integration/daos/activities_json_dao_test.py +++ /dev/null @@ -1,152 +0,0 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import Activity -from faker import Faker -import json -import pytest -import typing - - -fake_activities = [ - { - "id": Faker().uuid4(), - "name": Faker().user_name(), - "description": Faker().sentence(), - "deleted": Faker().uuid4(), - "status": "active", - "tenant_id": Faker().uuid4(), - } -] - - -@pytest.fixture(name="create_fake_activities") -def _create_fake_activities(mocker) -> typing.List[Activity]: - def _creator(activities): - read_data = json.dumps(activities) - mocker.patch("builtins.open", mocker.mock_open(read_data=read_data)) - return [Activity(**activity) for activity in activities] - - return _creator - - -def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matches_its_id( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities(fake_activities) - activity_dto = activities.pop() - - result = activities_json_dao.get_by_id(activity_dto.id) - - assert result == activity_dto - - -def test__get_by_id__returns_none__when_no_activity_matches_its_id( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - - result = activities_json_dao.get_by_id(Faker().uuid4()) - - assert result is None - - -def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - number_of_activities = 3 - activities = create_fake_activities(fake_activities * number_of_activities) - - result = activities_json_dao.get_all() - - assert result == activities - - -def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities([]) - - result = activities_json_dao.get_all() - - assert result == activities - - -def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities( - [ - { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - ] - ) - - activity_dto = activities.pop() - result = activities_json_dao.delete(activity_dto.id) - - assert result.status == "inactive" - - -def test_delete__returns_none__when_no_activity_matching_its_id_is_found( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - - result = activities_json_dao.delete(Faker().uuid4()) - - assert result is None - - -def test_update__returns_an_activity_dto__when_found_one_activity_to_update( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activities = create_fake_activities(fake_activities) - activity_dto = activities.pop() - activity_data = {"description": Faker().sentence()} - - result = activities_json_dao.update(activity_dto.id, activity_data) - new_activity = {**activity_dto.__dict__, **activity_data} - - assert result == Activity(**new_activity) - - -def test_update__returns_none__when_doesnt_found_one_activity_to_update( - create_fake_activities, -): - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - create_fake_activities([]) - activity_data = {"description": Faker().sentence()} - - result = activities_json_dao.update("", activity_data) - - assert result is None - - -def test_create_activity__returns_an_activity_dto__when_create_an_activity_that_matches_attributes( - create_fake_activities, -): - create_fake_activities([]) - - activities_json_dao = ActivitiesJsonDao(Faker().file_path()) - activity_data = { - "name": "test_name", - "description": "test_description", - "tenant_id": "test_tenant_id", - "id": "test_id", - "deleted": "test_deleted", - "status": "test_status", - } - result = activities_json_dao.create_activity(activity_data) - assert result == Activity(**activity_data) diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_sql_dao_test.py new file mode 100644 index 00000000..25f62500 --- /dev/null +++ b/V2/tests/integration/daos/activities_sql_dao_test.py @@ -0,0 +1,138 @@ +import pytest +import typing +from faker import Faker + +import time_tracker.activities._domain as domain +import time_tracker.activities._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> domain.Activity: + def _new_activity(activity: domain.Activity, dao: domain.ActivitiesDao): + new_activity = dao.create(activity) + return new_activity + return _new_activity + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + query = dao.activity.delete() + dao.db.get_session().execute(query) + + +def test__create_activity__returns_a_activity_dto__when_saves_correctly_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + inserted_activity = dao.create(existent_activity) + + assert isinstance(inserted_activity, domain.Activity) + assert inserted_activity == existent_activity + + +def test_update__returns_an_update_activity__when_an_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + expected_description = Faker().sentence() + updated_activity = dao.update(inserted_activity.id, None, expected_description, None, None) + + assert isinstance(updated_activity, domain.Activity) + assert updated_activity.id == inserted_activity.id + assert updated_activity.description == expected_description + + +def test_update__returns_none__when_no_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + results = dao.update(existent_activity.id, Faker().name(), None, None, None) + + assert results is None + + +def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_activities_are_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activities = [activity_factory(), activity_factory()] + inserted_activities = [ + insert_activity(existent_activities[0], dao), + insert_activity(existent_activities[1], dao) + ] + + activities = dao.get_all() + + assert isinstance(activities, typing.List) + assert activities == inserted_activities + + +def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matches_its_id_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + activity = dao.get_by_id(inserted_activity.id) + + assert isinstance(activity, domain.Activity) + assert activity.id == inserted_activity.id + assert activity == inserted_activity + + +def test__get_by_id__returns_none__when_no_activity_matches_its_id_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + activity = dao.get_by_id(existent_activity.id) + + assert activity is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_activities_with_sql_database( + create_fake_dao +): + activities = create_fake_dao.get_all() + + assert isinstance(activities, typing.List) + assert activities == [] + + +def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory, insert_activity +): + dao = create_fake_dao + existent_activity = activity_factory() + inserted_activity = insert_activity(existent_activity, dao) + + activity = dao.delete(inserted_activity.id) + + assert isinstance(activity, domain.Activity) + assert activity.id == inserted_activity.id + assert activity.status == 0 + assert activity.deleted is True + + +def test_delete__returns_none__when_no_activity_matching_its_id_is_found_with_sql_database( + create_fake_dao, activity_factory +): + dao = create_fake_dao + existent_activity = activity_factory() + + results = dao.delete(existent_activity.id) + + assert results is None diff --git a/V2/tests/unit/services/activity_service_test.py b/V2/tests/unit/services/activity_service_test.py index befdb1fb..e8816d42 100644 --- a/V2/tests/unit/services/activity_service_test.py +++ b/V2/tests/unit/services/activity_service_test.py @@ -53,7 +53,7 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( activity_service = ActivityService(activity_dao) updated_activity = activity_service.update( - Faker().uuid4(), Faker().pydict() + Faker().uuid4(), Faker().name(), Faker().sentence(), Faker().pyint(), Faker().pybool() ) assert activity_dao.update.called @@ -63,11 +63,11 @@ def test__update_activity__uses_the_activity_dao__to_update_one_activity( def test__create_activity__uses_the_activity_dao__to_create_an_activity(mocker): expected_activity = mocker.Mock() activity_dao = mocker.Mock( - create_activity=mocker.Mock(return_value=expected_activity) + create=mocker.Mock(return_value=expected_activity) ) activity_service = ActivityService(activity_dao) - actual_activity = activity_service.create_activity(Faker().pydict()) + actual_activity = activity_service.create(Faker().pydict()) - assert activity_dao.create_activity.called + assert activity_dao.create.called assert expected_activity == actual_activity diff --git a/V2/tests/unit/use_cases/activities_use_case_test.py b/V2/tests/unit/use_cases/activities_use_case_test.py index 334c7489..ca711019 100644 --- a/V2/tests/unit/use_cases/activities_use_case_test.py +++ b/V2/tests/unit/use_cases/activities_use_case_test.py @@ -1,6 +1,7 @@ -from time_tracker.activities._domain import _use_cases -from pytest_mock import MockFixture from faker import Faker +from pytest_mock import MockFixture + +from time_tracker.activities._domain import _use_cases fake = Faker() @@ -36,17 +37,17 @@ def test__get_activity_by_id_function__uses_the_activity_service__to_retrieve_ac def test__create_activity_function__uses_the_activities_service__to_create_activity( - mocker: MockFixture, -): + mocker: MockFixture, activity_factory + ): expected_activity = mocker.Mock() activity_service = mocker.Mock( - create_activity=mocker.Mock(return_value=expected_activity) + create=mocker.Mock(return_value=expected_activity) ) activity_use_case = _use_cases.CreateActivityUseCase(activity_service) - actual_activity = activity_use_case.create_activity(fake.pydict()) + actual_activity = activity_use_case.create_activity(activity_factory()) - assert activity_service.create_activity.called + assert activity_service.create.called assert expected_activity == actual_activity @@ -54,7 +55,9 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit mocker: MockFixture, ): expected_activity = mocker.Mock() - activity_service = mocker.Mock(delete=mocker.Mock(return_value=expected_activity)) + activity_service = mocker.Mock( + delete=mocker.Mock(return_value=expected_activity) + ) activity_use_case = _use_cases.DeleteActivityUseCase(activity_service) deleted_activity = activity_use_case.delete_activity(fake.uuid4()) @@ -64,13 +67,18 @@ def test__delete_activity_function__uses_the_activity_service__to_change_activit def test__update_activity_function__uses_the_activities_service__to_update_an_activity( - mocker: MockFixture, + mocker: MockFixture, activity_factory ): expected_activity = mocker.Mock() - activity_service = mocker.Mock(update=mocker.Mock(return_value=expected_activity)) + activity_service = mocker.Mock( + update=mocker.Mock(return_value=expected_activity) + ) + new_activity = activity_factory() activity_use_case = _use_cases.UpdateActivityUseCase(activity_service) - updated_activity = activity_use_case.update_activity(fake.uuid4(), fake.pydict()) + updated_activity = activity_use_case.update_activity( + fake.uuid4(), new_activity.name, new_activity.description, new_activity.status, new_activity.deleted + ) assert activity_service.update.called assert expected_activity == updated_activity diff --git a/V2/time_tracker/_infrastructure/__init__.py b/V2/time_tracker/_infrastructure/__init__.py new file mode 100644 index 00000000..ab651958 --- /dev/null +++ b/V2/time_tracker/_infrastructure/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa +from ._db import DB +from ._config import Config diff --git a/V2/time_tracker/_infrastructure/_config.py b/V2/time_tracker/_infrastructure/_config.py new file mode 100644 index 00000000..7f8c8fa7 --- /dev/null +++ b/V2/time_tracker/_infrastructure/_config.py @@ -0,0 +1,20 @@ +import typing +import os + +CONNECTION_STRING = 'postgresql://root:root@localhost:5433/timetracker' + + +class Config(typing.NamedTuple): + DB_CONNECTION_STRING: str + DB_USER: str + DB_PASS: str + DB_NAME: str + + +def load_config(): + return Config( + CONNECTION_STRING if os.environ.get("DB_CONNECTION_STRING") is None else os.environ.get("DB_CONNECTION_STRING"), + os.environ.get("DB_USER"), + os.environ.get("DB_PASS"), + os.environ.get("DB_NAME") + ) diff --git a/V2/time_tracker/_infrastructure/_db.py b/V2/time_tracker/_infrastructure/_db.py new file mode 100644 index 00000000..8fe5cef1 --- /dev/null +++ b/V2/time_tracker/_infrastructure/_db.py @@ -0,0 +1,20 @@ +import sqlalchemy + +from . import _config + + +class DB(): + config = _config.load_config() + connection = None + engine = None + conn_string = config.DB_CONNECTION_STRING + metadata = sqlalchemy.MetaData() + + def __init__(self, conn_string: str = conn_string): + self.engine = sqlalchemy.create_engine(conn_string) + + def get_session(self): + if self.connection is None: + self.metadata.create_all(self.engine) + self.connection = self.engine.connect() + return self.connection diff --git a/V2/time_tracker/activities/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py index be53815a..94f3701d 100644 --- a/V2/time_tracker/activities/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -6,14 +6,13 @@ from ... import _domain from ... import _infrastructure +from time_tracker._infrastructure import DB -_JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +DATABASE = DB() def create_activity(req: func.HttpRequest) -> func.HttpResponse: - activity_dao = _infrastructure.ActivitiesJsonDao(_JSON_PATH) + activity_dao = _infrastructure.ActivitiesSQLDao(DATABASE) activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) @@ -30,11 +29,10 @@ def create_activity(req: func.HttpRequest) -> func.HttpResponse: name=activity_data['name'], description=activity_data['description'], status=activity_data['status'], - deleted=activity_data['deleted'], - tenant_id=activity_data['tenant_id'] + deleted=activity_data['deleted'] ) - created_activity = use_case.create_activity(activity_to_create.__dict__) + created_activity = use_case.create_activity(activity_to_create) if not create_activity: return func.HttpResponse( body={'error': 'activity could not be created'}, diff --git a/V2/time_tracker/activities/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py index 80d55446..14ada8ab 100644 --- a/V2/time_tracker/activities/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -1,36 +1,41 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, _use_cases - -import azure.functions as func import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def delete_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( 'Python HTTP trigger function processed a request to delete an activity.' ) - activity_id = req.route_params.get('id') - response = _delete(activity_id) - status_code = 200 if response != b'Not found' else 404 - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _delete(activity_id: str) -> str: - activity_use_case = _use_cases.DeleteActivityUseCase( - _create_activity_service(JSON_PATH) + try: + activity_id = int(req.route_params.get('id')) + response = _delete(activity_id) + status_code = 200 if response != b'Not found' else 404 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _delete(activity_id: int) -> str: + activity_use_case = _domain._use_cases.DeleteActivityUseCase( + _create_activity_service(DATABASE) ) activity = activity_use_case.delete_activity(activity_id) return json.dumps(activity.__dict__) if activity else b'Not found' -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py index 9f52069d..d92503dd 100644 --- a/V2/time_tracker/activities/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -1,13 +1,13 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, _use_cases - -import azure.functions as func import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def get_activities(req: func.HttpRequest) -> func.HttpResponse: @@ -17,21 +17,26 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: activity_id = req.route_params.get('id') status_code = 200 - if activity_id: - response = _get_by_id(activity_id) - if response == b'Not Found': - status_code = 404 - else: - response = _get_all() - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _get_by_id(activity_id: str) -> str: - activity_use_case = _use_cases.GetActivityUseCase( - _create_activity_service(JSON_PATH) + try: + if activity_id: + response = _get_by_id(int(activity_id)) + if response == b'Not Found': + status_code = 404 + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _get_by_id(activity_id: int) -> str: + activity_use_case = _domain._use_cases.GetActivityUseCase( + _create_activity_service(DATABASE) ) activity = activity_use_case.get_activity_by_id(activity_id) @@ -39,8 +44,8 @@ def _get_by_id(activity_id: str) -> str: def _get_all() -> str: - activities_use_case = _use_cases.GetActivitiesUseCase( - _create_activity_service(JSON_PATH) + activities_use_case = _domain._use_cases.GetActivitiesUseCase( + _create_activity_service(DATABASE) ) return json.dumps( [ @@ -50,6 +55,6 @@ def _get_all() -> str: ) -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py index 1709f77a..0933fd72 100644 --- a/V2/time_tracker/activities/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -1,44 +1,54 @@ -from time_tracker.activities._infrastructure import ActivitiesJsonDao -from time_tracker.activities._domain import ActivityService, Activity, _use_cases - -import azure.functions as func import dataclasses import json import logging -JSON_PATH = ( - 'activities/_infrastructure/_data_persistence/activities_data.json' -) +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATABASE = DB() def update_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( 'Python HTTP trigger function processed a request to update an activity.' ) - activity_id = req.route_params.get('id') - activity_data = req.get_json() if req.get_body() else {} - activity_keys = [field.name for field in dataclasses.fields(Activity)] - - if all(key in activity_keys for key in activity_data.keys()): - response = _update(activity_id, activity_data) - status_code = 200 - else: - response = b'Incorrect activity body' - status_code = 400 - - return func.HttpResponse( - body=response, status_code=status_code, mimetype="application/json" - ) - - -def _update(activity_id: str, activity_data: dict) -> str: - activity_use_case = _use_cases.UpdateActivityUseCase( - _create_activity_service(JSON_PATH) + try: + activity_id = int(req.route_params.get('id')) + activity_data = req.get_json() if req.get_body() else {} + activity_keys = [field.name for field in dataclasses.fields(_domain.Activity)] + + if all(key in activity_keys for key in activity_data.keys()): + response = _update(activity_id, activity_data) + status_code = 200 + else: + response = b'Incorrect activity body' + status_code = 400 + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"Invalid format id", status_code=400, mimetype="application/json" + ) + + +def _update(activity_id: int, activity_data: dict) -> str: + activity_use_case = _domain._use_cases.UpdateActivityUseCase( + _create_activity_service(DATABASE) ) - activity = activity_use_case.update_activity(activity_id, activity_data) + activity = activity_use_case.update_activity( + activity_id, activity_data.get("name"), + activity_data.get("description"), + activity_data.get("status"), + activity_data.get("deleted") + ) return json.dumps(activity.__dict__) if activity else b'Not Found' -def _create_activity_service(path: str): - activity_json = ActivitiesJsonDao(path) - return ActivityService(activity_json) +def _create_activity_service(db: DB) -> _domain.ActivityService: + activity_sql = _infrastructure.ActivitiesSQLDao(db) + return _domain.ActivityService(activity_sql) diff --git a/V2/time_tracker/activities/_domain/_entities/_activity.py b/V2/time_tracker/activities/_domain/_entities/_activity.py index 86f56ee9..cf574054 100644 --- a/V2/time_tracker/activities/_domain/_entities/_activity.py +++ b/V2/time_tracker/activities/_domain/_entities/_activity.py @@ -3,9 +3,8 @@ @dataclass(frozen=True) class Activity: - id: str + id: int name: str description: str - deleted: str - status: str - tenant_id: str + deleted: bool + status: int diff --git a/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py index 80b8c711..e079ed6a 100644 --- a/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py +++ b/V2/time_tracker/activities/_domain/_persistence_contracts/_activities_dao.py @@ -5,7 +5,7 @@ class ActivitiesDao(abc.ABC): @abc.abstractmethod - def get_by_id(self, id: str) -> Activity: + def get_by_id(self, id: int) -> Activity: pass @abc.abstractmethod @@ -13,13 +13,13 @@ def get_all(self) -> typing.List[Activity]: pass @abc.abstractmethod - def delete(self, id: str) -> Activity: + def delete(self, id: int) -> Activity: pass @abc.abstractmethod - def update(self, id: str, new_activity: dict) -> Activity: + def update(self, id: int, name: str, description: str, status: int, deleted: bool) -> Activity: pass @abc.abstractmethod - def create_activity(self, activity_data: dict) -> Activity: + def create(self, activity_data: Activity) -> Activity: pass diff --git a/V2/time_tracker/activities/_domain/_services/_activity.py b/V2/time_tracker/activities/_domain/_services/_activity.py index a564577a..a2c45e54 100644 --- a/V2/time_tracker/activities/_domain/_services/_activity.py +++ b/V2/time_tracker/activities/_domain/_services/_activity.py @@ -6,17 +6,17 @@ class ActivityService: def __init__(self, activities_dao: ActivitiesDao): self.activities_dao = activities_dao - def get_by_id(self, activity_id: str) -> Activity: + def get_by_id(self, activity_id: int) -> Activity: return self.activities_dao.get_by_id(activity_id) def get_all(self) -> typing.List[Activity]: return self.activities_dao.get_all() - def delete(self, activity_id: str) -> Activity: + def delete(self, activity_id: int) -> Activity: return self.activities_dao.delete(activity_id) - def update(self, activity_id: str, new_activity: dict) -> Activity: - return self.activities_dao.update(activity_id, new_activity) + def update(self, activity_id: int, name: str, description: str, status: int, deleted: bool) -> Activity: + return self.activities_dao.update(activity_id, name, description, status, deleted) - def create_activity(self, activity_data: dict) -> Activity: - return self.activities_dao.create_activity(activity_data) + def create(self, activity_data: Activity) -> Activity: + return self.activities_dao.create(activity_data) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py index 241718db..26d0f475 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_create_activity_use_case.py @@ -5,5 +5,5 @@ class CreateActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def create_activity(self, activity_data: dict) -> Activity: - return self.activity_service.create_activity(activity_data) + def create_activity(self, activity_data: Activity) -> Activity: + return self.activity_service.create(activity_data) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py index 5af54ee8..67fcf31c 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_delete_activity_use_case.py @@ -5,5 +5,5 @@ class DeleteActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def delete_activity(self, id: str) -> Activity: + def delete_activity(self, id: int) -> Activity: return self.activity_service.delete(id) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py index 04ca442e..45dbbad0 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_get_activity_by_id_use_case.py @@ -5,5 +5,5 @@ class GetActivityUseCase: def __init__(self, activity_service: ActivityService): self.activity_service = activity_service - def get_activity_by_id(self, id: str) -> Activity: + def get_activity_by_id(self, id: int) -> Activity: return self.activity_service.get_by_id(id) diff --git a/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py index a890d85f..c270f465 100644 --- a/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py +++ b/V2/time_tracker/activities/_domain/_use_cases/_update_activity_use_case.py @@ -6,6 +6,6 @@ def __init__(self, activity_service: ActivityService): self.activity_service = activity_service def update_activity( - self, activity_id: str, new_activity: dict + self, activity_id: int, name: str, description: str, status: int, deleted: bool ) -> Activity: - return self.activity_service.update(activity_id, new_activity) + return self.activity_service.update(activity_id, name, description, status, deleted) diff --git a/V2/time_tracker/activities/_infrastructure/__init__.py b/V2/time_tracker/activities/_infrastructure/__init__.py index 1734e5b8..b3896baf 100644 --- a/V2/time_tracker/activities/_infrastructure/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._data_persistence import ActivitiesJsonDao +from ._data_persistence import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py index d2a77fc4..1e7220c5 100644 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._activities_json_dao import ActivitiesJsonDao +from ._activities_sql_dao import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py deleted file mode 100644 index 60859a15..00000000 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_json_dao.py +++ /dev/null @@ -1,105 +0,0 @@ -from time_tracker.activities._domain import ActivitiesDao, Activity -import dataclasses -import json -import typing - - -class ActivitiesJsonDao(ActivitiesDao): - def __init__(self, json_data_file_path: str): - self.json_data_file_path = json_data_file_path - self.activity_keys = [ - field.name for field in dataclasses.fields(Activity) - ] - - def get_by_id(self, activity_id: str) -> Activity: - activity = { - activity.get('id'): activity - for activity in self.__get_activities_from_file() - }.get(activity_id) - - return self.__create_activity_dto(activity) if activity else None - - def get_all(self) -> typing.List[Activity]: - return [ - self.__create_activity_dto(activity) - for activity in self.__get_activities_from_file() - ] - - def delete(self, activity_id: str) -> Activity: - activity = self.get_by_id(activity_id) - if activity: - activity_deleted = {**activity.__dict__, 'status': 'inactive'} - activities_updated = list( - map( - lambda activity: activity - if activity.get('id') != activity_id - else activity_deleted, - self.__get_activities_from_file(), - ) - ) - - try: - file = open(self.json_data_file_path, 'w') - json.dump(activities_updated, file) - file.close() - - return self.__create_activity_dto(activity_deleted) - - except FileNotFoundError: - return None - - else: - return None - - def update(self, activity_id: str, new_activity: dict) -> Activity: - activity = self.get_by_id(activity_id) - if not activity: - return None - - new_activity = {**activity.__dict__, **new_activity} - - activities_updated = list( - map( - lambda activity: activity - if activity.get('id') != activity_id - else new_activity, - self.__get_activities_from_file(), - ) - ) - - try: - file = open(self.json_data_file_path, 'w') - json.dump(activities_updated, file) - file.close() - - return self.__create_activity_dto(new_activity) - - except FileNotFoundError: - return None - - def create_activity(self, activity_data: dict) -> Activity: - activities = self.__get_activities_from_file() - activities.append(activity_data) - - try: - with open(self.json_data_file_path, 'w') as outfile: - json.dump(activities, outfile) - - return self.__create_activity_dto(activity_data) - except FileNotFoundError: - print("Can not create activity") - - def __get_activities_from_file(self) -> typing.List[dict]: - try: - file = open(self.json_data_file_path) - activities = json.load(file) - file.close() - - return activities - - except FileNotFoundError: - return [] - - def __create_activity_dto(self, activity: dict) -> Activity: - activity = {key: activity.get(key) for key in self.activity_keys} - return Activity(**activity) diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py new file mode 100644 index 00000000..e69dd1a4 --- /dev/null +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py @@ -0,0 +1,67 @@ +import dataclasses +import typing + +import sqlalchemy +import sqlalchemy.sql as sql + +import time_tracker.activities._domain as domain +from time_tracker._infrastructure import _db + + +class ActivitiesSQLDao(domain.ActivitiesDao): + + def __init__(self, database: _db.DB): + self.activity_keys = [ + field.name for field in dataclasses.fields(domain.Activity) + ] + self.db = database + self.activity = sqlalchemy.Table( + 'activity', + self.db.metadata, + sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True, autoincrement=True), + sqlalchemy.Column('name', sqlalchemy.String), + sqlalchemy.Column('description', sqlalchemy.String), + sqlalchemy.Column('deleted', sqlalchemy.Boolean), + sqlalchemy.Column('status', sqlalchemy.SmallInteger), + extend_existing=True, + ) + + def get_by_id(self, activity_id: int) -> domain.Activity: + query = sql.select(self.activity).where(self.activity.c.id == activity_id) + activity = self.db.get_session().execute(query).one_or_none() + return self.__create_activity_dto(dict(activity)) if activity else None + + def get_all(self) -> typing.List[domain.Activity]: + query = sql.select(self.activity) + result = self.db.get_session().execute(query) + return [ + self.__create_activity_dto(dict(activity)) + for activity in result + ] + + def create(self, activity_data: domain.Activity) -> domain.Activity: + new_activity = activity_data.__dict__ + new_activity.pop('id', None) + new_activity.update({"status": 1, "deleted": False}) + + query = self.activity.insert().values(new_activity).return_defaults() + activity = self.db.get_session().execute(query) + new_activity.update({"id": activity.inserted_primary_key[0]}) + return self.__create_activity_dto(new_activity) + + def delete(self, activity_id: int) -> domain.Activity: + query = self.activity.update().where(self.activity.c.id == activity_id).values({"status": 0, "deleted": True}) + self.db.get_session().execute(query) + return self.get_by_id(activity_id) + + def update(self, activity_id: int, name: str, description: str, status: int, deleted: bool) -> domain.Activity: + new_activity = {"name": name, "description": description, "status": status, "deleted": deleted} + activity_validated = {key: value for (key, value) in new_activity.items() if value is not None} + + query = self.activity.update().where(self.activity.c.id == activity_id).values(activity_validated) + self.db.get_session().execute(query) + return self.get_by_id(activity_id) + + def __create_activity_dto(self, activity: dict) -> domain.Activity: + activity = {key: activity.get(key)for key in self.activity_keys} + return domain.Activity(**activity) diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json b/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json deleted file mode 100644 index 961251db..00000000 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/activities_data.json +++ /dev/null @@ -1,65 +0,0 @@ -[ - { - "name": "Development", - "description": "Development", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "c61a4a49-3364-49a3-a7f7-0c5f2d15072b", - "_rid": "QUwFAPuumiRhAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRhAAAAAAAAAA==/", - "_etag": "\"4e006cc9-0000-0500-0000-607dcc0d0000\"", - "_attachments": "attachments/", - "_last_event_ctx": { - "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "delete", - "description": null, - "container_id": "activity", - "session_id": null - }, - "deleted": "b4327ba6-9f96-49ee-a9ac-3c1edf525172", - "status": null, - "_ts": 1618856973 - }, - { - "name": "Management", - "description": null, - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "94ec92e2-a500-4700-a9f6-e41eb7b5507c", - "_last_event_ctx": { - "user_id": "dd76e5d6-3949-46fd-b418-f15bf7c354fa", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "delete", - "description": null, - "container_id": "activity", - "session_id": null - }, - "_rid": "QUwFAPuumiRfAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRfAAAAAAAAAA==/", - "_etag": "\"4e0069c9-0000-0500-0000-607dcc0d0000\"", - "_attachments": "attachments/", - "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", - "status": null, - "_ts": 1618856973 - }, - { - "name": "Operations", - "description": "Operation activities performed.", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "id": "d45c770a-b1a0-4bd8-a713-22c01a23e41b", - "_rid": "QUwFAPuumiRjAAAAAAAAAA==", - "_self": "dbs/QUwFAA==/colls/QUwFAPuumiQ=/docs/QUwFAPuumiRjAAAAAAAAAA==/", - "_etag": "\"09009a4d-0000-0500-0000-614b66fb0000\"", - "_attachments": "attachments/", - "_last_event_ctx": { - "user_id": "82ed0f65-051c-4898-890f-870805900e21", - "tenant_id": "cc925a5d-9644-4a4f-8d99-0bee49aadd05", - "action": "update", - "description": null, - "container_id": "activity", - "session_id": null - }, - "deleted": "7cf6efe5-a221-4fe4-b94f-8945127a489a", - "status": "active", - "_ts": 1632331515 - } -] diff --git a/V2/time_tracker/activities/interface.py b/V2/time_tracker/activities/interface.py index 877b631e..24c888ad 100644 --- a/V2/time_tracker/activities/interface.py +++ b/V2/time_tracker/activities/interface.py @@ -2,4 +2,4 @@ from ._application import get_activities from ._application import delete_activity from ._application import update_activity -from ._application import create_activity \ No newline at end of file +from ._application import create_activity diff --git a/V2/update_activity/function.json b/V2/update_activity/function.json new file mode 100644 index 00000000..97c9fb49 --- /dev/null +++ b/V2/update_activity/function.json @@ -0,0 +1,22 @@ +{ + "disabled": false, + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "route": "activities/{id}", + "authLevel": "anonymous", + "methods": [ + "PUT" + ] + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ], + "entryPoint": "update_activity", + "scriptFile": "../time_tracker/activities/interface.py" +} \ No newline at end of file From b869c09f890b6867a923b5a11331b1902870126f Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Mon, 15 Nov 2021 10:57:53 -0500 Subject: [PATCH 45/74] fix: TT-384 Revert get all activities from blob storage (#348) (#349) This reverts commit 80f4ed136b81c14f4265384bdd888bff2b3c6206. --- time_tracker_api/activities/activities_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index ddb46411..158c8053 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -143,7 +143,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all_v1( + def get_all( self, conditions: dict = None, activities_id: List = None, @@ -162,7 +162,7 @@ def get_all_v1( ) return activities - def get_all(self, conditions: dict = None) -> list: + def get_all_test(self, conditions: dict = None) -> list: event_ctx = self.create_event_context("read-many") activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) return activities From df3fe5caf7e4dc2e6b8f35590848e17673fc5a38 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 15 Nov 2021 16:15:18 +0000 Subject: [PATCH 46/74] 0.44.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ time_tracker_api/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index de620596..b6b74353 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.44.0 (2021-11-15) +### Feature +* TT-357 Create V2 Activities Azure DAO ([#334](https://github.com/ioet/time-tracker-backend/issues/334)) ([`3a99add`](https://github.com/ioet/time-tracker-backend/commit/3a99add39a3130c540d86b02c5a69dbda8536e8e)) + +### Fix +* TT-384 Revert get all activities from blob storage (#348) ([#349](https://github.com/ioet/time-tracker-backend/issues/349)) ([`b869c09`](https://github.com/ioet/time-tracker-backend/commit/b869c09f890b6867a923b5a11331b1902870126f)) + ## v0.43.1 (2021-11-12) ### Fix * TT-393 userid convert to list ([#339](https://github.com/ioet/time-tracker-backend/issues/339)) ([`6e2108e`](https://github.com/ioet/time-tracker-backend/commit/6e2108ee03dcfd48fa9676a69591248a2467f27c)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index d5f90b8c..a262ca73 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.43.1' +__version__ = '0.44.0' From 1448fc2bc8dce7d8f50c758a910182d7fe9c011a Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 17 Nov 2021 16:00:01 -0500 Subject: [PATCH 47/74] ci: TT-411 inject secrets environment and test_db_connection (#351) --- .github/workflows/python-package.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 36bff27a..2f64bc87 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,6 +29,14 @@ jobs: run: | pip install -r V2/requirements.txt + - name: Inject Secrets + env: + ENVIRONMENT: ${{ secrets.environment }} + TEST_DB_CONNECTION: ${{ secrets.test_db_connection }} + run: | + echo $ENVIRONMENT + echo $TEST_DB_CONNECTION + - name: Lint with flake8 run: | cd V2 From 32ee36f39e81866c2f0767cf243c61afde6841c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20Cobe=C3=B1a=20Cede=C3=B1o?= Date: Thu, 18 Nov 2021 10:05:00 -0500 Subject: [PATCH 48/74] feat: TT-399 Config use makefile to executing tests (#350) * feat: TT-399 Config use makefile to executing tests * feat: TT-399 quit comment on line for pip upgrade * fix: TT-399 inject environment variable for tests Co-authored-by: Alexander --- V2/Makefile | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/V2/Makefile b/V2/Makefile index 45080238..135e96d0 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -1,8 +1,33 @@ +.PHONY: help +help: + @echo "---------------HELP-----------------" + @echo "To install the dependencies type make install" + @echo "To test the project type make test" + @echo "To run the local database type make start-local" + @echo "To run all comands type make ci" + @echo "------------------------------------" + +.PHONY: install install: - @echo "Installing Time Tracker" + @echo "=========================================Installing dependencies Time Tracker=========================================" npm install pip install --upgrade pip pip install -r requirements.txt @echo "Completed! " + +.PHONY: test +test: export ENVIRONMENT = test +test: export TEST_DB_CONNECTION = sqlite:///:memory: +test: + @echo "=========================================Lint with flake8=========================================" + flake8 . --show-source --statistics + @echo "Completed flake8!" + @echo "=========================================Test with pytest=========================================" + python -m pytest -v + @echo "Completed test!" + start-local: - docker compose up \ No newline at end of file + docker compose up + +.PHONY: ci +ci: install test \ No newline at end of file From 10cc4269e4e60c6eff77bf1cf02cdf0d31dac86f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gabriel=20Cobe=C3=B1a=20Cede=C3=B1o?= Date: Thu, 18 Nov 2021 17:49:50 -0500 Subject: [PATCH 49/74] docs: TT-399 Readme update how to use makefile (#354) * docs: TT-399 Readme update how to use makefile * docs: TT-399 Readme update reqs to use makefile * docs: TT-399 Text correction --- V2/README.md | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/V2/README.md b/V2/README.md index e84c0268..f414079d 100644 --- a/V2/README.md +++ b/V2/README.md @@ -1,3 +1,23 @@ -# Azure Functions +# time-tracker-api V2 Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. + +## Requirements to use makefile + +- Python version 3.6 or 3.7. + +- Use an environment to install requirements (pyenv). + +## How to use makefile + +Execute the next command to show makefile help: + +```shell +make help +``` + +- To install the dependencies type the command ```make install``` + +- To test the project type the command ```make test``` + +- To run the local database type the command ```make start-local``` From 5f107f33cb640f7fa8e498db2157efb2d11f401d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Fri, 19 Nov 2021 09:37:54 -0500 Subject: [PATCH 50/74] feat: TT-401-Post-v2-time-entries (#344) * feat: TT-401 Implemented service, end-point, dao, test- time entries * feat: TT-401 validated request create time entry * fix: TT-401 implemented faker url * refactor: TT-401 changed the variable name * fix: implementation of the database connection * refactor: TT-401 fixtures changed * fix: TT-401 solution of comments, change of config * ci: TT-401 fix inject secrets * refactor: TT-401 rename of functions and imports * fix: TT-401 changed test db storage --- .github/workflows/python-package.yml | 11 +--- V2/create_activity/function.json | 22 ------- V2/delete_activity/function.json | 22 ------- V2/get_activities/function.json | 22 ------- V2/serverless.yml | 10 +++ .../azure/activity_azure_endpoints_test.py | 49 +++++--------- .../azure/time_entry_azure_endpoints_test.py | 28 ++++++++ V2/tests/conftest.py | 3 +- V2/tests/fixtures.py | 64 +++++++++++++++---- .../daos/activities_sql_dao_test.py | 21 +++--- .../integration/daos/time_entries_dao_test.py | 48 ++++++++++++++ .../unit/services/time_entry_service_test.py | 14 ++++ .../use_cases/time_entries_use_case_test.py | 18 ++++++ V2/time_tracker/_infrastructure/_config.py | 15 ++--- V2/time_tracker/_infrastructure/_db.py | 2 +- .../time_entries/_application/__init__.py | 2 + .../_application/_time_entries/__init__.py | 2 + .../_time_entries/_create_time_entry.py | 63 ++++++++++++++++++ .../time_entries/_domain/__init__.py | 7 ++ .../_domain/_entities/__init__.py | 2 + .../_domain/_entities/_time_entry.py | 17 +++++ .../_persistence_contracts/__init__.py | 2 + .../_time_entries_dao.py | 9 +++ .../_domain/_services/__init__.py | 2 + .../_domain/_services/_time_entry.py | 10 +++ .../_domain/_use_cases/__init__.py | 2 + .../_use_cases/_create_time_entry_use_case.py | 10 +++ .../time_entries/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_time_entries_dao.py | 49 ++++++++++++++ V2/time_tracker/time_entries/interface.py | 2 + V2/update_activity/function.json | 22 ------- 32 files changed, 388 insertions(+), 166 deletions(-) delete mode 100644 V2/create_activity/function.json delete mode 100644 V2/delete_activity/function.json delete mode 100644 V2/get_activities/function.json create mode 100644 V2/tests/api/azure/time_entry_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/time_entries_dao_test.py create mode 100644 V2/tests/unit/services/time_entry_service_test.py create mode 100644 V2/tests/unit/use_cases/time_entries_use_case_test.py create mode 100644 V2/time_tracker/time_entries/_application/__init__.py create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/__init__.py create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_entities/_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py create mode 100644 V2/time_tracker/time_entries/_domain/_services/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_services/_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/__init__.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py create mode 100644 V2/time_tracker/time_entries/interface.py delete mode 100644 V2/update_activity/function.json diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2f64bc87..1c700563 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -14,6 +14,9 @@ jobs: runs-on: ubuntu-latest strategy: max-parallel: 5 + env: + ENVIRONMENT: ${{ secrets.ENVIRONMENT }} + TEST_DB_CONNECTION: ${{ secrets.TEST_DB_CONNECTION }} steps: - uses: actions/checkout@v2 - name: Set up Python 3.10.0 @@ -29,14 +32,6 @@ jobs: run: | pip install -r V2/requirements.txt - - name: Inject Secrets - env: - ENVIRONMENT: ${{ secrets.environment }} - TEST_DB_CONNECTION: ${{ secrets.test_db_connection }} - run: | - echo $ENVIRONMENT - echo $TEST_DB_CONNECTION - - name: Lint with flake8 run: | cd V2 diff --git a/V2/create_activity/function.json b/V2/create_activity/function.json deleted file mode 100644 index ed3454a9..00000000 --- a/V2/create_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/", - "authLevel": "anonymous", - "methods": [ - "POST" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "create_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/delete_activity/function.json b/V2/delete_activity/function.json deleted file mode 100644 index d51170fd..00000000 --- a/V2/delete_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "DELETE" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "delete_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/get_activities/function.json b/V2/get_activities/function.json deleted file mode 100644 index ee1efe53..00000000 --- a/V2/get_activities/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id:?}", - "authLevel": "anonymous", - "methods": [ - "GET" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "get_activities", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file diff --git a/V2/serverless.yml b/V2/serverless.yml index 0eb3f42f..c6c5e34b 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -76,3 +76,13 @@ functions: - POST route: activities/ authLevel: anonymous + + create_time_entry: + handler: time_tracker/time_entries/interface.create_time_entry + events: + - http: true + x-azure-settings: + methods: + - POST + route: time-entries/ + authLevel: anonymous diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 9b2618a8..994c74c7 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -1,37 +1,24 @@ -import pytest import json from faker import Faker import azure.functions as func import time_tracker.activities._application._activities as azure_activities -import time_tracker.activities._infrastructure as infrastructure -from time_tracker._infrastructure import DB -from time_tracker.activities import _domain ACTIVITY_URL = '/api/activities/' -@pytest.fixture(name='insert_activity') -def _insert_activity() -> dict: - def _new_activity(activity: _domain.Activity, database: DB): - dao = infrastructure.ActivitiesSQLDao(database) - new_activity = dao.create(activity) - return new_activity.__dict__ - return _new_activity - - def test__activity_azure_endpoint__returns_all_activities( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], fake_database), - insert_activity(existent_activities[1], fake_database) + insert_activity(existent_activities[0], test_db).__dict__, + insert_activity(existent_activities[1], test_db).__dict__ ] - azure_activities._get_activities.DATABASE = fake_database + azure_activities._get_activities.DATABASE = test_db + req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") @@ -41,13 +28,12 @@ def test__activity_azure_endpoint__returns_all_activities( def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_its_id( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._get_activities.DATABASE = fake_database + azure_activities._get_activities.DATABASE = test_db req = func.HttpRequest( method='GET', body=None, @@ -63,13 +49,12 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__when_an_activity_matching_its_id_is_found( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._delete_activity.DATABASE = fake_database + azure_activities._delete_activity.DATABASE = test_db req = func.HttpRequest( method='DELETE', body=None, @@ -86,13 +71,12 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_activity_to_update( - create_fake_database, activity_factory, insert_activity + test_db, activity_factory, insert_activity ): - fake_database = create_fake_database existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, fake_database) + inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._update_activity.DATABASE = fake_database + azure_activities._update_activity.DATABASE = test_db activity_body = {"description": Faker().sentence()} req = func.HttpRequest( method='PUT', @@ -109,10 +93,7 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act assert activitiy_json_data == json.dumps(inserted_activity) -def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes( - create_fake_database, - ): - azure_activities._create_activity.DATABASE = create_fake_database +def test__activity_azure_endpoint__creates_an_activity__when_activity_has_all_attributes(): activity_body = { 'id': None, 'name': Faker().user_name(), diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py new file mode 100644 index 00000000..f801dad9 --- /dev/null +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -0,0 +1,28 @@ +import json + +import azure.functions as func + +import time_tracker.time_entries._application._time_entries as azure_time_entries + +TIME_ENTRY_URL = "/api/time-entries/" + + +def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_all_attributes( + test_db, time_entry_factory, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entry_body = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]").__dict__ + + body = json.dumps(time_entry_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=TIME_ENTRY_URL, + ) + + response = azure_time_entries._create_time_entry.create_time_entry(req) + time_entry_json_data = json.loads(response.get_body()) + time_entry_body['id'] = time_entry_json_data['id'] + + assert response.status_code == 201 + assert time_entry_json_data == time_entry_body diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index d1c4928f..cf6e362f 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,2 +1,3 @@ # flake8: noqa -from fixtures import _activity_factory, _create_fake_dao, _create_fake_database \ No newline at end of file +from fixtures import _activity_factory, _test_db, _insert_activity +from fixtures import _time_entry_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index d9539035..51ee5e5d 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -1,17 +1,18 @@ import pytest +from faker import Faker -import time_tracker.activities._domain as domain -import time_tracker.activities._infrastructure as infrastructure +import time_tracker.activities._domain as activities_domain +import time_tracker.activities._infrastructure as activities_infrastructure +import time_tracker.time_entries._domain as time_entries_domain from time_tracker._infrastructure import DB -from faker import Faker @pytest.fixture(name='activity_factory') -def _activity_factory() -> domain.Activity: +def _activity_factory() -> activities_domain.Activity: def _make_activity( name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 ): - activity = domain.Activity( + activity = activities_domain.Activity( id=None, name=name, description=description, @@ -22,14 +23,49 @@ def _make_activity( return _make_activity -@pytest.fixture(name='create_fake_dao') -def _create_fake_dao() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - dao = infrastructure.ActivitiesSQLDao(db_fake) - return dao +@pytest.fixture(name='test_db') +def _test_db() -> DB: + db_fake = DB() + db_fake.get_session().execute("pragma foreign_keys=ON") + return db_fake -@pytest.fixture(name='create_fake_database') -def _create_fake_database() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') - return db_fake +@pytest.fixture(name='time_entry_factory') +def _time_entry_factory() -> time_entries_domain.TimeEntry: + def _make_time_entry( + id=Faker().random_int(), + start_date=str(Faker().date_time()), + owner_id=Faker().random_int(), + description=Faker().sentence(), + activity_id=Faker().random_int(), + uri=Faker().domain_name(), + technologies=["jira", "git"], + end_date=str(Faker().date_time()), + deleted=False, + timezone_offset="300", + project_id=Faker().random_int(), + ): + time_entry = time_entries_domain.TimeEntry( + id=id, + start_date=start_date, + owner_id=owner_id, + description=description, + activity_id=activity_id, + uri=uri, + technologies=technologies, + end_date=end_date, + deleted=deleted, + timezone_offset=timezone_offset, + project_id=project_id, + ) + return time_entry + return _make_time_entry + + +@pytest.fixture(name='insert_activity') +def _insert_activity() -> dict: + def _new_activity(activity: activities_domain.Activity, database: DB): + dao = activities_infrastructure.ActivitiesSQLDao(database) + new_activity = dao.create(activity) + return new_activity + return _new_activity diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_sql_dao_test.py index 25f62500..0f0170af 100644 --- a/V2/tests/integration/daos/activities_sql_dao_test.py +++ b/V2/tests/integration/daos/activities_sql_dao_test.py @@ -7,12 +7,11 @@ from time_tracker._infrastructure import DB -@pytest.fixture(name='insert_activity') -def _insert_activity() -> domain.Activity: - def _new_activity(activity: domain.Activity, dao: domain.ActivitiesDao): - new_activity = dao.create(activity) - return new_activity - return _new_activity +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ActivitiesDao: + db_fake = DB('sqlite:///:memory:') + dao = infrastructure.ActivitiesSQLDao(db_fake) + return dao @pytest.fixture(name='clean_database', autouse=True) @@ -41,7 +40,7 @@ def test_update__returns_an_update_activity__when_an_activity_matching_its_id_is ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) expected_description = Faker().sentence() updated_activity = dao.update(inserted_activity.id, None, expected_description, None, None) @@ -68,8 +67,8 @@ def test__get_all__returns_a_list_of_activity_dto_objects__when_one_or_more_acti dao = create_fake_dao existent_activities = [activity_factory(), activity_factory()] inserted_activities = [ - insert_activity(existent_activities[0], dao), - insert_activity(existent_activities[1], dao) + insert_activity(existent_activities[0], dao.db), + insert_activity(existent_activities[1], dao.db) ] activities = dao.get_all() @@ -83,7 +82,7 @@ def test_get_by_id__returns_an_activity_dto__when_found_one_activity_that_matche ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.get_by_id(inserted_activity.id) @@ -117,7 +116,7 @@ def test_delete__returns_an_activity_with_inactive_status__when_an_activity_matc ): dao = create_fake_dao existent_activity = activity_factory() - inserted_activity = insert_activity(existent_activity, dao) + inserted_activity = insert_activity(existent_activity, dao.db) activity = dao.delete(inserted_activity.id) diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py new file mode 100644 index 00000000..403f80c6 --- /dev/null +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -0,0 +1,48 @@ +import pytest + + +import time_tracker.time_entries._domain as domain +import time_tracker.time_entries._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.TimeEntriesDao: + def _create_fake_dao(db_fake: DB) -> domain.TimeEntriesDao: + dao = infrastructure.TimeEntriesSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.TimeEntriesSQLDao(db_fake) + query = dao.time_entry.delete() + dao.db.get_session().execute(query) + + +def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_database( + test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + + inserted_time_entry = dao.create(time_entry_to_insert) + + assert isinstance(inserted_time_entry, domain.TimeEntry) + assert inserted_time_entry == time_entry_to_insert + + +def test__time_entry__returns_None__when_not_saves_correctly( + time_entry_factory, create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + time_entry_to_insert = time_entry_factory(activity_id=1203, technologies="[jira,sql]") + + inserted_time_entry = dao.create(time_entry_to_insert) + + assert inserted_time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py new file mode 100644 index 00000000..bd5ce085 --- /dev/null +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -0,0 +1,14 @@ +from time_tracker.time_entries._domain import TimeEntryService + + +def test__create_time_entries__uses_the_time_entry_dao__to_create_an_time_entry(mocker, time_entry_factory): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.create(time_entry_factory()) + + assert time_entry_dao.create.called + assert expected_time_entry == actual_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py new file mode 100644 index 00000000..d2a31eb7 --- /dev/null +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -0,0 +1,18 @@ +from pytest_mock import MockFixture + +from time_tracker.time_entries._domain import _use_cases + + +def test__create_time_entry_function__uses_the_time_entries_service__to_create_time_entry( + mocker: MockFixture, time_entry_factory +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock( + create=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_use_case = _use_cases.CreateTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.create_time_entry(time_entry_factory()) + + assert time_entry_service.create.called + assert expected_time_entry == actual_time_entry diff --git a/V2/time_tracker/_infrastructure/_config.py b/V2/time_tracker/_infrastructure/_config.py index 7f8c8fa7..cf4f19bf 100644 --- a/V2/time_tracker/_infrastructure/_config.py +++ b/V2/time_tracker/_infrastructure/_config.py @@ -1,20 +1,17 @@ import typing import os -CONNECTION_STRING = 'postgresql://root:root@localhost:5433/timetracker' - class Config(typing.NamedTuple): DB_CONNECTION_STRING: str - DB_USER: str - DB_PASS: str - DB_NAME: str def load_config(): + if os.environ.get("ENVIRONMENT") == "development": + connection: str = os.environ.get("DB_CONNECTION") + else: + connection: str = os.environ.get("TEST_DB_CONNECTION") + return Config( - CONNECTION_STRING if os.environ.get("DB_CONNECTION_STRING") is None else os.environ.get("DB_CONNECTION_STRING"), - os.environ.get("DB_USER"), - os.environ.get("DB_PASS"), - os.environ.get("DB_NAME") + connection ) diff --git a/V2/time_tracker/_infrastructure/_db.py b/V2/time_tracker/_infrastructure/_db.py index 8fe5cef1..6f3a9f9a 100644 --- a/V2/time_tracker/_infrastructure/_db.py +++ b/V2/time_tracker/_infrastructure/_db.py @@ -14,7 +14,7 @@ def __init__(self, conn_string: str = conn_string): self.engine = sqlalchemy.create_engine(conn_string) def get_session(self): + self.metadata.create_all(self.engine) if self.connection is None: - self.metadata.create_all(self.engine) self.connection = self.engine.connect() return self.connection diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py new file mode 100644 index 00000000..6e4ba9c3 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries import create_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py new file mode 100644 index 00000000..b46cddce --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_time_entry import create_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py new file mode 100644 index 00000000..a06c212c --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py @@ -0,0 +1,63 @@ +import dataclasses +import json +import typing + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def create_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.CreateTimeEntryUseCase(time_entry_service) + + time_entry_data = req.get_json() + + validation_errors = _validate_time_entry(time_entry_data) + if validation_errors: + return func.HttpResponse( + body=json.dumps(validation_errors), status_code=400, mimetype="application/json" + ) + + time_entry_to_create = _domain.TimeEntry( + id=None, + start_date=time_entry_data["start_date"], + owner_id=time_entry_data["owner_id"], + description=time_entry_data["description"], + activity_id=time_entry_data["activity_id"], + uri=time_entry_data["uri"], + technologies=time_entry_data["technologies"], + end_date=time_entry_data["end_date"], + deleted=False, + timezone_offset=time_entry_data["timezone_offset"], + project_id=time_entry_data["project_id"] + ) + + created_time_entry = use_case.create_time_entry(time_entry_to_create) + + if not created_time_entry: + return func.HttpResponse( + body=json.dumps({'error': 'time_entry could not be created'}), + status_code=500, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(created_time_entry.__dict__), + status_code=201, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> typing.List[str]: + time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry)] + time_entry_fields.pop(8) + missing_keys = [field for field in time_entry_fields if field not in time_entry_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py new file mode 100644 index 00000000..a8b2081c --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._entities import TimeEntry +from ._persistence_contracts import TimeEntriesDao +from ._services import TimeEntryService +from ._use_cases import ( + CreateTimeEntryUseCase, +) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/time_entries/_domain/_entities/__init__.py new file mode 100644 index 00000000..88b4a739 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py new file mode 100644 index 00000000..aa73a879 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from typing import List + + +@dataclass(frozen=True) +class TimeEntry: + id: int + start_date: str + owner_id: int + description: str + activity_id: int + uri: str + technologies: List[str] + end_date: str + deleted: bool + timezone_offset: str + project_id: int diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..e10700ce --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_dao import TimeEntriesDao \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py new file mode 100644 index 00000000..5d04c861 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -0,0 +1,9 @@ +import abc + +from time_tracker.time_entries._domain import TimeEntry + + +class TimeEntriesDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/__init__.py b/V2/time_tracker/time_entries/_domain/_services/__init__.py new file mode 100644 index 00000000..e5e6ba1b --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entry import TimeEntryService \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py new file mode 100644 index 00000000..d7aaf3ba --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntriesDao + + +class TimeEntryService: + + def __init__(self, time_entry_dao: TimeEntriesDao): + self.time_entry_dao = time_entry_dao + + def create(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_dao.create(time_entry_data) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..41aca738 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_time_entry_use_case import CreateTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py new file mode 100644 index 00000000..f2258468 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_create_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class CreateTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def create_time_entry(self, time_entry_data: TimeEntry) -> TimeEntry: + return self.time_entry_service.create(time_entry_data) diff --git a/V2/time_tracker/time_entries/_infrastructure/__init__.py b/V2/time_tracker/time_entries/_infrastructure/__init__.py new file mode 100644 index 00000000..1c7a7d6d --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..b999febe --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._time_entries_dao import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py new file mode 100644 index 00000000..d233f3e9 --- /dev/null +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -0,0 +1,49 @@ +import dataclasses + +import sqlalchemy + +import time_tracker.time_entries._domain as domain +from time_tracker._infrastructure import _db + + +class TimeEntriesSQLDao(domain.TimeEntriesDao): + + def __init__(self, database: _db.DB): + self.time_entry_key = [field.name for field in dataclasses.fields(domain.TimeEntry)] + self.db = database + self.time_entry = sqlalchemy.Table( + 'time_entry', + self.db.metadata, + sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True, autoincrement=True), + sqlalchemy.Column('start_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('owner_id', sqlalchemy.Integer), + sqlalchemy.Column('description', sqlalchemy.String), + sqlalchemy.Column('activity_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('activity.id')), + sqlalchemy.Column('uri', sqlalchemy.String), + sqlalchemy.Column( + 'technologies', + sqlalchemy.ARRAY(sqlalchemy.String).with_variant(sqlalchemy.String, "sqlite") + ), + sqlalchemy.Column('end_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), + sqlalchemy.Column('deleted', sqlalchemy.Boolean), + sqlalchemy.Column('timezone_offset', sqlalchemy.String), + sqlalchemy.Column('project_id', sqlalchemy.Integer), + extend_existing=True, + ) + + def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: + try: + new_time_entry = time_entry_data.__dict__ + new_time_entry.pop('id', None) + + query = self.time_entry.insert().values(new_time_entry).return_defaults() + time_entry = self.db.get_session().execute(query) + new_time_entry.update({"id": time_entry.inserted_primary_key[0]}) + return self.__create_time_entry_dto(new_time_entry) + + except sqlalchemy.exc.SQLAlchemyError: + return None + + def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: + time_entry = {key: time_entry.get(key) for key in self.time_entry_key} + return domain.TimeEntry(**time_entry) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py new file mode 100644 index 00000000..d0182780 --- /dev/null +++ b/V2/time_tracker/time_entries/interface.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._application import create_time_entry \ No newline at end of file diff --git a/V2/update_activity/function.json b/V2/update_activity/function.json deleted file mode 100644 index 97c9fb49..00000000 --- a/V2/update_activity/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "activities/{id}", - "authLevel": "anonymous", - "methods": [ - "PUT" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "update_activity", - "scriptFile": "../time_tracker/activities/interface.py" -} \ No newline at end of file From 60a0dc7015f98b24a3429b1ceabf31e722741649 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Mon, 22 Nov 2021 10:21:34 -0500 Subject: [PATCH 51/74] feat: TT-403 delete v2 time entries (#346) * feat: TT-401 Implemented service, end-point, dao, test- time entries * feat: TT-401 validated request create time entry * fix: TT-401 implemented faker url * feat: TT-403 created end-point to DELETE of time_entries * fix: TT-403 validation of id as integer * fix: TT-403 remove method POST * feat: TT-403 rebase with master * feat: TT-403 tests added * refactor: TT-403 correct flake8 lint syntax * fix: TT-403 comments solved * fix: TT-403 correction of rebase * refactor: TT-403 renamed of delete test Co-authored-by: mandres2015 --- V2/serverless.yml | 10 ++++ .../azure/time_entry_azure_endpoints_test.py | 50 +++++++++++++++++++ V2/tests/fixtures.py | 6 ++- .../integration/daos/time_entries_dao_test.py | 25 +++++++++- .../unit/services/time_entry_service_test.py | 17 +++++++ .../use_cases/time_entries_use_case_test.py | 14 ++++++ .../time_entries/_application/__init__.py | 2 +- .../_application/_time_entries/__init__.py | 3 +- .../_time_entries/_delete_time_entry.py | 36 +++++++++++++ .../time_entries/_domain/__init__.py | 1 + .../_time_entries_dao.py | 4 ++ .../_domain/_services/_time_entry.py | 3 ++ .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_delete_time_entry_use_case.py | 10 ++++ .../_data_persistence/_time_entries_dao.py | 11 ++++ V2/time_tracker/time_entries/interface.py | 3 +- 16 files changed, 191 insertions(+), 5 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index c6c5e34b..fc5942d9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -86,3 +86,13 @@ functions: - POST route: time-entries/ authLevel: anonymous + + delete_time_entry: + handler: time_tracker/time_entries/interface.delete_time_entry + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: time-entries/{id} + authLevel: anonymous diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index f801dad9..8422c4b5 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,12 +1,26 @@ +import pytest import json import azure.functions as func import time_tracker.time_entries._application._time_entries as azure_time_entries +from time_tracker._infrastructure import DB +from time_tracker.time_entries import _domain as domain_time_entries +from time_tracker.time_entries import _infrastructure as infrastructure_time_entries + TIME_ENTRY_URL = "/api/time-entries/" +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry() -> domain_time_entries.TimeEntry: + def _new_time_entry(time_entry: domain_time_entries.TimeEntry, database: DB): + dao = infrastructure_time_entries.TimeEntriesSQLDao(database) + new_time_entry = dao.create(time_entry) + return new_time_entry + return _new_time_entry + + def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_all_attributes( test_db, time_entry_factory, activity_factory, insert_activity ): @@ -26,3 +40,39 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ assert response.status_code == 201 assert time_entry_json_data == time_entry_body + + +def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_deleted__when_its_id_is_found( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, +): + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + inserted_time_entry = insert_time_entry(time_entry_body, test_db) + + req = func.HttpRequest( + method='DELETE', + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entry.id}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert time_entry_json_data['deleted'] is True + + +def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries._delete_time_entry.delete_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid Format ID' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 51ee5e5d..8568bdb6 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -10,7 +10,10 @@ @pytest.fixture(name='activity_factory') def _activity_factory() -> activities_domain.Activity: def _make_activity( - name: str = Faker().name(), description: str = Faker().sentence(), deleted: bool = False, status: int = 1 + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, ): activity = activities_domain.Activity( id=None, @@ -20,6 +23,7 @@ def _make_activity( status=status ) return activity + return _make_activity diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index 403f80c6..901bce34 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -1,5 +1,5 @@ import pytest - +from faker import Faker import time_tracker.time_entries._domain as domain import time_tracker.time_entries._infrastructure as infrastructure @@ -46,3 +46,26 @@ def test__time_entry__returns_None__when_not_saves_correctly( inserted_time_entry = dao.create(time_entry_to_insert) assert inserted_time_entry is None + + +def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_matching_its_id_is_found( + create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entry = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + inserted_time_entry = dao.create(existent_time_entry) + + result = dao.delete(inserted_time_entry.id) + + assert result.deleted is True + + +def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( + create_fake_dao, test_db +): + dao = create_fake_dao(test_db) + + result = dao.delete(Faker().pyint()) + + assert result is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index bd5ce085..e83b6afb 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -1,3 +1,5 @@ +from faker import Faker + from time_tracker.time_entries._domain import TimeEntryService @@ -12,3 +14,18 @@ def test__create_time_entries__uses_the_time_entry_dao__to_create_an_time_entry( assert time_entry_dao.create.called assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry__uses_the_time_entry_dao__to_delete_time_entry_selected( + mocker, +): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_time_entry) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + deleted_time_entry = time_entry_service.delete(Faker().pyint()) + + assert time_entry_dao.delete.called + assert expected_time_entry == deleted_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index d2a31eb7..e0994df4 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -1,4 +1,5 @@ from pytest_mock import MockFixture +from faker import Faker from time_tracker.time_entries._domain import _use_cases @@ -16,3 +17,16 @@ def test__create_time_entry_function__uses_the_time_entries_service__to_create_t assert time_entry_service.create.called assert expected_time_entry == actual_time_entry + + +def test__delete_time_entry_function__uses_the_time_entry_service__to_delete_time_entry_selected( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(delete=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.DeleteTimeEntryUseCase(time_entry_service) + deleted_time_entry = time_entry_use_case.delete_time_entry(Faker().pyint()) + + assert time_entry_service.delete.called + assert expected_time_entry == deleted_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 6e4ba9c3..2810c87d 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries import create_time_entry \ No newline at end of file +from ._time_entries import create_time_entry, delete_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index b46cddce..4cb4d4b0 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,2 +1,3 @@ # flake8: noqa -from ._create_time_entry import create_time_entry \ No newline at end of file +from ._create_time_entry import create_time_entry +from ._delete_time_entry import delete_time_entry \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py new file mode 100644 index 00000000..bbf76eab --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_delete_time_entry.py @@ -0,0 +1,36 @@ +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def delete_time_entry(req: func.HttpRequest) -> func.HttpResponse: + time_entry_dao = _infrastructure.TimeEntriesSQLDao(DB()) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.DeleteTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + deleted_time_entry = use_case.delete_time_entry(time_entry_id) + if not deleted_time_entry: + return func.HttpResponse( + body="Not found", + status_code=404, + mimetype="application/json" + ) + + return func.HttpResponse( + body=json.dumps(deleted_time_entry.__dict__, default=str), + status_code=200, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index a8b2081c..ad927811 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -4,4 +4,5 @@ from ._services import TimeEntryService from ._use_cases import ( CreateTimeEntryUseCase, + DeleteTimeEntryUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index 5d04c861..e7d94608 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -7,3 +7,7 @@ class TimeEntriesDao(abc.ABC): @abc.abstractmethod def create(self, time_entry_data: TimeEntry) -> TimeEntry: pass + + @abc.abstractmethod + def delete(self, id: int) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index d7aaf3ba..9d47d5e0 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -8,3 +8,6 @@ def __init__(self, time_entry_dao: TimeEntriesDao): def create(self, time_entry_data: TimeEntry) -> TimeEntry: return self.time_entry_dao.create(time_entry_data) + + def delete(self, id: int) -> TimeEntry: + return self.time_entry_dao.delete(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 41aca738..17b2442a 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,2 +1,3 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase +from ._delete_time_entry_use_case import DeleteTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py new file mode 100644 index 00000000..a195c303 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_delete_time_entry_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService + + +class DeleteTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def delete_time_entry(self, id: int) -> TimeEntry: + return self.time_entry_service.delete(id) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py index d233f3e9..6037af9f 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -47,3 +47,14 @@ def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: time_entry = {key: time_entry.get(key) for key in self.time_entry_key} return domain.TimeEntry(**time_entry) + + def delete(self, time_entry_id: int) -> domain.TimeEntry: + query = ( + self.time_entry.update() + .where(self.time_entry.c.id == time_entry_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_time_entry = sqlalchemy.sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query_deleted_time_entry).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index d0182780..773314bb 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,2 +1,3 @@ # flake8: noqa -from ._application import create_time_entry \ No newline at end of file +from ._application import create_time_entry +from ._application import delete_time_entry \ No newline at end of file From a6fcb3536a5fa6817a5dc28fab899557e5b9e4d8 Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Mon, 22 Nov 2021 14:17:50 -0500 Subject: [PATCH 52/74] ci: TT-411 adds pull request (ci) workflow for time-tracker v1 (#356) * ci: [TT-412] adds ci workflow for time-tracker v1 * ci: [TT-412] injects secrets * ci: [TT-412] gets azure vault * ci: [TT-412] logins to azure * ci: [TT-412] adds other attempt to use secrets * ci: [TT-412] adds other attempt to use secrets as env var * ci: [TT-412] injects secrets to run tests * ci: [TT-412] injects USERID to run tests * ci: [TT-412] injects AZURE-APP-CONFIGURATION-CONNECTION-STRING to run tests * ci: [TT-412] injects DATABASE-ACCOUNT-URI to run tests * ci: [TT-412] injects DATABASE-MASTER-KEYO to run tests * ci: [TT-412] injects DATABASE-NAME to run tests * ci: [TT-412] injects AZURE-STORAGE-ACCOUNT-KEY to run tests * ci: [TT-412] adds step to build the app * ci: [TT-412] updates on section values * ci: [TT-412] renames file --- ...me-tracker-v1-on-pull-request-workflow.yml | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 .github/workflows/time-tracker-v1-on-pull-request-workflow.yml diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml new file mode 100644 index 00000000..af4d872a --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -0,0 +1,60 @@ +name: Time Tacker V1 CI + +on: + pull_request: + branches: [master] + +jobs: + time-tracker-ci: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + run: | + pytest tests + + - name: Test the build of the app + run: | + docker build . From 6de0063b7bedacd79d5bd7dac9ebfe06e15f5f6c Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Tue, 23 Nov 2021 13:26:38 -0500 Subject: [PATCH 53/74] ci: [TT-412] adds ci workflow when push to master --- ...me-tracker-v1-on-pull-request-workflow.yml | 4 +- .../time-tracker-v1-on-push-workflow.yml | 67 +++++++++++++++++++ 2 files changed, 69 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/time-tracker-v1-on-push-workflow.yml diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index af4d872a..c35be604 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -1,11 +1,11 @@ -name: Time Tacker V1 CI +name: Time Tacker V1 CI - ON PR on: pull_request: branches: [master] jobs: - time-tracker-ci: + time-tracker-ci-v1-on-pr: runs-on: ubuntu-latest strategy: diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml new file mode 100644 index 00000000..ed673b64 --- /dev/null +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -0,0 +1,67 @@ +name: Time Tacker V1 CI - ON PUSH + +on: + push: + # update to master + branches: [TT-412-onpush] + +jobs: + time-tracker-ci-v1-on-push: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [3.9] + + steps: + - name: Checking out code from the repository + uses: actions/checkout@v2 + + - name: Setting up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/time_tracker_api/dev.txt + pip install -r requirements/time_tracker_events/dev.txt + + - name: Login to azure + uses: Azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Get vault from azure + uses: Azure/get-keyvault-secrets@v1 + with: + keyvault: "time-tracker-secrets" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + id: timeTrackerAzureVault + + - name: Run tests + env: + MS_AUTHORITY: ${{ steps.timeTrackerAzureVault.outputs.MS-AUTHORITY }} + MS_CLIENT_ID: ${{ steps.timeTrackerAzureVault.outputs.MS-CLIENT-ID }} + MS_SCOPE: ${{ steps.timeTrackerAzureVault.outputs.MS-SCOPE }} + MS_SECRET: ${{ steps.timeTrackerAzureVault.outputs.MS-SECRET }} + MS_ENDPOINT: ${{ steps.timeTrackerAzureVault.outputs.MS-ENDPOINT }} + USERID: ${{ steps.timeTrackerAzureVault.outputs.USERID }} + AZURE_APP_CONFIGURATION_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-APP-CONFIGURATION-CONNECTION-STRING }} + DATABASE_ACCOUNT_URI: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-ACCOUNT-URI }} + DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} + DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} + AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + run: | + pytest tests + + - name: Build and push image + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + run: | + docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From d6c4c4d67e72db867f197af8c7f8147839d6c178 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Wed, 24 Nov 2021 09:25:55 -0500 Subject: [PATCH 54/74] feat: TT-418 CRUD customer v2 (#359) * feat: TT-418 create added * feat: TT-418 create tests added * fix: TT-418 problems solved --- V2/serverless.yml | 10 ++++ .../azure/customer_azure_endpoints_test.py | 49 ++++++++++++++++ V2/tests/conftest.py | 1 + V2/tests/fixtures.py | 23 +++++++- .../integration/daos/customers_dao_test.py | 35 ++++++++++++ .../unit/services/customer_service_test.py | 14 +++++ .../unit/use_cases/customers_use_case_test.py | 18 ++++++ .../customers/_application/__init__.py | 2 + .../_application/_customers/__init__.py | 2 + .../_customers/_create_customer.py | 57 +++++++++++++++++++ V2/time_tracker/customers/_domain/__init__.py | 7 +++ .../customers/_domain/_entities/__init__.py | 2 + .../customers/_domain/_entities/_customer.py | 11 ++++ .../_persistence_contracts/__init__.py | 2 + .../_persistence_contracts/_customers_dao.py | 9 +++ .../customers/_domain/_services/__init__.py | 2 + .../customers/_domain/_services/_customer.py | 10 ++++ .../customers/_domain/_use_cases/__init__.py | 2 + .../_use_cases/_create_customer_use_case.py | 10 ++++ .../customers/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_customer_dao.py | 41 +++++++++++++ V2/time_tracker/customers/interface.py | 2 + 23 files changed, 312 insertions(+), 1 deletion(-) create mode 100644 V2/tests/api/azure/customer_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/customers_dao_test.py create mode 100644 V2/tests/unit/services/customer_service_test.py create mode 100644 V2/tests/unit/use_cases/customers_use_case_test.py create mode 100644 V2/time_tracker/customers/_application/__init__.py create mode 100644 V2/time_tracker/customers/_application/_customers/__init__.py create mode 100644 V2/time_tracker/customers/_application/_customers/_create_customer.py create mode 100644 V2/time_tracker/customers/_domain/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_entities/_customer.py create mode 100644 V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py create mode 100644 V2/time_tracker/customers/_domain/_services/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_services/_customer.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py create mode 100644 V2/time_tracker/customers/_infrastructure/__init__.py create mode 100644 V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py create mode 100644 V2/time_tracker/customers/interface.py diff --git a/V2/serverless.yml b/V2/serverless.yml index fc5942d9..e5dea8e9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -96,3 +96,13 @@ functions: - DELETE route: time-entries/{id} authLevel: anonymous + + create_customer: + handler: time_tracker/customers/interface.create_customer + events: + - http: true + x-azure-settings: + methods: + - POST + route: customers/ + authLevel: anonymous diff --git a/V2/tests/api/azure/customer_azure_endpoints_test.py b/V2/tests/api/azure/customer_azure_endpoints_test.py new file mode 100644 index 00000000..47a619d5 --- /dev/null +++ b/V2/tests/api/azure/customer_azure_endpoints_test.py @@ -0,0 +1,49 @@ +import json +from faker import Faker + +import azure.functions as func + +import time_tracker.customers._application._customers as azure_customers + +CUSTOMER_URL = "/api/customers/" + + +def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + customer_body = customer_factory().__dict__ + + body = json.dumps(customer_body).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + customer_json_data = json.loads(response.get_body()) + customer_body['id'] = customer_json_data['id'] + + assert response.status_code == 201 + assert customer_json_data == customer_body + + +def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): + customer_to_insert = { + "id": None, + "name": Faker().user_name(), + "deleted": False, + "status": 1 + } + + body = json.dumps(customer_to_insert).encode("utf-8") + req = func.HttpRequest( + method='POST', + body=body, + url=CUSTOMER_URL, + ) + + response = azure_customers._create_customer.create_customer(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index cf6e362f..4ad03c51 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,3 +1,4 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity from fixtures import _time_entry_factory +from fixtures import _customer_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 8568bdb6..a02a74bb 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,8 +2,9 @@ from faker import Faker import time_tracker.activities._domain as activities_domain -import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain +import time_tracker.customers._domain as customers_domain +import time_tracker.activities._infrastructure as activities_infrastructure from time_tracker._infrastructure import DB @@ -73,3 +74,23 @@ def _new_activity(activity: activities_domain.Activity, database: DB): new_activity = dao.create(activity) return new_activity return _new_activity + + +@pytest.fixture(name='customer_factory') +def _customer_factory() -> customers_domain.Customer: + def _make_customer( + name: str = Faker().name(), + description: str = Faker().sentence(), + deleted: bool = False, + status: int = 1, + ): + customer = customers_domain.Customer( + id=None, + name=name, + description=description, + deleted=deleted, + status=status + ) + return customer + + return _make_customer diff --git a/V2/tests/integration/daos/customers_dao_test.py b/V2/tests/integration/daos/customers_dao_test.py new file mode 100644 index 00000000..b85cd3e3 --- /dev/null +++ b/V2/tests/integration/daos/customers_dao_test.py @@ -0,0 +1,35 @@ +import pytest + +import time_tracker.customers._domain as domain +import time_tracker.customers._infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='create_fake_dao') +def _fake_dao() -> domain.CustomersDao: + def _create_fake_dao(db_fake: DB) -> domain.CustomersDao: + dao = infrastructure.CustomersSQLDao(db_fake) + return dao + return _create_fake_dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.CustomersSQLDao(db_fake) + query = dao.customer.delete() + dao.db.get_session().execute(query) + + +def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao +): + dao = create_fake_dao(test_db) + + customer_to_insert = customer_factory() + + inserted_customer = dao.create(customer_to_insert) + + assert isinstance(inserted_customer, domain.Customer) + assert inserted_customer == customer_to_insert diff --git a/V2/tests/unit/services/customer_service_test.py b/V2/tests/unit/services/customer_service_test.py new file mode 100644 index 00000000..bb25070f --- /dev/null +++ b/V2/tests/unit/services/customer_service_test.py @@ -0,0 +1,14 @@ +from time_tracker.customers._domain import CustomerService + + +def test__create_customer__uses_the_customer_dao__to_create_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + new_customer = customer_service.create(customer_factory()) + + assert customer_dao.create.called + assert expected_customer == new_customer diff --git a/V2/tests/unit/use_cases/customers_use_case_test.py b/V2/tests/unit/use_cases/customers_use_case_test.py new file mode 100644 index 00000000..3b8566a9 --- /dev/null +++ b/V2/tests/unit/use_cases/customers_use_case_test.py @@ -0,0 +1,18 @@ +from pytest_mock import MockFixture + +from time_tracker.customers._domain import _use_cases + + +def test__create_customer_function__uses_the_customer_service__to_create_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + create=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.CreateCustomerUseCase(customer_service) + new_customer = customer_use_case.create_customer(customer_factory()) + + assert customer_service.create.called + assert expected_customer == new_customer diff --git a/V2/time_tracker/customers/_application/__init__.py b/V2/time_tracker/customers/_application/__init__.py new file mode 100644 index 00000000..db2c2c15 --- /dev/null +++ b/V2/time_tracker/customers/_application/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customers import create_customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_application/_customers/__init__.py b/V2/time_tracker/customers/_application/_customers/__init__.py new file mode 100644 index 00000000..bf1f8460 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_customer import create_customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_application/_customers/_create_customer.py b/V2/time_tracker/customers/_application/_customers/_create_customer.py new file mode 100644 index 00000000..919c34cb --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_create_customer.py @@ -0,0 +1,57 @@ +import dataclasses +import json +import typing + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def create_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.CreateCustomerUseCase(customer_service) + customer_data = req.get_json() + + customer_is_valid = _validate_customer(customer_data) + if not customer_is_valid: + raise ValueError + + customer_to_create = _domain.Customer( + id=None, + deleted=None, + status=None, + name=str(customer_data["name"]).strip(), + description=str(customer_data["description"]), + ) + created_customer = use_case.create_customer(customer_to_create) + + if created_customer: + body = json.dumps(created_customer.__dict__) + status_code = 201 + else: + body = b'This customer already exists' + status_code = 409 + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=400, + mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> bool: + if [field.name for field in dataclasses.fields(_domain.Customer) + if (field.name not in customer_data) and (field.type != typing.Optional[field.type])]: + return False + return True diff --git a/V2/time_tracker/customers/_domain/__init__.py b/V2/time_tracker/customers/_domain/__init__.py new file mode 100644 index 00000000..8392b8e9 --- /dev/null +++ b/V2/time_tracker/customers/_domain/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from ._entities import Customer +from ._persistence_contracts import CustomersDao +from ._services import CustomerService +from ._use_cases import ( + CreateCustomerUseCase, +) \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_entities/__init__.py b/V2/time_tracker/customers/_domain/_entities/__init__.py new file mode 100644 index 00000000..2a23e12c --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import Customer \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_entities/_customer.py b/V2/time_tracker/customers/_domain/_entities/_customer.py new file mode 100644 index 00000000..fedc0835 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_entities/_customer.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +import typing + + +@dataclass(frozen=True) +class Customer: + id: typing.Optional[int] + name: str + description: str + deleted: typing.Optional[bool] + status: typing.Optional[int] diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..8b1b02fd --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customers_dao import CustomersDao \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py new file mode 100644 index 00000000..35a7a7e9 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py @@ -0,0 +1,9 @@ +import abc + +from time_tracker.customers._domain import Customer + + +class CustomersDao(abc.ABC): + @abc.abstractmethod + def create(self, data: Customer) -> Customer: + pass diff --git a/V2/time_tracker/customers/_domain/_services/__init__.py b/V2/time_tracker/customers/_domain/_services/__init__.py new file mode 100644 index 00000000..84ed66cf --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer import CustomerService \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_services/_customer.py b/V2/time_tracker/customers/_domain/_services/_customer.py new file mode 100644 index 00000000..88633a08 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_services/_customer.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomersDao + + +class CustomerService: + + def __init__(self, customer_dao: CustomersDao): + self.customer_dao = customer_dao + + def create(self, data: Customer) -> Customer: + return self.customer_dao.create(data) diff --git a/V2/time_tracker/customers/_domain/_use_cases/__init__.py b/V2/time_tracker/customers/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..accd4281 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._create_customer_use_case import CreateCustomerUseCase \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py new file mode 100644 index 00000000..8aeefa2b --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_create_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class CreateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def create_customer(self, data: Customer) -> Customer: + return self.customer_service.create(data) diff --git a/V2/time_tracker/customers/_infrastructure/__init__.py b/V2/time_tracker/customers/_infrastructure/__init__.py new file mode 100644 index 00000000..220e8f60 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..c3c24a98 --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._customer_dao import CustomersSQLDao diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py new file mode 100644 index 00000000..2b1f4c0d --- /dev/null +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py @@ -0,0 +1,41 @@ +import dataclasses + +import sqlalchemy as sq + +import time_tracker.customers._domain as domain +from time_tracker._infrastructure import _db + + +class CustomersSQLDao(domain.CustomersDao): + + def __init__(self, database: _db.DB): + self.customer_key = [field.name for field in dataclasses.fields(domain.Customer)] + self.db = database + self.customer = sq.Table( + 'customer', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String, unique=True, nullable=False), + sq.Column('description', sq.String), + sq.Column('deleted', sq.Boolean), + sq.Column('status', sq.Integer), + extend_existing=True, + ) + + def create(self, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop('id', None) + new_customer['deleted'] = False + new_customer['status'] = 1 + + query = self.customer.insert().values(new_customer).return_defaults() + customer = self.db.get_session().execute(query) + new_customer.update({"id": customer.inserted_primary_key[0]}) + return self.__create_customer_dto(new_customer) + except sq.exc.IntegrityError: + return None + + def __create_customer_dto(self, customer: dict) -> domain.Customer: + customer = {key: customer.get(key) for key in self.customer_key} + return domain.Customer(**customer) diff --git a/V2/time_tracker/customers/interface.py b/V2/time_tracker/customers/interface.py new file mode 100644 index 00000000..e36b8172 --- /dev/null +++ b/V2/time_tracker/customers/interface.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._application import create_customer From d898e1b0ed42390133a7b5dded3bcf281fc2f1ba Mon Sep 17 00:00:00 2001 From: Cristian Toaquiza Date: Wed, 24 Nov 2021 12:06:52 -0500 Subject: [PATCH 55/74] ci: [TT-412] adds ci workflow when push to master (#358) --- .github/workflows/time-tracker-v1-on-push-workflow.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index ed673b64..152998b4 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -2,8 +2,7 @@ name: Time Tacker V1 CI - ON PUSH on: push: - # update to master - branches: [TT-412-onpush] + branches: [master] jobs: time-tracker-ci-v1-on-push: @@ -56,12 +55,13 @@ jobs: run: | pytest tests - - name: Build and push image + - name: Login to docker registry uses: azure/docker-login@v1 with: login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} username: ${{ secrets.REGISTRY_USERNAME }} password: ${{ secrets.REGISTRY_PASSWORD }} + - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From b81319fe12bff57816dac1d0354000bfc6674c1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:47:03 -0500 Subject: [PATCH 56/74] fix: TT-401 change in activity database instance and refactor test (#355) * fix: TT-401 change in activity database instance and refactor test * fix: TT-401 resolved comments --- V2/Makefile | 16 ++++++++++------ .../api/azure/activity_azure_endpoints_test.py | 5 ----- ...es_sql_dao_test.py => activities_dao_test.py} | 4 ++-- .../_application/_activities/_create_activity.py | 5 ++--- .../_application/_activities/_delete_activity.py | 5 ++--- .../_application/_activities/_get_activities.py | 15 +++++++-------- .../_application/_activities/_update_activity.py | 5 ++--- .../_data_persistence/__init__.py | 2 +- ..._activities_sql_dao.py => _activities_dao.py} | 0 .../_time_entries/_create_time_entry.py | 4 ++-- .../_domain/_entities/_time_entry.py | 6 +++--- 11 files changed, 31 insertions(+), 36 deletions(-) rename V2/tests/integration/daos/{activities_sql_dao_test.py => activities_dao_test.py} (98%) rename V2/time_tracker/activities/_infrastructure/_data_persistence/{_activities_sql_dao.py => _activities_dao.py} (100%) diff --git a/V2/Makefile b/V2/Makefile index 135e96d0..cf02904b 100644 --- a/V2/Makefile +++ b/V2/Makefile @@ -1,10 +1,11 @@ .PHONY: help help: @echo "---------------HELP-----------------" - @echo "To install the dependencies type make install" - @echo "To test the project type make test" - @echo "To run the local database type make start-local" - @echo "To run all comands type make ci" + @echo "- make install --> Install the dependencies" + @echo "- make test --> Run all tests" + @echo "- make test specific_test= --> Run specific test" + @echo "- make start-local --> Run local database" + @echo "- make ci --> Install the dependencies and run all tests" @echo "------------------------------------" .PHONY: install @@ -17,13 +18,16 @@ install: .PHONY: test test: export ENVIRONMENT = test -test: export TEST_DB_CONNECTION = sqlite:///:memory: test: @echo "=========================================Lint with flake8=========================================" flake8 . --show-source --statistics @echo "Completed flake8!" @echo "=========================================Test with pytest=========================================" - python -m pytest -v + @if [ "$(specific_test)" ]; then \ + python -m pytest -vv -s -k $(specific_test);\ + else \ + python -m pytest -v;\ + fi @echo "Completed test!" start-local: diff --git a/V2/tests/api/azure/activity_azure_endpoints_test.py b/V2/tests/api/azure/activity_azure_endpoints_test.py index 994c74c7..7c0de311 100644 --- a/V2/tests/api/azure/activity_azure_endpoints_test.py +++ b/V2/tests/api/azure/activity_azure_endpoints_test.py @@ -17,8 +17,6 @@ def test__activity_azure_endpoint__returns_all_activities( insert_activity(existent_activities[1], test_db).__dict__ ] - azure_activities._get_activities.DATABASE = test_db - req = func.HttpRequest(method='GET', body=None, url=ACTIVITY_URL) response = azure_activities._get_activities.get_activities(req) activities_json_data = response.get_body().decode("utf-8") @@ -33,7 +31,6 @@ def test__activity_azure_endpoint__returns_an_activity__when_activity_matches_it existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._get_activities.DATABASE = test_db req = func.HttpRequest( method='GET', body=None, @@ -54,7 +51,6 @@ def test__activity_azure_endpoint__returns_an_activity_with_inactive_status__whe existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._delete_activity.DATABASE = test_db req = func.HttpRequest( method='DELETE', body=None, @@ -76,7 +72,6 @@ def test__update_activity_azure_endpoint__returns_an_activity__when_found_an_act existent_activity = activity_factory() inserted_activity = insert_activity(existent_activity, test_db).__dict__ - azure_activities._update_activity.DATABASE = test_db activity_body = {"description": Faker().sentence()} req = func.HttpRequest( method='PUT', diff --git a/V2/tests/integration/daos/activities_sql_dao_test.py b/V2/tests/integration/daos/activities_dao_test.py similarity index 98% rename from V2/tests/integration/daos/activities_sql_dao_test.py rename to V2/tests/integration/daos/activities_dao_test.py index 0f0170af..637a7799 100644 --- a/V2/tests/integration/daos/activities_sql_dao_test.py +++ b/V2/tests/integration/daos/activities_dao_test.py @@ -9,7 +9,7 @@ @pytest.fixture(name='create_fake_dao') def _create_fake_dao() -> domain.ActivitiesDao: - db_fake = DB('sqlite:///:memory:') + db_fake = DB() dao = infrastructure.ActivitiesSQLDao(db_fake) return dao @@ -17,7 +17,7 @@ def _create_fake_dao() -> domain.ActivitiesDao: @pytest.fixture(name='clean_database', autouse=True) def _clean_database(): yield - db_fake = DB('sqlite:///:memory:') + db_fake = DB() dao = infrastructure.ActivitiesSQLDao(db_fake) query = dao.activity.delete() dao.db.get_session().execute(query) diff --git a/V2/time_tracker/activities/_application/_activities/_create_activity.py b/V2/time_tracker/activities/_application/_activities/_create_activity.py index 94f3701d..8d5b912d 100644 --- a/V2/time_tracker/activities/_application/_activities/_create_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_create_activity.py @@ -8,11 +8,10 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def create_activity(req: func.HttpRequest) -> func.HttpResponse: - activity_dao = _infrastructure.ActivitiesSQLDao(DATABASE) + database = DB() + activity_dao = _infrastructure.ActivitiesSQLDao(database) activity_service = _domain.ActivityService(activity_dao) use_case = _domain._use_cases.CreateActivityUseCase(activity_service) diff --git a/V2/time_tracker/activities/_application/_activities/_delete_activity.py b/V2/time_tracker/activities/_application/_activities/_delete_activity.py index 14ada8ab..746b1073 100644 --- a/V2/time_tracker/activities/_application/_activities/_delete_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_delete_activity.py @@ -7,8 +7,6 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def delete_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -29,8 +27,9 @@ def delete_activity(req: func.HttpRequest) -> func.HttpResponse: def _delete(activity_id: int) -> str: + database = DB() activity_use_case = _domain._use_cases.DeleteActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.delete_activity(activity_id) return json.dumps(activity.__dict__) if activity else b'Not found' diff --git a/V2/time_tracker/activities/_application/_activities/_get_activities.py b/V2/time_tracker/activities/_application/_activities/_get_activities.py index d92503dd..dd6053b0 100644 --- a/V2/time_tracker/activities/_application/_activities/_get_activities.py +++ b/V2/time_tracker/activities/_application/_activities/_get_activities.py @@ -7,10 +7,9 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def get_activities(req: func.HttpRequest) -> func.HttpResponse: + database = DB() logging.info( 'Python HTTP trigger function processed a request to get an activity.' ) @@ -19,11 +18,11 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: try: if activity_id: - response = _get_by_id(int(activity_id)) + response = _get_by_id(int(activity_id), database) if response == b'Not Found': status_code = 404 else: - response = _get_all() + response = _get_all(database) return func.HttpResponse( body=response, status_code=status_code, mimetype="application/json" @@ -34,18 +33,18 @@ def get_activities(req: func.HttpRequest) -> func.HttpResponse: ) -def _get_by_id(activity_id: int) -> str: +def _get_by_id(activity_id: int, database: DB) -> str: activity_use_case = _domain._use_cases.GetActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.get_activity_by_id(activity_id) return json.dumps(activity.__dict__) if activity else b'Not Found' -def _get_all() -> str: +def _get_all(database: DB) -> str: activities_use_case = _domain._use_cases.GetActivitiesUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) return json.dumps( [ diff --git a/V2/time_tracker/activities/_application/_activities/_update_activity.py b/V2/time_tracker/activities/_application/_activities/_update_activity.py index 0933fd72..4717042c 100644 --- a/V2/time_tracker/activities/_application/_activities/_update_activity.py +++ b/V2/time_tracker/activities/_application/_activities/_update_activity.py @@ -8,8 +8,6 @@ from ... import _infrastructure from time_tracker._infrastructure import DB -DATABASE = DB() - def update_activity(req: func.HttpRequest) -> func.HttpResponse: logging.info( @@ -37,8 +35,9 @@ def update_activity(req: func.HttpRequest) -> func.HttpResponse: def _update(activity_id: int, activity_data: dict) -> str: + database = DB() activity_use_case = _domain._use_cases.UpdateActivityUseCase( - _create_activity_service(DATABASE) + _create_activity_service(database) ) activity = activity_use_case.update_activity( activity_id, activity_data.get("name"), diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py index 1e7220c5..35c209db 100644 --- a/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/activities/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._activities_sql_dao import ActivitiesSQLDao +from ._activities_dao import ActivitiesSQLDao diff --git a/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py b/V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py similarity index 100% rename from V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_sql_dao.py rename to V2/time_tracker/activities/_infrastructure/_data_persistence/_activities_dao.py diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py index a06c212c..95149c55 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/_create_time_entry.py @@ -54,8 +54,8 @@ def create_time_entry(req: func.HttpRequest) -> func.HttpResponse: def _validate_time_entry(time_entry_data: dict) -> typing.List[str]: - time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry)] - time_entry_fields.pop(8) + time_entry_fields = [field.name for field in dataclasses.fields(_domain.TimeEntry) + if field.type != typing.Optional[field.type]] missing_keys = [field for field in time_entry_fields if field not in time_entry_data] return [ f'The {missing_key} key is missing in the input data' diff --git a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py index aa73a879..08df5f8f 100644 --- a/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_entities/_time_entry.py @@ -1,10 +1,10 @@ from dataclasses import dataclass -from typing import List +from typing import List, Optional @dataclass(frozen=True) class TimeEntry: - id: int + id: Optional[int] start_date: str owner_id: int description: str @@ -12,6 +12,6 @@ class TimeEntry: uri: str technologies: List[str] end_date: str - deleted: bool + deleted: Optional[bool] timezone_offset: str project_id: int From 6dd85055b666888c7a22ffa1635b2e53903e7942 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Wed, 24 Nov 2021 17:00:25 -0500 Subject: [PATCH 57/74] docs: TT-419 update readme V2 (#357) --- V2/README.md | 103 ++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 94 insertions(+), 9 deletions(-) diff --git a/V2/README.md b/V2/README.md index f414079d..64726b30 100644 --- a/V2/README.md +++ b/V2/README.md @@ -1,23 +1,108 @@ -# time-tracker-api V2 +# **Time-tracker-api V2 Architecture** +Architecture +The application follows a DDD approach with a hexagonal clean architecture. BIG WORDS!, what does it mean? it means the following: -Refer to [Serverless docs](https://serverless.com/framework/docs/providers/azure/guide/intro/) for more information. +We have a directory for each domain entitiy (i.e. time entries, technologies, activities, etc) +Inside each entity directory we have other 3 directories (application, domain and infrastructure) +I'll leave this drawing to understand how these three folders work and what logic should be included in these directories -## Requirements to use makefile +![ddd.png](https://raw.githubusercontent.com/eguezgustavo/time_tracker_app_skeleton/master/ddd.png) +More information [Here](https://github.com/eguezgustavo/time_tracker_app_skeleton) -- Python version 3.6 or 3.7. +## **Stack Technologies** + - [Serverless](https://serverless.com/framework/docs/providers/azure/guide/intro/) + - Python + - Pytest + - Docker Compose + +Recommended link [tdd_dojo](https://github.com/eguezgustavo/tdd_dojo) -- Use an environment to install requirements (pyenv). +## **Setup environment** -## How to use makefile +### **Requeriments** + +- Install python 3.6 or 3.7 (recommendation to install python [pyenv](https://github.com/pyenv/pyenv)) +- Install node (recommendation to install node [nvm](https://www.digitalocean.com/community/tutorials/how-to-install-node-js-on-ubuntu-20-04-es#:~:text=de%20Node.js.-,Opci%C3%B3n%203%3A%20Instalar%20Node%20usando%20el%20administrador%20de%20versiones%20de%20Node,-Otra%20forma%20de)) + +### **Add variables** +In the root directory /time-tracker-backend create a file .env with these values + +``` +export MS_AUTHORITY=XXX +export MS_CLIENT_ID=XXX +export MS_SCOPE=XXX +export MS_SECRET=XXX +export MS_ENDPOINT=XXX +export DATABASE_ACCOUNT_URI=XXX +export DATABASE_MASTER_KEY=XXX +export DATABASE_NAME=XXX +export FLASK_APP=XXX +export FLASK_ENV=XXX +export AZURE_APP_CONFIGURATION_CONNECTION_STRING=XXX +export USERID=XXX +export FLASK_DEBUG=True +export PYTHONPATH=XXX +export DB_CONNECTION=XXX +export ENVIRONMENT=XXX +``` + +In the directory /V2 create a file .env with these values +``` +DB_USER=XXX +DB_PASS=XXX +DB_NAME=XXX +``` + +### **Install dependencies** +In the Directory /V2 +``` +make install +``` + +## **Start Project** +In the directory /V2 +``` +npm run offline +docker compose up or make start-local +``` + + +## **Makefile to run a locally CI** Execute the next command to show makefile help: ```shell -make help +$ make help ``` - To install the dependencies type the command ```make install``` - - To test the project type the command ```make test``` - - To run the local database type the command ```make start-local``` + +## **How to contribute to the project** +Clone the repository and from the master branch create a new branch for each new task. +### **Branch names format** +For example if your task in Jira is **TT-48 implement semantic versioning** your branch name is: +``` + TT-48-implement-semantic-versioning +``` +### **Commit messages format** + + + Below there are some common examples you can use for your commit messages [semantic version](https://semver.org/) : + + - **feat**: A new feature. + - **fix**: A bug fix. + - **perf**: A code change that improves performance. + - **build**: Changes that affect the build system or external dependencies (example scopes: npm, ts configuration). + - **ci**: Changes to our CI or CD configuration files and scripts (example scopes: Azure devops, github actions). + - **docs**: Documentation only changes. + - **refactor**: A code change that neither fixes a bug nor adds a feature. + It is important to mention that this key is not related to css styles. + - **test**: Adding missing tests or correcting existing tests. + + ### Example + fix: TT-48 implement semantic versioning + + Prefix to use in the space fix: + `(fix: |feat: |perf: |build: |ci: |docs: |refactor: |style: |test: )` \ No newline at end of file From 48f641170a968c7f12bc60f7882b0f4eda6cede2 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Thu, 25 Nov 2021 12:38:09 -0500 Subject: [PATCH 58/74] feat: TT-402 put v2 time entries (#347) * feat: TT-402 add put time entries * refactor: TT-402 rebase post time entry * test: TT-402 add integration test of UPDATE * refactor: TT-402 delete time_entires_sql_dao_test * refactor: TT-404 revert changes _db.py * refactor: TT-402 Andres's resolve comments * fix: TT-402 refactor azure update endpoint * fix: TT-402 change name test --- V2/serverless.yml | 12 +++ .../azure/time_entry_azure_endpoints_test.py | 77 ++++++++++++++++++- V2/tests/fixtures.py | 2 +- .../integration/daos/time_entries_dao_test.py | 35 ++++++++- .../unit/services/time_entry_service_test.py | 17 ++++ .../use_cases/time_entries_use_case_test.py | 13 ++++ .../time_entries/_application/__init__.py | 4 +- .../_application/_time_entries/__init__.py | 3 +- .../_time_entries/_update_time_entry.py | 46 +++++++++++ .../time_entries/_domain/__init__.py | 3 +- .../_domain/_entities/__init__.py | 2 +- .../_persistence_contracts/__init__.py | 2 +- .../_time_entries_dao.py | 4 + .../_domain/_services/__init__.py | 2 +- .../_domain/_services/_time_entry.py | 3 + .../_domain/_use_cases/__init__.py | 1 + .../_use_cases/_update_time_entry_use_case.py | 11 +++ .../_data_persistence/_time_entries_dao.py | 23 +++++- V2/time_tracker/time_entries/interface.py | 3 +- 19 files changed, 247 insertions(+), 16 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index e5dea8e9..bac6bcc9 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -97,6 +97,17 @@ functions: route: time-entries/{id} authLevel: anonymous + + update_time_entry: + handler: time_tracker/time_entries/interface.update_time_entry + events: + - http: true + x-azure-settings: + methods: + - PUT + route: time-entries/{id} + authLevel: anonymous + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -106,3 +117,4 @@ functions: - POST route: customers/ authLevel: anonymous + \ No newline at end of file diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index 8422c4b5..f57db585 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,5 +1,6 @@ import pytest import json +from faker import Faker import azure.functions as func @@ -25,7 +26,7 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ test_db, time_entry_factory, activity_factory, insert_activity ): inserted_activity = insert_activity(activity_factory(), test_db) - time_entry_body = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]").__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity.id).__dict__ body = json.dumps(time_entry_body).encode("utf-8") req = func.HttpRequest( @@ -46,7 +47,7 @@ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_de test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, ): inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"]) inserted_time_entry = insert_time_entry(time_entry_body, test_db) req = func.HttpRequest( @@ -76,3 +77,75 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.status_code == 400 assert response.get_body() == b'Invalid Format ID' + + +def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an_time_entry_to_update( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(existent_time_entries, test_db).__dict__ + + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method='PUT', + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entries["id"]}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_time_entries.update(time_entry_body) + + assert response.status_code == 200 + assert activitiy_json_data == json.dumps(inserted_time_entries) + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_format_id(): + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().sentence()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Invalid Format ID' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_404__when_not_found_an_time_entry_to_update(): + time_entry_body = {"description": Faker().sentence()} + + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + +def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_body(): + + time_entry_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + body=json.dumps(time_entry_body).encode("utf-8"), + url=TIME_ENTRY_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_time_entries._update_time_entry.update_time_entry(req) + + assert response.status_code == 400 + assert response.get_body() == b'Incorrect time entry body' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index a02a74bb..91b0a801 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -44,7 +44,7 @@ def _make_time_entry( description=Faker().sentence(), activity_id=Faker().random_int(), uri=Faker().domain_name(), - technologies=["jira", "git"], + technologies=str(Faker().pylist()), end_date=str(Faker().date_time()), deleted=False, timezone_offset="300", diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index 901bce34..fbe5a7ed 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -29,7 +29,7 @@ def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_da dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id) inserted_time_entry = dao.create(time_entry_to_insert) @@ -41,7 +41,7 @@ def test__time_entry__returns_None__when_not_saves_correctly( time_entry_factory, create_fake_dao, test_db ): dao = create_fake_dao(test_db) - time_entry_to_insert = time_entry_factory(activity_id=1203, technologies="[jira,sql]") + time_entry_to_insert = time_entry_factory(activity_id=1203) inserted_time_entry = dao.create(time_entry_to_insert) @@ -53,7 +53,7 @@ def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_mat ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entry = time_entry_factory(activity_id=inserted_activity.id, technologies="[jira,sql]") + existent_time_entry = time_entry_factory(activity_id=inserted_activity.id) inserted_time_entry = dao.create(existent_time_entry) result = dao.delete(inserted_time_entry.id) @@ -69,3 +69,32 @@ def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( result = dao.delete(Faker().pyint()) assert result is None + + +def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(existent_time_entries).__dict__ + time_entry_id = inserted_time_entries["id"] + inserted_time_entries.update({"description": "description updated"}) + + time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entries) + + assert time_entry.id == time_entry_id + assert time_entry.description == inserted_time_entries.get("description") + + +def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(existent_time_entries).__dict__ + + time_entry = dao.update(0, inserted_time_entries) + + assert time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index e83b6afb..0952f8a9 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -29,3 +29,20 @@ def test__delete_time_entry__uses_the_time_entry_dao__to_delete_time_entry_selec assert time_entry_dao.delete.called assert expected_time_entry == deleted_time_entry + + +def test__update_time_entry__uses_the_time_entry_dao__to_update_one_time_entry( + mocker, +): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + updated_time_entry = time_entry_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert time_entry_dao.update.called + assert expected_time_entry == updated_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index e0994df4..1a679f37 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -30,3 +30,16 @@ def test__delete_time_entry_function__uses_the_time_entry_service__to_delete_tim assert time_entry_service.delete.called assert expected_time_entry == deleted_time_entry + + +def test__update_time_entries_function__uses_the_time_entry_service__to_update_an_time_entry( + mocker: MockFixture, +): + expected_time_entry = mocker.Mock() + time_entry_service = mocker.Mock(update=mocker.Mock(return_value=expected_time_entry)) + + time_entry_use_case = _use_cases.UpdateTimeEntryUseCase(time_entry_service) + updated_time_entry = time_entry_use_case.update_time_entry(Faker().uuid4(), Faker().pydict()) + + assert time_entry_service.update.called + assert expected_time_entry == updated_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 2810c87d..0ca4e272 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -1,2 +1,4 @@ # flake8: noqa -from ._time_entries import create_time_entry, delete_time_entry \ No newline at end of file +from ._time_entries import create_time_entry +from ._time_entries import delete_time_entry +from ._time_entries import update_time_entry diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 4cb4d4b0..0f6cf2db 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,3 +1,4 @@ # flake8: noqa from ._create_time_entry import create_time_entry -from ._delete_time_entry import delete_time_entry \ No newline at end of file +from ._delete_time_entry import delete_time_entry +from ._update_time_entry import update_time_entry diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py new file mode 100644 index 00000000..63366869 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_update_time_entry.py @@ -0,0 +1,46 @@ +import dataclasses +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def update_time_entry(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.UpdateTimeEntryUseCase(time_entry_service) + + try: + time_entry_id = int(req.route_params.get("id")) + time_entry_data = req.get_json() + + if not _validate_time_entry(time_entry_data): + status_code = 400 + response = b"Incorrect time entry body" + else: + updated_time_entry = use_case.update_time_entry(time_entry_id, time_entry_data) + status_code, response = [ + 404, b"Not found" + ] if not updated_time_entry else [200, json.dumps(updated_time_entry.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=400, + mimetype="application/json" + ) + + +def _validate_time_entry(time_entry_data: dict) -> bool: + time_entry_keys = [field.name for field in dataclasses.fields(_domain.TimeEntry)] + return all(key in time_entry_keys for key in time_entry_data.keys()) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index ad927811..de58675c 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -4,5 +4,6 @@ from ._services import TimeEntryService from ._use_cases import ( CreateTimeEntryUseCase, - DeleteTimeEntryUseCase + DeleteTimeEntryUseCase, + UpdateTimeEntryUseCase, ) \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_domain/_entities/__init__.py b/V2/time_tracker/time_entries/_domain/_entities/__init__.py index 88b4a739..3245a461 100644 --- a/V2/time_tracker/time_entries/_domain/_entities/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_entities/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entry import TimeEntry \ No newline at end of file +from ._time_entry import TimeEntry diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py index e10700ce..3f17d5ee 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries_dao import TimeEntriesDao \ No newline at end of file +from ._time_entries_dao import TimeEntriesDao diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index e7d94608..8c1dc9d9 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -11,3 +11,7 @@ def create(self, time_entry_data: TimeEntry) -> TimeEntry: @abc.abstractmethod def delete(self, id: int) -> TimeEntry: pass + + @abc.abstractmethod + def update(self, id: int, new_time_entry: dict) -> TimeEntry: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/__init__.py b/V2/time_tracker/time_entries/_domain/_services/__init__.py index e5e6ba1b..1a06f65b 100644 --- a/V2/time_tracker/time_entries/_domain/_services/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_services/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entry import TimeEntryService \ No newline at end of file +from ._time_entry import TimeEntryService diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 9d47d5e0..5c32c1e3 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -11,3 +11,6 @@ def create(self, time_entry_data: TimeEntry) -> TimeEntry: def delete(self, id: int) -> TimeEntry: return self.time_entry_dao.delete(id) + + def update(self, time_entry_id: int, new_time_entry: dict) -> TimeEntry: + return self.time_entry_dao.update(time_entry_id, new_time_entry) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 17b2442a..4f0ac92e 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,3 +1,4 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase +from ._update_time_entry_use_case import UpdateTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py new file mode 100644 index 00000000..0e2cdf70 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_update_time_entry_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class UpdateTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def update_time_entry( + self, time_entry_id: int, new_time_entry: dict + ) -> TimeEntry: + return self.time_entry_service.update(time_entry_id, new_time_entry) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py index 6037af9f..9c0740fa 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py @@ -44,9 +44,19 @@ def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: except sqlalchemy.exc.SQLAlchemyError: return None - def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: - time_entry = {key: time_entry.get(key) for key in self.time_entry_key} - return domain.TimeEntry(**time_entry) + def update(self, time_entry_id: int, time_entry_data: dict) -> domain.TimeEntry: + try: + query = self.time_entry.update().where(self.time_entry.c.id == time_entry_id).values(time_entry_data) + self.db.get_session().execute(query) + query_updated_time_entry = ( + sqlalchemy.sql.select(self.time_entry) + .where(self.time_entry.c.id == time_entry_id) + ) + time_entry = self.db.get_session().execute(query_updated_time_entry).one_or_none() + + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + except sqlalchemy.exc.SQLAlchemyError: + return None def delete(self, time_entry_id: int) -> domain.TimeEntry: query = ( @@ -58,3 +68,10 @@ def delete(self, time_entry_id: int) -> domain.TimeEntry: query_deleted_time_entry = sqlalchemy.sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) time_entry = self.db.get_session().execute(query_deleted_time_entry).one_or_none() return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: + time_entry.update({ + "start_date": str(time_entry.get("start_date")), + "end_date": str(time_entry.get("end_date"))}) + time_entry = {key: time_entry.get(key) for key in self.time_entry_key} + return domain.TimeEntry(**time_entry) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 773314bb..7e1be4ef 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,3 +1,4 @@ # flake8: noqa from ._application import create_time_entry -from ._application import delete_time_entry \ No newline at end of file +from ._application import delete_time_entry +from ._application import update_time_entry From 10ec2bb9e2b5f67358c00b549a376b7f610041de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Thu, 25 Nov 2021 17:57:44 -0500 Subject: [PATCH 59/74] feat: TT-417-crud-v2-projects (#360) * feat: TT-417 created CRUD project * test: TT-417 add test with customer id * refactor: TT-417 created enums and use * test: TT-417 add missing tests and resolve comments * refactor: TT-417 add HTTPStatus from http * refactor: TT-417 test name correction Co-authored-by: Jipson Murillo --- V2/serverless.yml | 42 ++- .../api/azure/project_azure_endpoints_test.py | 251 ++++++++++++++++++ V2/tests/conftest.py | 3 +- V2/tests/fixtures.py | 39 ++- .../integration/daos/projects_dao_test.py | 149 +++++++++++ .../unit/services/project_service_test.py | 74 ++++++ .../unit/use_cases/projects_use_case_test.py | 80 ++++++ .../projects/_application/__init__.py | 5 + .../_application/_projects/__init__.py | 5 + .../_application/_projects/_create_project.py | 57 ++++ .../_application/_projects/_delete_project.py | 35 +++ .../_application/_projects/_get_projects.py | 56 ++++ .../_application/_projects/_update_project.py | 53 ++++ V2/time_tracker/projects/_domain/__init__.py | 11 + .../projects/_domain/_entities/__init__.py | 2 + .../projects/_domain/_entities/_project.py | 14 + .../_persistence_contracts/__init__.py | 2 + .../_persistence_contracts/_projects_dao.py | 25 ++ .../projects/_domain/_services/__init__.py | 2 + .../projects/_domain/_services/_project.py | 24 ++ .../projects/_domain/_use_cases/__init__.py | 6 + .../_use_cases/_create_project_use_case.py | 10 + .../_use_cases/_delete_project_use_case.py | 10 + .../_use_cases/_get_project_by_id_use_case.py | 9 + .../_use_cases/_get_projects_use_case.py | 11 + .../_use_cases/_update_project_use_case.py | 9 + .../projects/_infrastructure/__init__.py | 2 + .../_data_persistence/__init__.py | 2 + .../_data_persistence/_projects_dao.py | 77 ++++++ V2/time_tracker/projects/interface.py | 5 + 30 files changed, 1066 insertions(+), 4 deletions(-) create mode 100644 V2/tests/api/azure/project_azure_endpoints_test.py create mode 100644 V2/tests/integration/daos/projects_dao_test.py create mode 100644 V2/tests/unit/services/project_service_test.py create mode 100644 V2/tests/unit/use_cases/projects_use_case_test.py create mode 100644 V2/time_tracker/projects/_application/__init__.py create mode 100644 V2/time_tracker/projects/_application/_projects/__init__.py create mode 100644 V2/time_tracker/projects/_application/_projects/_create_project.py create mode 100644 V2/time_tracker/projects/_application/_projects/_delete_project.py create mode 100644 V2/time_tracker/projects/_application/_projects/_get_projects.py create mode 100644 V2/time_tracker/projects/_application/_projects/_update_project.py create mode 100644 V2/time_tracker/projects/_domain/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_entities/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_entities/_project.py create mode 100644 V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py create mode 100644 V2/time_tracker/projects/_domain/_services/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_services/_project.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/__init__.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py create mode 100644 V2/time_tracker/projects/_infrastructure/__init__.py create mode 100644 V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py create mode 100644 V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py create mode 100644 V2/time_tracker/projects/interface.py diff --git a/V2/serverless.yml b/V2/serverless.yml index bac6bcc9..e02e8fb6 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -97,7 +97,6 @@ functions: route: time-entries/{id} authLevel: anonymous - update_time_entry: handler: time_tracker/time_entries/interface.update_time_entry events: @@ -117,4 +116,43 @@ functions: - POST route: customers/ authLevel: anonymous - \ No newline at end of file + + get_projects: + handler: time_tracker/projects/interface.get_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/{id:?} + authLevel: anonymous + + delete_project: + handler: time_tracker/projects/interface.delete_project + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: projects/{id} + authLevel: anonymous + + update_project: + handler: time_tracker/projects/interface.update_project + events: + - http: true + x-azure-settings: + methods: + - PUT + route: projects/{id} + authLevel: anonymous + + create_project: + handler: time_tracker/projects/interface.create_project + events: + - http: true + x-azure-settings: + methods: + - POST + route: projects/ + authLevel: anonymous diff --git a/V2/tests/api/azure/project_azure_endpoints_test.py b/V2/tests/api/azure/project_azure_endpoints_test.py new file mode 100644 index 00000000..232462b7 --- /dev/null +++ b/V2/tests/api/azure/project_azure_endpoints_test.py @@ -0,0 +1,251 @@ +import json +from http import HTTPStatus + +import pytest +from faker import Faker +import azure.functions as func + +from time_tracker.projects._application import _projects as azure_projects +from time_tracker.projects import _domain as domain +from time_tracker.projects import _infrastructure as infrastructure + +PROJECT_URL = '/api/projects/' + + +@pytest.fixture(name='insert_project') +def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(customer_id=inserted_customer.id) + dao = infrastructure.ProjectsSQLDao(test_db) + inserted_project = dao.create(project_to_insert) + return inserted_project + return _new_project + + +def test__project_azure_endpoint__returns_all_projects( + insert_project +): + inserted_projects = [ + insert_project().__dict__, + insert_project().__dict__ + ] + + req = func.HttpRequest(method='GET', body=None, url=PROJECT_URL) + response = azure_projects._get_projects.get_projects(req) + projects_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == json.dumps(inserted_projects) + + +def test__project_azure_endpoint__returns_a_project__when_project_matches_its_id( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._get_projects.get_projects(req) + activitiy_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="GET", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._get_projects.get_projects(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__returns_a_project_with_inactive_status__when_a_project_matching_its_id_is_found( + insert_project +): + inserted_project = insert_project().__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._delete_project.delete_project(req) + project_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert project_json_data['status'] == 0 + assert project_json_data['deleted'] is True + + +def test__delete_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__delete_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_delete( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._delete_project.delete_project(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_project_azure_endpoint__returns_a_project__when_found_a_project_to_update( + insert_project +): + inserted_project = insert_project().__dict__ + + project_body = {"description": Faker().sentence()} + req = func.HttpRequest( + method='PUT', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": inserted_project["id"]}, + ) + + response = azure_projects._update_project.update_project(req) + activitiy_json_data = response.get_body().decode("utf-8") + inserted_project.update(project_body) + + assert response.status_code == HTTPStatus.OK + assert activitiy_json_data == json.dumps(inserted_project) + + +def test__update_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_update( + project_factory +): + project_body = project_factory().__dict__ + + req = func.HttpRequest( + method="PUT", + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": project_body["id"]}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b"Not found" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_receive_an_incorrect_body( +): + project_body = Faker().pydict(5, True, str) + req = func.HttpRequest( + method="PUT", + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Incorrect body" + + +def test__update_projects_azure_endpoint__returns_a_status_code_400__when_project_receive_invalid_id( +): + req = func.HttpRequest( + method="PUT", + body=None, + url=PROJECT_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_projects._update_project.update_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b"Invalid Format ID" + + +def test__project_azure_endpoint__creates_a_project__when_project_has_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted = insert_customer(customer_factory(), test_db) + project_body = project_factory(inserted.id).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + project_json_data = json.loads(response.get_body()) + project_body['id'] = project_json_data['id'] + + assert response.status_code == HTTPStatus.CREATED + assert project_json_data == project_body + + +def test__project_azure_endpoint__returns_a_status_code_400__when_project_does_not_all_attributes( + test_db, project_factory, insert_customer, customer_factory +): + inserted_customer = insert_customer(customer_factory(), test_db) + project_body = project_factory(customer_id=inserted_customer.id).__dict__ + project_body.pop('name') + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == json.dumps(['The name key is missing in the input data']).encode() + + +def test__project_azure_endpoint__returns_a_status_code_500__when_project_receive_incorrect_type_data( + project_factory, insert_customer, customer_factory, test_db +): + insert_customer(customer_factory(), test_db) + project_body = project_factory(technologies=Faker().pylist(2, True, str)).__dict__ + + req = func.HttpRequest( + method='POST', + body=json.dumps(project_body).encode("utf-8"), + url=PROJECT_URL, + ) + + response = azure_projects._create_project.create_project(req) + + assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR + assert response.get_body() == b"could not be created" diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index 4ad03c51..ff67203c 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,4 +1,5 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity from fixtures import _time_entry_factory -from fixtures import _customer_factory +from fixtures import _customer_factory, _insert_customer +from fixtures import _project_factory diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 91b0a801..2eae7b16 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,9 +2,11 @@ from faker import Faker import time_tracker.activities._domain as activities_domain +import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain import time_tracker.customers._domain as customers_domain -import time_tracker.activities._infrastructure as activities_infrastructure +import time_tracker.customers._infrastructure as customers_infrastructure +import time_tracker.projects._domain as projects_domain from time_tracker._infrastructure import DB @@ -94,3 +96,38 @@ def _make_customer( return customer return _make_customer + + +@pytest.fixture(name='project_factory') +def _project_factory() -> projects_domain.Project: + def _make_project( + id=Faker().pyint(), + name=Faker().name(), + description=Faker().sentence(), + project_type_id=Faker().pyint(), + customer_id=Faker().pyint(), + status=Faker().pyint(), + deleted=False, + technologies=str(Faker().pylist()) + ): + project = projects_domain.Project( + id=id, + name=name, + description=description, + project_type_id=project_type_id, + customer_id=customer_id, + status=status, + deleted=deleted, + technologies=technologies + ) + return project + return _make_project + + +@pytest.fixture(name='insert_customer') +def _insert_customer() -> customers_domain.Customer: + def _new_customer(customer: customers_domain.Customer, database: DB): + dao = customers_infrastructure.CustomersSQLDao(database) + new_customer = dao.create(customer) + return new_customer + return _new_customer diff --git a/V2/tests/integration/daos/projects_dao_test.py b/V2/tests/integration/daos/projects_dao_test.py new file mode 100644 index 00000000..64837e37 --- /dev/null +++ b/V2/tests/integration/daos/projects_dao_test.py @@ -0,0 +1,149 @@ +import pytest +import typing +from faker import Faker + +from time_tracker.projects import _domain as domain +from time_tracker.projects import _infrastructure as infrastructure +from time_tracker._infrastructure import DB + + +@pytest.fixture(name='insert_project') +def _insert_project(customer_factory, test_db, insert_customer, create_fake_dao, project_factory) -> domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(customer_id=inserted_customer.id) + inserted_project = create_fake_dao.create(project_to_insert) + return inserted_project + + return _new_project + + +@pytest.fixture(name='create_fake_dao') +def _create_fake_dao() -> domain.ProjectsDao: + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + return dao + + +@pytest.fixture(name='clean_database', autouse=True) +def _clean_database(): + yield + db_fake = DB() + dao = infrastructure.ProjectsSQLDao(db_fake) + query = dao.project.delete() + dao.db.get_session().execute(query) + + +def test__create_project__returns_a_project_dto__when_saves_correctly_with_sql_database( + create_fake_dao, project_factory, insert_customer, customer_factory +): + dao = create_fake_dao + inserted_customer = insert_customer(customer_factory(), dao.db) + project_to_insert = project_factory(customer_id=inserted_customer.id) + + inserted_project = dao.create(project_to_insert) + + assert isinstance(inserted_project, domain.Project) + assert inserted_project == project_to_insert + + +def test_update__returns_an_update_project__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + expected_description = Faker().sentence() + updated_project = dao.update(inserted_project.id, {"description": expected_description}) + + assert isinstance(updated_project, domain.Project) + assert updated_project.id == inserted_project.id + assert updated_project.description == expected_description + + +def test_update__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.update(project_to_insert.id, {"description": Faker().sentence()}) + + assert results is None + + +def test__get_all__returns_a_list_of_project_dto_objects__when_one_or_more_projects_are_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_projects = [ + insert_project(), + insert_project() + ] + + projects = dao.get_all() + assert isinstance(projects, typing.List) + assert projects == inserted_projects + + +def test_get_by_id__returns_an_project_dto__when_found_one_project_that_matches_its_id_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.get_by_id(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project == inserted_project + + +def test__get_by_id__returns_none__when_no_project_matches_its_id_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + project = dao.get_by_id(project_to_insert.id) + + assert project is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_projects_with_sql_database( + create_fake_dao +): + projects = create_fake_dao.get_all() + + assert isinstance(projects, typing.List) + assert projects == [] + + +def test_delete__returns_an_project_with_inactive_status__when_an_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, insert_project +): + dao = create_fake_dao + + inserted_project = insert_project() + + project = dao.delete(inserted_project.id) + + assert isinstance(project, domain.Project) + assert project.id == inserted_project.id + assert project.status == 0 + assert project.deleted is True + + +def test_delete__returns_none__when_no_project_matching_its_id_is_found_with_sql_database( + create_fake_dao, project_factory +): + dao = create_fake_dao + project_to_insert = project_factory() + + results = dao.delete(project_to_insert.id) + + assert results is None diff --git a/V2/tests/unit/services/project_service_test.py b/V2/tests/unit/services/project_service_test.py new file mode 100644 index 00000000..9baf657e --- /dev/null +++ b/V2/tests/unit/services/project_service_test.py @@ -0,0 +1,74 @@ +from faker import Faker + +from time_tracker.projects._domain import ProjectService + + +def test__get_all__uses_the_project_dao__to_retrieve_projects(mocker): + expected_projects = mocker.Mock() + project_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + project_service = ProjectService(project_dao) + + actual_projects = project_service.get_all() + + assert project_dao.get_all.called + assert expected_projects == actual_projects + + +def test__get_by_id__uses_the_project_dao__to_retrieve_one_project(mocker): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + actual_project = project_service.get_by_id(Faker().pyint()) + + assert project_dao.get_by_id.called + assert expected_project == actual_project + + +def test__delete_project__uses_the_project_dao__to_change_project_status( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_project) + ) + + project_service = ProjectService(project_dao) + deleted_project = project_service.delete(Faker().pyint()) + + assert project_dao.delete.called + assert expected_project == deleted_project + + +def test__update_project__uses_the_project_dao__to_update_one_project( + mocker, +): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + updated_project = project_service.update( + Faker().pyint(), Faker().pydict() + ) + + assert project_dao.update.called + assert expected_project == updated_project + + +def test__create_project__uses_the_project_dao__to_create_an_project(mocker, project_factory): + expected_project = mocker.Mock() + project_dao = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + project_service = ProjectService(project_dao) + + actual_project = project_service.create(project_factory()) + + assert project_dao.create.called + assert expected_project == actual_project diff --git a/V2/tests/unit/use_cases/projects_use_case_test.py b/V2/tests/unit/use_cases/projects_use_case_test.py new file mode 100644 index 00000000..22167418 --- /dev/null +++ b/V2/tests/unit/use_cases/projects_use_case_test.py @@ -0,0 +1,80 @@ +from pytest_mock import MockFixture +from faker import Faker + +from time_tracker.projects._domain import _use_cases + + +def test__create_project_function__uses_the_projects_service__to_create_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + create=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.CreateProjectUseCase(project_service) + actual_project = project_use_case.create_project(project_factory()) + + assert project_service.create.called + assert expected_project == actual_project + + +def test__delete_project_function__uses_the_project_service__to_delete_project_selected( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock(delete=mocker.Mock(return_value=expected_project)) + + project_use_case = _use_cases.DeleteProjectUseCase(project_service) + deleted_project = project_use_case.delete_project(Faker().pyint()) + + assert project_service.delete.called + assert expected_project == deleted_project + + +def test__get_list_projects_function__uses_the_project_service__to_retrieve_projects( + mocker: MockFixture, +): + expected_projects = mocker.Mock() + project_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_projects) + ) + + projects_use_case = _use_cases.GetProjectsUseCase(project_service) + actual_projects = projects_use_case.get_projects() + + assert project_service.get_all.called + assert expected_projects == actual_projects + + +def test__get_project_by_id_function__uses_the_project_service__to_retrieve_project( + mocker: MockFixture, +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_project) + ) + + project_use_case = _use_cases.GetProjectUseCase(project_service) + actual_project = project_use_case.get_project_by_id(Faker().pyint()) + + assert project_service.get_by_id.called + assert expected_project == actual_project + + +def test__update_project_function__uses_the_projects_service__to_update_an_project( + mocker: MockFixture, project_factory +): + expected_project = mocker.Mock() + project_service = mocker.Mock( + update=mocker.Mock(return_value=expected_project) + ) + project_to_update = project_factory() + + project_use_case = _use_cases.UpdateProjectUseCase(project_service) + updated_project = project_use_case.update_project( + Faker().pyint(), project_to_update.__dict__ + ) + + assert project_service.update.called + assert expected_project == updated_project diff --git a/V2/time_tracker/projects/_application/__init__.py b/V2/time_tracker/projects/_application/__init__.py new file mode 100644 index 00000000..6b48fb8a --- /dev/null +++ b/V2/time_tracker/projects/_application/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._projects import create_project +from ._projects import delete_project +from ._projects import get_projects +from ._projects import update_project \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/__init__.py b/V2/time_tracker/projects/_application/_projects/__init__.py new file mode 100644 index 00000000..9f87eef2 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._create_project import create_project +from ._delete_project import delete_project +from ._get_projects import get_projects +from ._update_project import update_project \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/_create_project.py b/V2/time_tracker/projects/_application/_projects/_create_project.py new file mode 100644 index 00000000..559ba864 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_create_project.py @@ -0,0 +1,57 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def create_project(req: func.HttpRequest) -> func.HttpResponse: + + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.CreateProjectUseCase(project_service) + + project_data = req.get_json() + + validation_errors = _validate_project(project_data) + if validation_errors: + status_code = HTTPStatus.BAD_REQUEST + response = json.dumps(validation_errors) + else: + project_to_create = _domain.Project( + id=None, + name=project_data["name"], + description=project_data["description"], + project_type_id=project_data["project_type_id"], + customer_id=project_data["customer_id"], + status=project_data["status"], + deleted=False, + technologies=project_data["technologies"] + ) + + created_project = use_case.create_project(project_to_create) + + status_code, response = [ + HTTPStatus.INTERNAL_SERVER_ERROR, b"could not be created" + ] if not created_project else [HTTPStatus.CREATED, json.dumps(created_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json" + ) + + +def _validate_project(project_data: dict) -> typing.List[str]: + project_fields = [field.name for field in dataclasses.fields(_domain.Project) + if field.type != typing.Optional[field.type]] + missing_keys = [field for field in project_fields if field not in project_data] + return [ + f'The {missing_key} key is missing in the input data' + for missing_key in missing_keys + ] diff --git a/V2/time_tracker/projects/_application/_projects/_delete_project.py b/V2/time_tracker/projects/_application/_projects/_delete_project.py new file mode 100644 index 00000000..5274b79f --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_delete_project.py @@ -0,0 +1,35 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def delete_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.DeleteProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + deleted_project = use_case.delete_project(project_id) + + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not deleted_project else [HTTPStatus.OK, json.dumps(deleted_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) diff --git a/V2/time_tracker/projects/_application/_projects/_get_projects.py b/V2/time_tracker/projects/_application/_projects/_get_projects.py new file mode 100644 index 00000000..c15efa1c --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_projects.py @@ -0,0 +1,56 @@ +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + + project_id = req.route_params.get("id") + + try: + if project_id: + response = _get_by_id(int(project_id), project_service) + if not response: + return func.HttpResponse( + body=b"Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype="application/json" + ) + else: + response = _get_all(project_service) + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _get_by_id(project_id: int, project_service: _domain.ProjectService) -> str: + use_case = _domain._use_cases.GetProjectUseCase(project_service) + project = use_case.get_project_by_id(project_id) + + return project.__dict__ if project else None + + +def _get_all(project_service: _domain.ProjectService) -> typing.List: + use_case = _domain._use_cases.GetProjectsUseCase(project_service) + return [ + project.__dict__ + for project in use_case.get_projects() + ] diff --git a/V2/time_tracker/projects/_application/_projects/_update_project.py b/V2/time_tracker/projects/_application/_projects/_update_project.py new file mode 100644 index 00000000..b2cc1e57 --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_update_project.py @@ -0,0 +1,53 @@ +import dataclasses +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def update_project(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.UpdateProjectUseCase(project_service) + + try: + project_id = int(req.route_params.get("id")) + project_data = req.get_json() + + if not _validate_project(project_data): + status_code = HTTPStatus.BAD_REQUEST + response = b"Incorrect body" + + else: + updated_project = use_case.update_project(project_id, project_data) + status_code, response = [ + HTTPStatus.NOT_FOUND, b"Not found" + ] if not updated_project else [HTTPStatus.OK, json.dumps(updated_project.__dict__)] + + return func.HttpResponse( + body=response, + status_code=status_code, + mimetype="application/json", + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + except Exception as error: + return func.HttpResponse( + body=str(error).encode(), + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json", + ) + + +def _validate_project(project_data: dict) -> bool: + project_keys = [field.name for field in dataclasses.fields(_domain.Project)] + return all(key in project_keys for key in project_data.keys()) diff --git a/V2/time_tracker/projects/_domain/__init__.py b/V2/time_tracker/projects/_domain/__init__.py new file mode 100644 index 00000000..c90dbcaf --- /dev/null +++ b/V2/time_tracker/projects/_domain/__init__.py @@ -0,0 +1,11 @@ +# flake8: noqa +from ._entities import Project +from ._persistence_contracts import ProjectsDao +from ._services import ProjectService +from ._use_cases import ( + CreateProjectUseCase, + DeleteProjectUseCase, + GetProjectsUseCase, + GetProjectUseCase, + UpdateProjectUseCase +) \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/__init__.py b/V2/time_tracker/projects/_domain/_entities/__init__.py new file mode 100644 index 00000000..693c3a41 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import Project \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/_project.py b/V2/time_tracker/projects/_domain/_entities/_project.py new file mode 100644 index 00000000..0b2ffe1a --- /dev/null +++ b/V2/time_tracker/projects/_domain/_entities/_project.py @@ -0,0 +1,14 @@ +from dataclasses import dataclass +from typing import List, Optional + + +@dataclass(frozen=True) +class Project: + id: Optional[int] + name: str + description: str + project_type_id: int + customer_id: int + status: int + deleted: Optional[bool] + technologies: List[str] diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py new file mode 100644 index 00000000..b17214a7 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsDao \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py new file mode 100644 index 00000000..f38c8ebd --- /dev/null +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py @@ -0,0 +1,25 @@ +import abc + +from .. import Project + + +class ProjectsDao(abc.ABC): + @abc.abstractmethod + def create(self, time_entry_data: Project) -> Project: + pass + + @abc.abstractmethod + def get_all(self) -> Project: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Project: + pass + + @abc.abstractmethod + def update(self, id: int, project_data: dict) -> Project: + pass + + @abc.abstractmethod + def delete(self, id: int) -> Project: + pass diff --git a/V2/time_tracker/projects/_domain/_services/__init__.py b/V2/time_tracker/projects/_domain/_services/__init__.py new file mode 100644 index 00000000..5eb9532b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._project import ProjectService \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_services/_project.py b/V2/time_tracker/projects/_domain/_services/_project.py new file mode 100644 index 00000000..0f99dafb --- /dev/null +++ b/V2/time_tracker/projects/_domain/_services/_project.py @@ -0,0 +1,24 @@ +import typing + +from .. import Project, ProjectsDao + + +class ProjectService: + + def __init__(self, project_dao: ProjectsDao): + self.project_dao = project_dao + + def create(self, project_data: Project) -> Project: + return self.project_dao.create(project_data) + + def get_all(self) -> typing.List[Project]: + return self.project_dao.get_all() + + def get_by_id(self, id: int) -> Project: + return self.project_dao.get_by_id(id) + + def update(self, id: int, project_data: dict) -> Project: + return self.project_dao.update(id, project_data) + + def delete(self, id: int) -> Project: + return self.project_dao.delete(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/__init__.py b/V2/time_tracker/projects/_domain/_use_cases/__init__.py new file mode 100644 index 00000000..defb127d --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +from ._create_project_use_case import CreateProjectUseCase +from ._delete_project_use_case import DeleteProjectUseCase +from ._get_project_by_id_use_case import GetProjectUseCase +from ._get_projects_use_case import GetProjectsUseCase +from ._update_project_use_case import UpdateProjectUseCase diff --git a/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py new file mode 100644 index 00000000..60b50687 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_create_project_use_case.py @@ -0,0 +1,10 @@ +from .. import Project, ProjectService + + +class CreateProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def create_project(self, project_data: Project) -> Project: + return self.project_service.create(project_data) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py new file mode 100644 index 00000000..9dd91d4b --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_delete_project_use_case.py @@ -0,0 +1,10 @@ +from .. import Project, ProjectService + + +class DeleteProjectUseCase: + + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def delete_project(self, id: int) -> Project: + return self.project_service.delete(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py new file mode 100644 index 00000000..94573496 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_project_by_id_use_case.py @@ -0,0 +1,9 @@ +from .. import ProjectService, Project + + +class GetProjectUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_project_by_id(self, id: int) -> Project: + return self.project_service.get_by_id(id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py new file mode 100644 index 00000000..ccf0b3a4 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_projects(self) -> typing.List[Project]: + return self.project_service.get_all() diff --git a/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py new file mode 100644 index 00000000..628d7437 --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_update_project_use_case.py @@ -0,0 +1,9 @@ +from .. import ProjectService, Project + + +class UpdateProjectUseCase: + def __init__(self, projects_service: ProjectService): + self.projects_service = projects_service + + def update_project(self, id: int, project_data: dict) -> Project: + return self.projects_service.update(id, project_data) diff --git a/V2/time_tracker/projects/_infrastructure/__init__.py b/V2/time_tracker/projects/_infrastructure/__init__.py new file mode 100644 index 00000000..b940cba3 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._data_persistence import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py new file mode 100644 index 00000000..b73fcf44 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from ._projects_dao import ProjectsSQLDao diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py new file mode 100644 index 00000000..2ec61186 --- /dev/null +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py @@ -0,0 +1,77 @@ +import typing +import dataclasses + +import sqlalchemy as sq + +from ... import _domain as domain +from time_tracker._infrastructure import _db + + +class ProjectsSQLDao(domain.ProjectsDao): + + def __init__(self, database: _db.DB): + self.project_key = [field.name for field in dataclasses.fields(domain.Project)] + self.db = database + self.project = sq.Table( + 'project', + self.db.metadata, + sq.Column('id', sq.Integer, primary_key=True, autoincrement=True), + sq.Column('name', sq.String), + sq.Column('description', sq.String), + sq.Column('project_type_id', sq.Integer), + sq.Column('customer_id', sq.Integer, sq.ForeignKey('customer.id')), + sq.Column('status', sq.SmallInteger), + sq.Column('deleted', sq.BOOLEAN), + sq.Column( + 'technologies', + sq.ARRAY(sq.String).with_variant(sq.String, "sqlite") + ), + extend_existing=True, + ) + + def create(self, project_data: domain.Project) -> domain.Project: + try: + new_project = project_data.__dict__ + new_project.pop('id', None) + + query = self.project.insert().values(new_project).return_defaults() + project = self.db.get_session().execute(query) + new_project.update({"id": project.inserted_primary_key[0]}) + return self.__create_project_dto(new_project) + + except sq.exc.SQLAlchemyError: + return None + + def get_by_id(self, id: int) -> domain.Project: + query = sq.sql.select(self.project).where(self.project.c.id == id) + project = self.db.get_session().execute(query).one_or_none() + return self.__create_project_dto(dict(project)) if project else None + + def get_all(self) -> typing.List[domain.Project]: + query = sq.sql.select(self.project) + result = self.db.get_session().execute(query) + return [ + self.__create_project_dto(dict(project)) + for project in result + ] + + def delete(self, id: int) -> domain.Project: + query = ( + self.project.update() + .where(self.project.c.id == id) + .values({"deleted": True, "status": 0}) + ) + self.db.get_session().execute(query) + return self.get_by_id(id) + + def update(self, id: int, project_data: dict) -> domain.Project: + try: + query = self.project.update().where(self.project.c.id == id).values(project_data) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError as error: + raise Exception(error.orig) + + def __create_project_dto(self, project: dict) -> domain.Project: + project = {key: project.get(key) for key in self.project_key} + return domain.Project(**project) diff --git a/V2/time_tracker/projects/interface.py b/V2/time_tracker/projects/interface.py new file mode 100644 index 00000000..2fb3244b --- /dev/null +++ b/V2/time_tracker/projects/interface.py @@ -0,0 +1,5 @@ +# flake8: noqa +from ._application import create_project +from ._application import delete_project +from ._application import get_projects +from ._application import update_project \ No newline at end of file From c8a31341b120792f46442815fad2d463262302ab Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Thu, 25 Nov 2021 19:47:16 -0500 Subject: [PATCH 60/74] feat: TT-404 GET Time Entries (#341) --- V2/serverless.yml | 14 +++- .../azure/time_entry_azure_endpoints_test.py | 65 ++++++++++++++++++- .../integration/daos/time_entries_dao_test.py | 55 ++++++++++++++++ .../unit/services/time_entry_service_test.py | 26 ++++++++ .../use_cases/time_entries_use_case_test.py | 32 +++++++++ .../time_entries/_application/__init__.py | 1 + .../_application/_time_entries/__init__.py | 1 + .../_time_entries/_get_time_entries.py | 61 +++++++++++++++++ .../time_entries/_domain/__init__.py | 4 +- .../_time_entries_dao.py | 8 +++ .../_domain/_services/_time_entry.py | 9 ++- .../_domain/_use_cases/__init__.py | 2 + .../_get_time_entry_by_id_use_case.py | 9 +++ .../_use_cases/_get_time_entry_use_case.py | 11 ++++ .../_data_persistence/__init__.py | 2 +- ...ntries_dao.py => _time_entries_sql_dao.py} | 15 +++++ V2/time_tracker/time_entries/interface.py | 1 + 17 files changed, 308 insertions(+), 8 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py rename V2/time_tracker/time_entries/_infrastructure/_data_persistence/{_time_entries_dao.py => _time_entries_sql_dao.py} (85%) diff --git a/V2/serverless.yml b/V2/serverless.yml index e02e8fb6..ba8edb52 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -63,9 +63,9 @@ functions: - http: true x-azure-settings: methods: - - PUT + - PUT route: activities/{id} - authLevel: anonymous + authLevel: anonymous create_activity: handler: time_tracker/activities/interface.create_activity @@ -87,6 +87,16 @@ functions: route: time-entries/ authLevel: anonymous + get_time_entries: + handler: time_tracker/time_entries/interface.get_time_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/{id:?} + authLevel: anonymous + delete_time_entry: handler: time_tracker/time_entries/interface.delete_time_entry events: diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index f57db585..fcc8dea0 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -1,6 +1,7 @@ import pytest import json from faker import Faker +from http import HTTPStatus import azure.functions as func @@ -39,7 +40,7 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ time_entry_json_data = json.loads(response.get_body()) time_entry_body['id'] = time_entry_json_data['id'] - assert response.status_code == 201 + assert response.status_code == HTTPStatus.CREATED assert time_entry_json_data == time_entry_body @@ -60,7 +61,7 @@ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_de response = azure_time_entries._delete_time_entry.delete_time_entry(req) time_entry_json_data = json.loads(response.get_body().decode("utf-8")) - assert response.status_code == 200 + assert response.status_code == HTTPStatus.OK assert time_entry_json_data['deleted'] is True @@ -75,7 +76,65 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti response = azure_time_entries._delete_time_entry.delete_time_entry(req) - assert response.status_code == 400 + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__time_entry_azure_endpoint__returns_all_time_entries( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest(method="GET", body=None, url=TIME_ENTRY_URL) + + response = azure_time_entries.get_time_entries(req) + time_entries_json_data = response.get_body().decode("utf-8") + time_entry_list = json.loads(time_entries_json_data) + + assert response.status_code == HTTPStatus.OK + assert time_entry_list.pop() == inserted_time_entries + + +def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matches_its_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": inserted_time_entries["id"]}, + ) + + response = azure_time_entries.get_time_entries(req) + time_entry_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data == json.dumps(inserted_time_entries) + + +def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity +): + inserted_activity = insert_activity(activity_factory(), test_db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + insert_time_entry(time_entries_to_insert, test_db).__dict__ + + req = func.HttpRequest( + method="GET", + body=None, + url=TIME_ENTRY_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_time_entries.get_time_entries(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST assert response.get_body() == b'Invalid Format ID' diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index fbe5a7ed..e78af556 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -1,4 +1,6 @@ import pytest +import typing + from faker import Faker import time_tracker.time_entries._domain as domain @@ -98,3 +100,56 @@ def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( time_entry = dao.update(0, inserted_time_entries) assert time_entry is None + + +def test__get_all__returns_a_list_of_time_entries_dto_objects__when_one_or_more_time_entries_are_found_in_sql_database( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = [dao.create(time_entries_to_insert)] + + time_entry = dao.get_all() + + assert isinstance(time_entry, typing.List) + assert time_entry == inserted_time_entries + + +def test__get_all__returns_an_empty_list__when_doesnt_found_any_time_entries_in_sql_database( + test_db, create_fake_dao, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + time_entry = dao.get_all() + assert time_entry == [] + + +def test__get_by_id__returns_a_time_entry_dto__when_found_one_time_entry_that_match_id_with_sql_database( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_time_entries = dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(time_entries_to_insert.id) + + assert isinstance(time_entry, domain.TimeEntry) + assert time_entry.id == inserted_time_entries.id + assert time_entry == inserted_time_entries + + +def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + dao.create(time_entries_to_insert) + + time_entry = dao.get_by_id(Faker().pyint()) + + assert time_entry is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index 0952f8a9..1992324f 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -46,3 +46,29 @@ def test__update_time_entry__uses_the_time_entry_dao__to_update_one_time_entry( assert time_entry_dao.update.called assert expected_time_entry == updated_time_entry + + +def test__get_all__uses_the_time_entry_dao__to_retrieve_time_entries(mocker): + expected_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + time_activity_service = TimeEntryService(time_entry_dao) + + actual_activities = time_activity_service.get_all() + + assert time_entry_dao.get_all.called + assert expected_time_entries == actual_activities + + +def test__get_by_id__uses_the_time_entry_dao__to_retrieve_one_time_entry(mocker): + expected_time_entry = mocker.Mock() + time_entry_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entry) + ) + time_entry_service = TimeEntryService(time_entry_dao) + + actual_time_entry = time_entry_service.get_by_id(Faker().uuid4()) + + assert time_entry_dao.get_by_id.called + assert expected_time_entry == actual_time_entry diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index 1a679f37..05937789 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -3,6 +3,8 @@ from time_tracker.time_entries._domain import _use_cases +fake = Faker() + def test__create_time_entry_function__uses_the_time_entries_service__to_create_time_entry( mocker: MockFixture, time_entry_factory @@ -43,3 +45,33 @@ def test__update_time_entries_function__uses_the_time_entry_service__to_update_a assert time_entry_service.update.called assert expected_time_entry == updated_time_entry + + +def test__get_all_time_entries_function__using_the_use_case_get_time_entries__to_get_all_time_entries( + mocker: MockFixture, +): + expected_time_entries = mocker.Mock() + time_entry_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_time_entries) + ) + + time_entries_use_case = _use_cases.GetTimeEntriesUseCase(time_entry_service) + actual_time_entries = time_entries_use_case.get_time_entries() + + assert time_entry_service.get_all.called + assert expected_time_entries == actual_time_entries + + +def test__get_time_entry_by_id_function__uses_the_time_entry_service__to_retrieve_time_entry( + mocker: MockFixture, +): + expected_time_entries = mocker.Mock() + time_entry_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_time_entries) + ) + + time_entry_use_case = _use_cases.GetTimeEntryUseCase(time_entry_service) + actual_time_entry = time_entry_use_case.get_time_entry_by_id(fake.uuid4()) + + assert time_entry_service.get_by_id.called + assert expected_time_entries == actual_time_entry diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index 0ca4e272..eb817c22 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -2,3 +2,4 @@ from ._time_entries import create_time_entry from ._time_entries import delete_time_entry from ._time_entries import update_time_entry +from ._time_entries import get_time_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 0f6cf2db..382fbbe4 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -2,3 +2,4 @@ from ._create_time_entry import create_time_entry from ._delete_time_entry import delete_time_entry from ._update_time_entry import update_time_entry +from ._get_time_entries import get_time_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py new file mode 100644 index 00000000..37574d32 --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_time_entries.py @@ -0,0 +1,61 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from time_tracker.time_entries._infrastructure import TimeEntriesSQLDao +from time_tracker.time_entries._domain import TimeEntryService, _use_cases +from time_tracker._infrastructure import DB + + +NOT_FOUND = b'Not Found' +INVALID_FORMAT_ID = b'Invalid Format ID' + + +def get_time_entries(req: func.HttpRequest) -> func.HttpResponse: + + time_entry_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + if time_entry_id: + try: + response = _get_by_id(int(time_entry_id)) + if response == NOT_FOUND: + status_code = HTTPStatus.NOT_FOUND + except ValueError: + response = INVALID_FORMAT_ID + status_code = HTTPStatus.BAD_REQUEST + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + + +def _get_by_id(id: int) -> str: + database = DB() + time_entry_use_case = _use_cases.GetTimeEntryUseCase( + _create_time_entry_service(database) + ) + time_entry = time_entry_use_case.get_time_entry_by_id(id) + + return json.dumps(time_entry.__dict__) if time_entry else NOT_FOUND + + +def _get_all() -> str: + database = DB() + time_entries_use_case = _use_cases.GetTimeEntriesUseCase( + _create_time_entry_service(database) + ) + return json.dumps( + [ + time_entry.__dict__ + for time_entry in time_entries_use_case.get_time_entries() + ] + ) + + +def _create_time_entry_service(db: DB): + time_entry_sql = TimeEntriesSQLDao(db) + return TimeEntryService(time_entry_sql) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index de58675c..2034f8d3 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -6,4 +6,6 @@ CreateTimeEntryUseCase, DeleteTimeEntryUseCase, UpdateTimeEntryUseCase, -) \ No newline at end of file + GetTimeEntriesUseCase, + GetTimeEntryUseCase +) diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index 8c1dc9d9..ca4ceb98 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -1,4 +1,5 @@ import abc +import typing from time_tracker.time_entries._domain import TimeEntry @@ -15,3 +16,10 @@ def delete(self, id: int) -> TimeEntry: @abc.abstractmethod def update(self, id: int, new_time_entry: dict) -> TimeEntry: pass + + def get_by_id(self, id: int) -> TimeEntry: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[TimeEntry]: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 5c32c1e3..5b3f4115 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -1,8 +1,9 @@ +import typing + from time_tracker.time_entries._domain import TimeEntry, TimeEntriesDao class TimeEntryService: - def __init__(self, time_entry_dao: TimeEntriesDao): self.time_entry_dao = time_entry_dao @@ -14,3 +15,9 @@ def delete(self, id: int) -> TimeEntry: def update(self, time_entry_id: int, new_time_entry: dict) -> TimeEntry: return self.time_entry_dao.update(time_entry_id, new_time_entry) + + def get_by_id(self, id: int) -> TimeEntry: + return self.time_entry_dao.get_by_id(id) + + def get_all(self) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_all() diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 4f0ac92e..fdd1258d 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -2,3 +2,5 @@ from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase from ._update_time_entry_use_case import UpdateTimeEntryUseCase +from ._get_time_entry_use_case import GetTimeEntriesUseCase +from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py new file mode 100644 index 00000000..410233e1 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_by_id_use_case.py @@ -0,0 +1,9 @@ +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntryUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entry_by_id(self, id: int) -> TimeEntry: + return self.time_entry_service.get_by_id(id) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py new file mode 100644 index 00000000..c7bd3f27 --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_time_entry_use_case.py @@ -0,0 +1,11 @@ +import typing + +from time_tracker.time_entries._domain import TimeEntryService, TimeEntry + + +class GetTimeEntriesUseCase: + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_time_entries(self) -> typing.List[TimeEntry]: + return self.time_entry_service.get_all() diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py index b999febe..76b56455 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from ._time_entries_dao import TimeEntriesSQLDao +from ._time_entries_sql_dao import TimeEntriesSQLDao diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py similarity index 85% rename from V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py rename to V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 9c0740fa..9e7016d4 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -1,6 +1,8 @@ import dataclasses +import typing import sqlalchemy +import sqlalchemy.sql as sql import time_tracker.time_entries._domain as domain from time_tracker._infrastructure import _db @@ -31,6 +33,19 @@ def __init__(self, database: _db.DB): extend_existing=True, ) + def get_by_id(self, time_entry_id: int) -> domain.TimeEntry: + query = sql.select(self.time_entry).where(self.time_entry.c.id == time_entry_id) + time_entry = self.db.get_session().execute(query).one_or_none() + return self.__create_time_entry_dto(dict(time_entry)) if time_entry else None + + def get_all(self) -> typing.List[domain.TimeEntry]: + query = sql.select(self.time_entry) + result = self.db.get_session().execute(query) + return [ + self.__create_time_entry_dto(dict(time_entry)) + for time_entry in result + ] + def create(self, time_entry_data: domain.TimeEntry) -> domain.TimeEntry: try: new_time_entry = time_entry_data.__dict__ diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 7e1be4ef..8873b93d 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -2,3 +2,4 @@ from ._application import create_time_entry from ._application import delete_time_entry from ._application import update_time_entry +from ._application import get_time_entries From fd0bc986fcc074bd5f6d6e9b7b602951375f2aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Fri, 26 Nov 2021 10:34:06 -0500 Subject: [PATCH 61/74] feat: TT-429 created enums for response messages (#362) --- V2/time_tracker/utils/enums/__init__.py | 2 ++ V2/time_tracker/utils/enums/response_enums.py | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 V2/time_tracker/utils/enums/__init__.py create mode 100644 V2/time_tracker/utils/enums/response_enums.py diff --git a/V2/time_tracker/utils/enums/__init__.py b/V2/time_tracker/utils/enums/__init__.py new file mode 100644 index 00000000..317ca876 --- /dev/null +++ b/V2/time_tracker/utils/enums/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from .response_enums import ResponseEnums \ No newline at end of file diff --git a/V2/time_tracker/utils/enums/response_enums.py b/V2/time_tracker/utils/enums/response_enums.py new file mode 100644 index 00000000..6509ad4c --- /dev/null +++ b/V2/time_tracker/utils/enums/response_enums.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class ResponseEnums(Enum): + INVALID_ID = "Invalid Format ID" + NOT_FOUND = "Not found" + NOT_CREATED = "could not be created" + INCORRECT_BODY = "Incorrect body" + + MIME_TYPE = "application/json" From f9e1403aadd8abca2de270fcf8a952439b525ff7 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:55:58 -0500 Subject: [PATCH 62/74] feat:TT-407 list latest v2 time entries (#353) * reactor: TT-407 rebase with master - DELETE * fix: TT-407 sintax flake8 correted * fix: TT-407 problems solved * fix: TT-407 enums added in responses --- V2/serverless.yml | 37 +++++++++----- .../azure/time_entry_azure_endpoints_test.py | 40 +++++++++++++++ .../integration/daos/time_entries_dao_test.py | 28 ++++++++++- .../unit/services/time_entry_service_test.py | 15 ++++++ .../use_cases/time_entries_use_case_test.py | 13 +++++ .../time_entries/_application/__init__.py | 1 + .../_application/_time_entries/__init__.py | 1 + .../_time_entries/_get_latest_entries.py | 49 +++++++++++++++++++ .../time_entries/_domain/__init__.py | 3 +- .../_time_entries_dao.py | 5 ++ .../_domain/_services/_time_entry.py | 3 ++ .../_domain/_use_cases/__init__.py | 1 + .../_get_latest_entries_use_case.py | 11 +++++ .../_time_entries_sql_dao.py | 14 ++++++ V2/time_tracker/time_entries/interface.py | 1 + 15 files changed, 207 insertions(+), 15 deletions(-) create mode 100644 V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py create mode 100644 V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index ba8edb52..9b31ee0b 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -1,6 +1,6 @@ service: azure-time-tracker -frameworkVersion: "2" +frameworkVersion: '2' provider: name: azure @@ -23,18 +23,18 @@ plugins: package: patterns: - - "!env/**" - - "!.env/**" - - "!local.settings.json" - - "!.vscode/**" - - "!__pycache__/**" - - "!node_modules/**" - - "!.python_packages/**" - - "!.funcignore" - - "!package.json" - - "!package-lock.json" - - "!.gitignore" - - "!.git/**" + - '!env/**' + - '!.env/**' + - '!local.settings.json' + - '!.vscode/**' + - '!__pycache__/**' + - '!node_modules/**' + - '!.python_packages/**' + - '!.funcignore' + - '!package.json' + - '!package-lock.json' + - '!.gitignore' + - '!.git/**' functions: get_activities: @@ -117,6 +117,16 @@ functions: route: time-entries/{id} authLevel: anonymous + get_latest_time_entry: + handler: time_tracker/time_entries/interface.get_latest_entries + events: + - http: true + x-azure-settings: + methods: + - GET + route: time-entries/latest/ + authLevel: anonymous + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -165,4 +175,5 @@ functions: methods: - POST route: projects/ + authLevel: anonymous diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index fcc8dea0..42e3d5ec 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -9,6 +9,7 @@ from time_tracker._infrastructure import DB from time_tracker.time_entries import _domain as domain_time_entries from time_tracker.time_entries import _infrastructure as infrastructure_time_entries +from time_tracker.utils.enums import ResponseEnums TIME_ENTRY_URL = "/api/time-entries/" @@ -208,3 +209,42 @@ def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.status_code == 400 assert response.get_body() == b'Incorrect time entry body' + + +def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, +): + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert time_entry_json_data == [inserted_time_entry] + + +def test__get_latest_entries_azure_endpoint__returns_not_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body().decode("utf-8") == ResponseEnums.NOT_FOUND.value diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index e78af556..e48241cc 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -90,7 +90,7 @@ def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) @@ -153,3 +153,29 @@ def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( time_entry = dao.get_by_id(Faker().pyint()) assert time_entry is None + + +def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db +): + dao = create_fake_dao(test_db) + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory( + activity_id=inserted_activity.id, + technologies="[jira,sql]") + inserted_time_entry = dao.create(time_entry_to_insert) + + result = dao.get_latest_entries(int(inserted_time_entry.owner_id)) + + assert result == [inserted_time_entry.__dict__] + + +def test_get_latest_entries__returns_none__when_an_owner_id_is_not_found( + create_fake_dao, test_db, insert_activity, activity_factory +): + dao = create_fake_dao(test_db) + insert_activity(activity_factory(), dao.db) + + result = dao.get_latest_entries(Faker().pyint()) + + assert result is None diff --git a/V2/tests/unit/services/time_entry_service_test.py b/V2/tests/unit/services/time_entry_service_test.py index 1992324f..d1596471 100644 --- a/V2/tests/unit/services/time_entry_service_test.py +++ b/V2/tests/unit/services/time_entry_service_test.py @@ -72,3 +72,18 @@ def test__get_by_id__uses_the_time_entry_dao__to_retrieve_one_time_entry(mocker) assert time_entry_dao.get_by_id.called assert expected_time_entry == actual_time_entry + + +def test__get_latest_entries__uses_the_time_entry_dao__to_get_last_entries( + mocker, +): + expected_latest_time_entries = mocker.Mock() + time_entry_dao = mocker.Mock( + get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries) + ) + + time_entry_service = TimeEntryService(time_entry_dao) + latest_time_entries = time_entry_service.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert expected_latest_time_entries == latest_time_entries + assert time_entry_dao.get_latest_entries.called diff --git a/V2/tests/unit/use_cases/time_entries_use_case_test.py b/V2/tests/unit/use_cases/time_entries_use_case_test.py index 05937789..f96666cb 100644 --- a/V2/tests/unit/use_cases/time_entries_use_case_test.py +++ b/V2/tests/unit/use_cases/time_entries_use_case_test.py @@ -75,3 +75,16 @@ def test__get_time_entry_by_id_function__uses_the_time_entry_service__to_retriev assert time_entry_service.get_by_id.called assert expected_time_entries == actual_time_entry + + +def test__get_latest_entries_function__uses_the_time_entry_service__to_get_last_entries( + mocker: MockFixture, +): + expected_latest_time_entries = mocker.Mock() + time_entry_service = mocker.Mock(get_latest_entries=mocker.Mock(return_value=expected_latest_time_entries)) + + time_entry_use_case = _use_cases.GetLastestTimeEntryUseCase(time_entry_service) + latest_time_entries = time_entry_use_case.get_latest_entries(Faker().pyint(), Faker().pyint()) + + assert time_entry_service.get_latest_entries.called + assert expected_latest_time_entries == latest_time_entries diff --git a/V2/time_tracker/time_entries/_application/__init__.py b/V2/time_tracker/time_entries/_application/__init__.py index eb817c22..4d003056 100644 --- a/V2/time_tracker/time_entries/_application/__init__.py +++ b/V2/time_tracker/time_entries/_application/__init__.py @@ -3,3 +3,4 @@ from ._time_entries import delete_time_entry from ._time_entries import update_time_entry from ._time_entries import get_time_entries +from ._time_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 382fbbe4..29631650 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -3,3 +3,4 @@ from ._delete_time_entry import delete_time_entry from ._update_time_entry import update_time_entry from ._get_time_entries import get_time_entries +from ._get_latest_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py new file mode 100644 index 00000000..b813fb4f --- /dev/null +++ b/V2/time_tracker/time_entries/_application/_time_entries/_get_latest_entries.py @@ -0,0 +1,49 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB +from time_tracker.utils.enums import ResponseEnums + + +def get_latest_entries(req: func.HttpRequest) -> func.HttpResponse: + database = DB() + time_entry_dao = _infrastructure.TimeEntriesSQLDao(database) + time_entry_service = _domain.TimeEntryService(time_entry_dao) + use_case = _domain._use_cases.GetLastestTimeEntryUseCase(time_entry_service) + + try: + owner_id = req.params.get("owner_id") + limit = req.params.get("limit") + + if not owner_id: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + time_entries = use_case.get_latest_entries(int(owner_id), int(limit) if limit and int(limit) > 0 else None) + + if not time_entries or len(time_entries) == 0: + return func.HttpResponse( + body=ResponseEnums.NOT_FOUND.value, + status_code=HTTPStatus.NOT_FOUND, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + return func.HttpResponse( + body=json.dumps(time_entries, default=str), + status_code=HTTPStatus.OK, + mimetype=ResponseEnums.MIME_TYPE.value, + ) + + except ValueError: + return func.HttpResponse( + body=ResponseEnums.INVALID_ID.value, + status_code=HTTPStatus.BAD_REQUEST, + mimetype=ResponseEnums.MIME_TYPE.value, + ) diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index 2034f8d3..513877bf 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -7,5 +7,6 @@ DeleteTimeEntryUseCase, UpdateTimeEntryUseCase, GetTimeEntriesUseCase, - GetTimeEntryUseCase + GetTimeEntryUseCase, + GetLastestTimeEntryUseCase, ) diff --git a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py index ca4ceb98..59b9d975 100644 --- a/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py +++ b/V2/time_tracker/time_entries/_domain/_persistence_contracts/_time_entries_dao.py @@ -17,9 +17,14 @@ def delete(self, id: int) -> TimeEntry: def update(self, id: int, new_time_entry: dict) -> TimeEntry: pass + @abc.abstractmethod def get_by_id(self, id: int) -> TimeEntry: pass @abc.abstractmethod def get_all(self) -> typing.List[TimeEntry]: pass + + @abc.abstractmethod + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + pass diff --git a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py index 5b3f4115..0c2b8b9b 100644 --- a/V2/time_tracker/time_entries/_domain/_services/_time_entry.py +++ b/V2/time_tracker/time_entries/_domain/_services/_time_entry.py @@ -21,3 +21,6 @@ def get_by_id(self, id: int) -> TimeEntry: def get_all(self) -> typing.List[TimeEntry]: return self.time_entry_dao.get_all() + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_dao.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index fdd1258d..055cd850 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -4,3 +4,4 @@ from ._update_time_entry_use_case import UpdateTimeEntryUseCase from ._get_time_entry_use_case import GetTimeEntriesUseCase from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py new file mode 100644 index 00000000..c070f8ac --- /dev/null +++ b/V2/time_tracker/time_entries/_domain/_use_cases/_get_latest_entries_use_case.py @@ -0,0 +1,11 @@ +from time_tracker.time_entries._domain import TimeEntry, TimeEntryService +import typing + + +class GetLastestTimeEntryUseCase: + + def __init__(self, time_entry_service: TimeEntryService): + self.time_entry_service = time_entry_service + + def get_latest_entries(self, owner_id: int, limit: int) -> typing.List[TimeEntry]: + return self.time_entry_service.get_latest_entries(owner_id, limit) diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 9e7016d4..5d368e26 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -90,3 +90,17 @@ def __create_time_entry_dto(self, time_entry: dict) -> domain.TimeEntry: "end_date": str(time_entry.get("end_date"))}) time_entry = {key: time_entry.get(key) for key in self.time_entry_key} return domain.TimeEntry(**time_entry) + + def get_latest_entries(self, owner_id: int, limit: int = 20) -> typing.List[domain.TimeEntry]: + query = ( + self.time_entry.select() + .where(sqlalchemy.and_( + self.time_entry.c.owner_id == owner_id, + self.time_entry.c.deleted.is_(False) + )) + .order_by(self.time_entry.c.start_date.desc()) + .limit(limit) + ) + time_entries_data = self.db.get_session().execute(query) + list_time_entries = [dict(entry) for entry in time_entries_data] + return list_time_entries if len(list_time_entries) > 0 else None diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 8873b93d..87876204 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -3,3 +3,4 @@ from ._application import delete_time_entry from ._application import update_time_entry from ._application import get_time_entries +from ._application import get_latest_entries From 1db51d68824a429730fedd1cbf58ee334ee00fa0 Mon Sep 17 00:00:00 2001 From: mandres2015 <32377408+mandres2015@users.noreply.github.com> Date: Mon, 29 Nov 2021 10:32:24 -0500 Subject: [PATCH 63/74] feat: TT-418 crud customer v2 (#361) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: TT-418 create added * feat: TT-418 create tests added * feat: TT-418 create get_all, get_by_id and delete methods * feat: TT-418 solve conflicts after add tests * feat: TT-418 crud and tests added * fix: TT-418 renamed tests and problems solved * fix: TT-418 code smell resolved * fix: TT-418 serverless corrected and github files retored Co-authored-by: Gabriel Cobeña --- ...me-tracker-v1-on-pull-request-workflow.yml | 4 +- .../time-tracker-v1-on-push-workflow.yml | 4 +- V2/serverless.yml | 51 ++++++ .../azure/customer_azure_endpoints_test.py | 173 +++++++++++++++++- V2/tests/fixtures.py | 2 +- .../integration/daos/customers_dao_test.py | 106 ++++++++++- .../unit/services/customer_service_test.py | 56 ++++++ .../unit/use_cases/customers_use_case_test.py | 59 ++++++ .../customers/_application/__init__.py | 5 +- .../_application/_customers/__init__.py | 5 +- .../_customers/_create_customer.py | 19 +- .../_customers/_delete_customer.py | 39 ++++ .../_application/_customers/_get_customers.py | 55 ++++++ .../_customers/_update_customer.py | 53 ++++++ V2/time_tracker/customers/_domain/__init__.py | 4 + .../_persistence_contracts/_customers_dao.py | 17 ++ .../customers/_domain/_services/_customer.py | 14 ++ .../customers/_domain/_use_cases/__init__.py | 6 +- .../_use_cases/_delete_customer_use_case.py | 10 + .../_use_cases/_get_all_customer_use_case.py | 12 ++ .../_get_by_id_customer_use_case.py | 10 + .../_use_cases/_update_customer_use_case.py | 10 + .../_data_persistence/_customer_dao.py | 39 ++++ V2/time_tracker/customers/interface.py | 3 + 24 files changed, 731 insertions(+), 25 deletions(-) create mode 100644 V2/time_tracker/customers/_application/_customers/_delete_customer.py create mode 100644 V2/time_tracker/customers/_application/_customers/_get_customers.py create mode 100644 V2/time_tracker/customers/_application/_customers/_update_customer.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py create mode 100644 V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index c35be604..766f09bf 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -26,7 +26,6 @@ jobs: python -m pip install --upgrade pip pip install -r requirements/time_tracker_api/dev.txt pip install -r requirements/time_tracker_events/dev.txt - - name: Login to azure uses: Azure/login@v1 with: @@ -54,7 +53,6 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} run: | pytest tests - - name: Test the build of the app run: | - docker build . + docker build . \ No newline at end of file diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index 152998b4..095712b9 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -26,7 +26,6 @@ jobs: python -m pip install --upgrade pip pip install -r requirements/time_tracker_api/dev.txt pip install -r requirements/time_tracker_events/dev.txt - - name: Login to azure uses: Azure/login@v1 with: @@ -54,7 +53,6 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} run: | pytest tests - - name: Login to docker registry uses: azure/docker-login@v1 with: @@ -64,4 +62,4 @@ jobs: - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} \ No newline at end of file diff --git a/V2/serverless.yml b/V2/serverless.yml index 9b31ee0b..f7ae5fd7 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -36,7 +36,12 @@ package: - '!.gitignore' - '!.git/**' +#region start Functions + functions: + +#region Start Functions Activities + get_activities: handler: time_tracker/activities/interface.get_activities events: @@ -77,6 +82,10 @@ functions: route: activities/ authLevel: anonymous +#endregion End Functions Activities + +#region Start Functions Time-Entries + create_time_entry: handler: time_tracker/time_entries/interface.create_time_entry events: @@ -127,6 +136,10 @@ functions: route: time-entries/latest/ authLevel: anonymous +#endregion End Functions Time-Entries + +#region Start Functions Customers + create_customer: handler: time_tracker/customers/interface.create_customer events: @@ -137,6 +150,40 @@ functions: route: customers/ authLevel: anonymous + get_customers: + handler: time_tracker/customers/interface.get_customers + events: + - http: true + x-azure-settings: + methods: + - GET + route: customers/{id:?} + authLevel: anonymous + + update_customer: + handler: time_tracker/customers/interface.update_customer + events: + - http: true + x-azure-settings: + methods: + - PUT + route: customers/{id} + authLevel: anonymous + + delete_customer: + handler: time_tracker/customers/interface.delete_customer + events: + - http: true + x-azure-settings: + methods: + - DELETE + route: customers/{id} + authLevel: anonymous + +#endregion End Functions Customers + +#region Start Functions Projects + get_projects: handler: time_tracker/projects/interface.get_projects events: @@ -177,3 +224,7 @@ functions: route: projects/ authLevel: anonymous + +#endregion End Functions Projects + +#endregion End Functions \ No newline at end of file diff --git a/V2/tests/api/azure/customer_azure_endpoints_test.py b/V2/tests/api/azure/customer_azure_endpoints_test.py index 47a619d5..f1f35d4f 100644 --- a/V2/tests/api/azure/customer_azure_endpoints_test.py +++ b/V2/tests/api/azure/customer_azure_endpoints_test.py @@ -1,3 +1,4 @@ +from http import HTTPStatus import json from faker import Faker @@ -8,7 +9,7 @@ CUSTOMER_URL = "/api/customers/" -def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( +def test__create_customer_azure_endpoint__creates_a_customer__when_customer_has_all_necesary_attributes( customer_factory ): customer_body = customer_factory().__dict__ @@ -24,11 +25,11 @@ def test__customer_azure_endpoint__creates_a_customer__when_customer_has_all_nec customer_json_data = json.loads(response.get_body()) customer_body['id'] = customer_json_data['id'] - assert response.status_code == 201 + assert response.status_code == HTTPStatus.CREATED assert customer_json_data == customer_body -def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): +def test__create_customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_necessary_attributes(): customer_to_insert = { "id": None, "name": Faker().user_name(), @@ -45,5 +46,169 @@ def test__customer_azure_endpoint__returns_a_status_400__when_dont_recieve_all_n response = azure_customers._create_customer.create_customer(req) - assert response.status_code == 400 + assert response.status_code == HTTPStatus.BAD_REQUEST assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customer_azure_endpoint__returns_a_customer_with_true_deleted__when_its_id_is_found( + test_db, customer_factory, insert_customer +): + customer_preinsert = customer_factory() + inserted_customer = insert_customer(customer_preinsert, test_db).__dict__ + + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + customer_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data['deleted'] is True + + +def test__delete_customer_azure_endpoint__returns_not_found__when_its_id_is_not_found(): + req = func.HttpRequest( + method='DELETE', + body=None, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert response.get_body() == b'Not found' + + +def test__update_customer_azure_endpoint__returns_an_updated_customer__when_customer_has_all_necesary_attributes( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + customer_json_data = json.loads(response.get_body()) + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == inserted_customer + + +def test__update_customer_azure_endpoint__returns_update_a_customer__when_customer_has_all_necesary_attributes( + customer_factory +): + existent_customer = customer_factory().__dict__ + + body = json.dumps(existent_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": Faker().pyint()}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.CONFLICT + assert response.get_body() == b'This customer does not exist or is duplicated' + + +def test__update_customer_azure_endpoint__returns_invalid_format__when_customer_doesnt_have_all_necesary_attributes( + customer_factory, insert_customer, test_db +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + inserted_customer.pop("name") + + body = json.dumps(inserted_customer).encode("utf-8") + req = func.HttpRequest( + method='PUT', + body=body, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._update_customer.update_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid format or structure of the attributes of the customer' + + +def test__delete_customers_azure_endpoint__returns_a_status_code_400__when_customer_recive_invalid_id( +): + req = func.HttpRequest( + method="DELETE", + body=None, + url=CUSTOMER_URL, + route_params={"id": "invalid id"}, + ) + + response = azure_customers._delete_customer.delete_customer(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'Invalid Format ID' + + +def test__customers_azure_endpoint__returns_all_customers( + test_db, customer_factory, insert_customer +): + customer_to_insert = customer_factory() + + inserted_customer = insert_customer(customer_to_insert, test_db).__dict__ + + req = func.HttpRequest(method='GET', body=None, url=CUSTOMER_URL) + response = azure_customers._get_customers.get_customers(req) + customers_json_data = response.get_body().decode("utf-8") + customer_list = json.loads(customers_json_data) + + assert response.status_code == HTTPStatus.OK + assert customers_json_data <= json.dumps(inserted_customer) + assert customer_list.pop() == inserted_customer + + +def test__customer_azure_endpoint__returns_a_customer__when_customer_matches_its_id( + test_db, customer_factory, insert_customer +): + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": inserted_customer["id"]}, + ) + + response = azure_customers._get_customers.get_customers(req) + customer_json_data = response.get_body().decode("utf-8") + + assert response.status_code == HTTPStatus.OK + assert customer_json_data == json.dumps(inserted_customer) + + +def test__customer_azure_endpoint__returns_invalid_id__when_customer_not_matches_its_id(): + req = func.HttpRequest( + method='GET', + body=None, + url=CUSTOMER_URL, + route_params={"id": "Invalid ID"}, + ) + + response = azure_customers._get_customers.get_customers(req) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.get_body() == b'The id has an invalid format' diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index 2eae7b16..e4e52fb3 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -2,9 +2,9 @@ from faker import Faker import time_tracker.activities._domain as activities_domain -import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.time_entries._domain as time_entries_domain import time_tracker.customers._domain as customers_domain +import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.customers._infrastructure as customers_infrastructure import time_tracker.projects._domain as projects_domain from time_tracker._infrastructure import DB diff --git a/V2/tests/integration/daos/customers_dao_test.py b/V2/tests/integration/daos/customers_dao_test.py index b85cd3e3..496aaf47 100644 --- a/V2/tests/integration/daos/customers_dao_test.py +++ b/V2/tests/integration/daos/customers_dao_test.py @@ -1,4 +1,7 @@ +import typing + import pytest +from faker import Faker import time_tracker.customers._domain as domain import time_tracker.customers._infrastructure as infrastructure @@ -22,7 +25,7 @@ def _clean_database(): dao.db.get_session().execute(query) -def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( +def test__create_customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_database( test_db, customer_factory, create_fake_dao ): dao = create_fake_dao(test_db) @@ -33,3 +36,104 @@ def test__customer_dao__returns_a_customer_dto__when_saves_correctly_with_sql_da assert isinstance(inserted_customer, domain.Customer) assert inserted_customer == customer_to_insert + + +def test__get_all__returns_a_list_of_customer_dto_objects__when_one_or_more_customers_are_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + customer_to_insert = customer_factory() + inserted_customer = [dao.create(customer_to_insert)] + + customers = dao.get_all() + + assert isinstance(customers, typing.List) + assert customers == inserted_customer + + +def test_get_by_id__returns_a_customer_dto__when_found_one_customer_that_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = dao.get_by_id(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer == inserted_customer + + +def test__get_by_id__returns_none__when_no_customer_matches_its_id_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + customer = dao.get_by_id(existent_customer.id) + + assert customer is None + + +def test_get_all__returns_an_empty_list__when_doesnt_found_any_customers_with_sql_database( + test_db, create_fake_dao +): + customers = create_fake_dao(test_db).get_all() + + assert isinstance(customers, typing.List) + assert customers == [] + + +def test_delete__returns_a_customer_with_inactive_status__when_a_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db) + + customer = dao.delete(inserted_customer.id) + + assert isinstance(customer, domain.Customer) + assert customer.id == inserted_customer.id + assert customer.status == 1 + assert customer.deleted is True + + +def test_delete__returns_none__when_no_customer_matching_its_id_is_found_with_sql_database( + test_db, create_fake_dao, customer_factory +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + results = dao.delete(existent_customer.id) + + assert results is None + + +def test__update_customer_dao__returns_an_updated_customer_dto__when_updates_correctly_with_sql_database( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + + existent_customer = customer_factory() + inserted_customer = insert_customer(existent_customer, dao.db).__dict__ + + inserted_customer["description"] = Faker().sentence() + + updated_customer = dao.update(inserted_customer["id"], domain.Customer(**inserted_customer)) + + assert isinstance(updated_customer, domain.Customer) + assert updated_customer.description == inserted_customer["description"] + assert updated_customer.__dict__ == inserted_customer + + +def test__update_customer_dao__returns_none__when_an_incorrect_id_is_passed( + test_db, customer_factory, create_fake_dao, insert_customer +): + dao = create_fake_dao(test_db) + existent_customer = customer_factory() + + updated_customer = dao.update(Faker().pyint(), existent_customer) + + assert updated_customer is None diff --git a/V2/tests/unit/services/customer_service_test.py b/V2/tests/unit/services/customer_service_test.py index bb25070f..776d18ee 100644 --- a/V2/tests/unit/services/customer_service_test.py +++ b/V2/tests/unit/services/customer_service_test.py @@ -1,3 +1,5 @@ +from faker import Faker + from time_tracker.customers._domain import CustomerService @@ -12,3 +14,57 @@ def test__create_customer__uses_the_customer_dao__to_create_a_customer(mocker, c assert customer_dao.create.called assert expected_customer == new_customer + + +def test__delete_customer__uses_the_customer_dao__to_delete_customer_selected( + mocker, +): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + delete=mocker.Mock(return_value=expected_customer) + ) + + customer_service = CustomerService(customer_dao) + deleted_customer = customer_service.delete(Faker().pyint()) + + assert customer_dao.delete.called + assert expected_customer == deleted_customer + + +def test__get_all__uses_the_customer_dao__to_retrieve_customers(mocker): + expected_customers = mocker.Mock() + customer_dao = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + customer_service = CustomerService(customer_dao) + + actual_customers = customer_service.get_all() + + assert customer_dao.get_all.called + assert expected_customers == actual_customers + + +def test__get_by_id__uses_the_customer_dao__to_retrieve_one_customer(mocker): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + actual_customer = customer_service.get_by_id(Faker().pyint()) + + assert customer_dao.get_by_id.called + assert expected_customer == actual_customer + + +def test__update_customer__uses_the_customer_dao__to_update_a_customer(mocker, customer_factory): + expected_customer = mocker.Mock() + customer_dao = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + customer_service = CustomerService(customer_dao) + + updated_customer = customer_service.update(Faker().pyint(), customer_factory()) + + assert customer_dao.update.called + assert expected_customer == updated_customer diff --git a/V2/tests/unit/use_cases/customers_use_case_test.py b/V2/tests/unit/use_cases/customers_use_case_test.py index 3b8566a9..63e03081 100644 --- a/V2/tests/unit/use_cases/customers_use_case_test.py +++ b/V2/tests/unit/use_cases/customers_use_case_test.py @@ -1,4 +1,5 @@ from pytest_mock import MockFixture +from faker import Faker from time_tracker.customers._domain import _use_cases @@ -16,3 +17,61 @@ def test__create_customer_function__uses_the_customer_service__to_create_a_custo assert customer_service.create.called assert expected_customer == new_customer + + +def test__delete_customer_function__uses_the_customer_service__to_delete_customer_selected( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock(delete=mocker.Mock(return_value=expected_customer)) + + customer_use_case = _use_cases.DeleteCustomerUseCase(customer_service) + deleted_customer = customer_use_case.delete_customer(Faker().pyint()) + + assert customer_service.delete.called + assert expected_customer == deleted_customer + + +def test__get_list_customers_function__uses_the_customer_service__to_retrieve_customers( + mocker: MockFixture, +): + expected_customers = mocker.Mock() + customer_service = mocker.Mock( + get_all=mocker.Mock(return_value=expected_customers) + ) + + customers_use_case = _use_cases.GetAllCustomerUseCase(customer_service) + actual_customers = customers_use_case.get_all_customer() + + assert customer_service.get_all.called + assert expected_customers == actual_customers + + +def test__get_customer_by_id_function__uses_the_customer_service__to_retrieve_customer( + mocker: MockFixture, +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + get_by_id=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.GetByIdCustomerUseCase(customer_service) + actual_customer = customer_use_case.get_customer_by_id(Faker().pyint()) + + assert customer_service.get_by_id.called + assert expected_customer == actual_customer + + +def test__update_customer_function__uses_the_customer_service__to_update_a_customer( + mocker: MockFixture, customer_factory +): + expected_customer = mocker.Mock() + customer_service = mocker.Mock( + update=mocker.Mock(return_value=expected_customer) + ) + + customer_use_case = _use_cases.UpdateCustomerUseCase(customer_service) + updated_customer = customer_use_case.update_customer(Faker().pyint(), customer_factory()) + + assert customer_service.update.called + assert expected_customer == updated_customer diff --git a/V2/time_tracker/customers/_application/__init__.py b/V2/time_tracker/customers/_application/__init__.py index db2c2c15..d9ba1676 100644 --- a/V2/time_tracker/customers/_application/__init__.py +++ b/V2/time_tracker/customers/_application/__init__.py @@ -1,2 +1,5 @@ # flake8: noqa -from ._customers import create_customer \ No newline at end of file +from ._customers import create_customer +from ._customers import get_customers +from ._customers import delete_customer +from ._customers import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/__init__.py b/V2/time_tracker/customers/_application/_customers/__init__.py index bf1f8460..b07840ce 100644 --- a/V2/time_tracker/customers/_application/_customers/__init__.py +++ b/V2/time_tracker/customers/_application/_customers/__init__.py @@ -1,2 +1,5 @@ # flake8: noqa -from ._create_customer import create_customer \ No newline at end of file +from ._create_customer import create_customer +from ._get_customers import get_customers +from ._delete_customer import delete_customer +from ._update_customer import update_customer diff --git a/V2/time_tracker/customers/_application/_customers/_create_customer.py b/V2/time_tracker/customers/_application/_customers/_create_customer.py index 919c34cb..48e39dc3 100644 --- a/V2/time_tracker/customers/_application/_customers/_create_customer.py +++ b/V2/time_tracker/customers/_application/_customers/_create_customer.py @@ -1,6 +1,7 @@ import dataclasses import json import typing +from http import HTTPStatus import azure.functions as func @@ -17,8 +18,8 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: use_case = _domain._use_cases.CreateCustomerUseCase(customer_service) customer_data = req.get_json() - customer_is_valid = _validate_customer(customer_data) - if not customer_is_valid: + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: raise ValueError customer_to_create = _domain.Customer( @@ -32,10 +33,10 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: if created_customer: body = json.dumps(created_customer.__dict__) - status_code = 201 + status_code = HTTPStatus.CREATED else: body = b'This customer already exists' - status_code = 409 + status_code = HTTPStatus.CONFLICT return func.HttpResponse( body=body, @@ -45,13 +46,11 @@ def create_customer(req: func.HttpRequest) -> func.HttpResponse: except ValueError: return func.HttpResponse( body=b'Invalid format or structure of the attributes of the customer', - status_code=400, + status_code=HTTPStatus.BAD_REQUEST, mimetype="application/json" ) -def _validate_customer(customer_data: dict) -> bool: - if [field.name for field in dataclasses.fields(_domain.Customer) - if (field.name not in customer_data) and (field.type != typing.Optional[field.type])]: - return False - return True +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if (field.name not in customer_data) and (field.type != typing.Optional[field.type])] diff --git a/V2/time_tracker/customers/_application/_customers/_delete_customer.py b/V2/time_tracker/customers/_application/_customers/_delete_customer.py new file mode 100644 index 00000000..41fc3464 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_delete_customer.py @@ -0,0 +1,39 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + +DATATYPE = "application/json" + + +def delete_customer(req: func.HttpRequest) -> func.HttpResponse: + customer_dao = _infrastructure.CustomersSQLDao(DB()) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.DeleteCustomerUseCase(customer_service) + + try: + customer_id = int(req.route_params.get("id")) + deleted_customer = use_case.delete_customer(customer_id) + if not deleted_customer: + return func.HttpResponse( + body="Not found", + status_code=HTTPStatus.NOT_FOUND, + mimetype=DATATYPE + ) + + return func.HttpResponse( + body=json.dumps(deleted_customer.__dict__, default=str), + status_code=HTTPStatus.OK, + mimetype=DATATYPE, + ) + + except ValueError: + return func.HttpResponse( + body=b"Invalid Format ID", + status_code=HTTPStatus.BAD_REQUEST, + mimetype=DATATYPE + ) diff --git a/V2/time_tracker/customers/_application/_customers/_get_customers.py b/V2/time_tracker/customers/_application/_customers/_get_customers.py new file mode 100644 index 00000000..8cb9635f --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_get_customers.py @@ -0,0 +1,55 @@ +from http import HTTPStatus +import json + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def get_customers(req: func.HttpRequest) -> func.HttpResponse: + customer_id = req.route_params.get('id') + status_code = HTTPStatus.OK + + try: + if customer_id: + response = _get_by_id(int(customer_id)) + if response == b'This customer does not exist': + status_code = HTTPStatus.NOT_FOUND + else: + response = _get_all() + + return func.HttpResponse( + body=response, status_code=status_code, mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b"The id has an invalid format", status_code=HTTPStatus.BAD_REQUEST, mimetype="application/json" + ) + + +def _get_by_id(customer_id: int) -> str: + customer_use_case = _domain._use_cases.GetByIdCustomerUseCase( + _create_customer_service(DB()) + ) + customer = customer_use_case.get_customer_by_id(customer_id) + + return json.dumps(customer.__dict__) if customer else b'This customer does not exist' + + +def _get_all() -> str: + customer_sql = _domain._use_cases.GetAllCustomerUseCase( + _create_customer_service(DB()) + ) + return json.dumps( + [ + customer.__dict__ + for customer in customer_sql.get_all_customer() + ] + ) + + +def _create_customer_service(db: DB) -> _domain.CustomerService: + customer_sql = _infrastructure.CustomersSQLDao(db) + return _domain.CustomerService(customer_sql) diff --git a/V2/time_tracker/customers/_application/_customers/_update_customer.py b/V2/time_tracker/customers/_application/_customers/_update_customer.py new file mode 100644 index 00000000..93524c65 --- /dev/null +++ b/V2/time_tracker/customers/_application/_customers/_update_customer.py @@ -0,0 +1,53 @@ +import dataclasses +import json +import typing +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB + + +def update_customer(req: func.HttpRequest) -> func.HttpResponse: + try: + database = DB() + customer_id = int(req.route_params.get('id')) + customer_dao = _infrastructure.CustomersSQLDao(database) + customer_service = _domain.CustomerService(customer_dao) + use_case = _domain._use_cases.UpdateCustomerUseCase(customer_service) + + customer_data = req.get_json() + customer_is_invalid = _validate_customer(customer_data) + if customer_is_invalid: + raise ValueError + + customer_to_update = _domain.Customer( + **{field.name: customer_data.get(field.name) for field in dataclasses.fields(_domain.Customer)} + ) + updated_customer = use_case.update_customer(customer_id, customer_to_update) + + if updated_customer: + body = json.dumps(updated_customer.__dict__) + status_code = HTTPStatus.OK + else: + body = b'This customer does not exist or is duplicated' + status_code = HTTPStatus.CONFLICT + + return func.HttpResponse( + body=body, + status_code=status_code, + mimetype="application/json" + ) + except ValueError: + return func.HttpResponse( + body=b'Invalid format or structure of the attributes of the customer', + status_code=HTTPStatus.BAD_REQUEST, + mimetype="application/json" + ) + + +def _validate_customer(customer_data: dict) -> typing.List[str]: + return [field.name for field in dataclasses.fields(_domain.Customer) + if field.name not in customer_data] diff --git a/V2/time_tracker/customers/_domain/__init__.py b/V2/time_tracker/customers/_domain/__init__.py index 8392b8e9..a2e8014b 100644 --- a/V2/time_tracker/customers/_domain/__init__.py +++ b/V2/time_tracker/customers/_domain/__init__.py @@ -4,4 +4,8 @@ from ._services import CustomerService from ._use_cases import ( CreateCustomerUseCase, + UpdateCustomerUseCase, + GetAllCustomerUseCase, + GetByIdCustomerUseCase, + DeleteCustomerUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py index 35a7a7e9..186d5c86 100644 --- a/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py +++ b/V2/time_tracker/customers/_domain/_persistence_contracts/_customers_dao.py @@ -1,4 +1,5 @@ import abc +import typing from time_tracker.customers._domain import Customer @@ -7,3 +8,19 @@ class CustomersDao(abc.ABC): @abc.abstractmethod def create(self, data: Customer) -> Customer: pass + + @abc.abstractmethod + def update(self, id: int, data: Customer) -> Customer: + pass + + @abc.abstractmethod + def get_by_id(self, id: int) -> Customer: + pass + + @abc.abstractmethod + def get_all(self) -> typing.List[Customer]: + pass + + @abc.abstractmethod + def delete(self, id: int) -> Customer: + pass diff --git a/V2/time_tracker/customers/_domain/_services/_customer.py b/V2/time_tracker/customers/_domain/_services/_customer.py index 88633a08..082a7b08 100644 --- a/V2/time_tracker/customers/_domain/_services/_customer.py +++ b/V2/time_tracker/customers/_domain/_services/_customer.py @@ -1,3 +1,5 @@ +import typing + from time_tracker.customers._domain import Customer, CustomersDao @@ -8,3 +10,15 @@ def __init__(self, customer_dao: CustomersDao): def create(self, data: Customer) -> Customer: return self.customer_dao.create(data) + + def update(self, id: int, data: Customer) -> Customer: + return self.customer_dao.update(id, data) + + def get_by_id(self, id: int) -> Customer: + return self.customer_dao.get_by_id(id) + + def get_all(self) -> typing.List[Customer]: + return self.customer_dao.get_all() + + def delete(self, id: int) -> Customer: + return self.customer_dao.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/__init__.py b/V2/time_tracker/customers/_domain/_use_cases/__init__.py index accd4281..4dcb8239 100644 --- a/V2/time_tracker/customers/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/customers/_domain/_use_cases/__init__.py @@ -1,2 +1,6 @@ # flake8: noqa -from ._create_customer_use_case import CreateCustomerUseCase \ No newline at end of file +from ._create_customer_use_case import CreateCustomerUseCase +from ._update_customer_use_case import UpdateCustomerUseCase +from ._get_by_id_customer_use_case import GetByIdCustomerUseCase +from ._get_all_customer_use_case import GetAllCustomerUseCase +from ._delete_customer_use_case import DeleteCustomerUseCase diff --git a/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py new file mode 100644 index 00000000..0477a1f2 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_delete_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class DeleteCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def delete_customer(self, id: int) -> Customer: + return self.customer_service.delete(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py new file mode 100644 index 00000000..d3780449 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_all_customer_use_case.py @@ -0,0 +1,12 @@ +import typing + +from time_tracker.customers._domain import Customer, CustomerService + + +class GetAllCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_all_customer(self) -> typing.List[Customer]: + return self.customer_service.get_all() diff --git a/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py new file mode 100644 index 00000000..2372029a --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_get_by_id_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class GetByIdCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def get_customer_by_id(self, id: int) -> Customer: + return self.customer_service.get_by_id(id) diff --git a/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py new file mode 100644 index 00000000..318ced28 --- /dev/null +++ b/V2/time_tracker/customers/_domain/_use_cases/_update_customer_use_case.py @@ -0,0 +1,10 @@ +from time_tracker.customers._domain import Customer, CustomerService + + +class UpdateCustomerUseCase: + + def __init__(self, customer_service: CustomerService): + self.customer_service = customer_service + + def update_customer(self, id: int, data: Customer) -> Customer: + return self.customer_service.update(id, data) diff --git a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py index 2b1f4c0d..f3b15122 100644 --- a/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py +++ b/V2/time_tracker/customers/_infrastructure/_data_persistence/_customer_dao.py @@ -1,4 +1,5 @@ import dataclasses +import typing import sqlalchemy as sq @@ -22,6 +23,21 @@ def __init__(self, database: _db.DB): extend_existing=True, ) + def get_by_id(self, id: int) -> domain.Customer: + query = sq.sql.select(self.customer).where( + sq.sql.and_(self.customer.c.id == id, self.customer.c.deleted.is_(False)) + ) + customer = self.db.get_session().execute(query).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def get_all(self) -> typing.List[domain.Customer]: + query = sq.sql.select(self.customer).where(self.customer.c.deleted.is_(False)) + result = self.db.get_session().execute(query) + return [ + self.__create_customer_dto(dict(customer)) + for customer in result + ] + def create(self, data: domain.Customer) -> domain.Customer: try: new_customer = data.__dict__ @@ -39,3 +55,26 @@ def create(self, data: domain.Customer) -> domain.Customer: def __create_customer_dto(self, customer: dict) -> domain.Customer: customer = {key: customer.get(key) for key in self.customer_key} return domain.Customer(**customer) + + def delete(self, customer_id: int) -> domain.Customer: + query = ( + self.customer.update() + .where(self.customer.c.id == customer_id) + .values({"deleted": True}) + ) + self.db.get_session().execute(query) + query_deleted_customer = sq.sql.select(self.customer).where(self.customer.c.id == customer_id) + customer = self.db.get_session().execute(query_deleted_customer).one_or_none() + return self.__create_customer_dto(dict(customer)) if customer else None + + def update(self, id: int, data: domain.Customer) -> domain.Customer: + try: + new_customer = data.__dict__ + new_customer.pop("id") + + customer_validated = {key: value for (key, value) in new_customer.items() if value is not None} + query = self.customer.update().where(self.customer.c.id == id).values(customer_validated) + self.db.get_session().execute(query) + return self.get_by_id(id) + except sq.exc.SQLAlchemyError: + return None diff --git a/V2/time_tracker/customers/interface.py b/V2/time_tracker/customers/interface.py index e36b8172..9aef2091 100644 --- a/V2/time_tracker/customers/interface.py +++ b/V2/time_tracker/customers/interface.py @@ -1,2 +1,5 @@ # flake8: noqa from ._application import create_customer +from ._application import get_customers +from ._application import delete_customer +from ._application import update_customer From aedf3d24d1cae9f40dcfb61196c619c15a1ac35c Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Tue, 30 Nov 2021 15:13:14 -0500 Subject: [PATCH 64/74] feat: TT-414 get latest projects (#363) feat: TT-414 created get latest project, use case, service, dao test: TT-414 add test to get latest projects Co-authored-by: Alexander --- V2/serverless.yml | 12 ++- .../api/azure/project_azure_endpoints_test.py | 58 +++++++---- .../azure/time_entry_azure_endpoints_test.py | 96 ++++++++++++------- V2/tests/conftest.py | 4 +- V2/tests/fixtures.py | 39 +++++++- .../integration/daos/projects_dao_test.py | 28 +++--- .../integration/daos/time_entries_dao_test.py | 77 ++++++++------- .../unit/services/project_service_test.py | 15 +++ .../unit/use_cases/projects_use_case_test.py | 13 +++ .../projects/_application/__init__.py | 3 +- .../_application/_projects/__init__.py | 3 +- .../_application/_projects/_create_project.py | 3 +- .../_projects/_get_latest_projects.py | 26 +++++ V2/time_tracker/projects/_domain/__init__.py | 3 +- .../projects/_domain/_entities/_project.py | 2 + .../_persistence_contracts/_projects_dao.py | 7 +- .../projects/_domain/_services/_project.py | 3 + .../projects/_domain/_use_cases/__init__.py | 1 + .../_get_latest_projects_use_case.py | 11 +++ .../_data_persistence/_projects_dao.py | 45 +++++++-- V2/time_tracker/projects/interface.py | 3 +- .../_application/_time_entries/__init__.py | 1 + .../time_entries/_domain/__init__.py | 1 + .../_domain/_use_cases/__init__.py | 1 + .../_time_entries_sql_dao.py | 2 +- V2/time_tracker/time_entries/interface.py | 1 + 26 files changed, 334 insertions(+), 124 deletions(-) create mode 100644 V2/time_tracker/projects/_application/_projects/_get_latest_projects.py create mode 100644 V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py diff --git a/V2/serverless.yml b/V2/serverless.yml index f7ae5fd7..66fa83af 100644 --- a/V2/serverless.yml +++ b/V2/serverless.yml @@ -225,6 +225,16 @@ functions: authLevel: anonymous + get_latest_projects: + handler: time_tracker/projects/interface.get_latest_projects + events: + - http: true + x-azure-settings: + methods: + - GET + route: projects/latest + authLevel: anonymous + #endregion End Functions Projects -#endregion End Functions \ No newline at end of file +#endregion End Functions diff --git a/V2/tests/api/azure/project_azure_endpoints_test.py b/V2/tests/api/azure/project_azure_endpoints_test.py index 232462b7..b48a13dc 100644 --- a/V2/tests/api/azure/project_azure_endpoints_test.py +++ b/V2/tests/api/azure/project_azure_endpoints_test.py @@ -1,29 +1,14 @@ import json from http import HTTPStatus -import pytest from faker import Faker import azure.functions as func from time_tracker.projects._application import _projects as azure_projects -from time_tracker.projects import _domain as domain -from time_tracker.projects import _infrastructure as infrastructure PROJECT_URL = '/api/projects/' -@pytest.fixture(name='insert_project') -def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> domain.Project: - inserted_customer = insert_customer(customer_factory(), test_db) - - def _new_project(): - project_to_insert = project_factory(customer_id=inserted_customer.id) - dao = infrastructure.ProjectsSQLDao(test_db) - inserted_project = dao.create(project_to_insert) - return inserted_project - return _new_project - - def test__project_azure_endpoint__returns_all_projects( insert_project ): @@ -146,19 +131,17 @@ def test__update_project_azure_endpoint__returns_a_project__when_found_a_project def test__update_projects_azure_endpoint__returns_a_status_code_404__when_no_found_a_project_to_update( - project_factory ): - project_body = project_factory().__dict__ + project_body = {"description": Faker().sentence()} req = func.HttpRequest( method="PUT", body=json.dumps(project_body).encode("utf-8"), url=PROJECT_URL, - route_params={"id": project_body["id"]}, + route_params={"id": Faker().pyint()}, ) response = azure_projects._update_project.update_project(req) - assert response.status_code == HTTPStatus.NOT_FOUND assert response.get_body() == b"Not found" @@ -249,3 +232,40 @@ def test__project_azure_endpoint__returns_a_status_code_500__when_project_receiv assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR assert response.get_body() == b"could not be created" + + +def test__get_latest_projects_azure_endpoint__returns_a_list_of_latest_projects__when_an_owner_id_match( + insert_time_entry +): + inserted_time_entry = insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert inserted_time_entry["project_id"] == projects_json_data[0]["id"] + + +def test__get_latest_projects_azure_endpoint__returns_an_empty_list__when_an_owner_id_not_match( + insert_time_entry +): + insert_time_entry().__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=PROJECT_URL+"latest/", + ) + + response = azure_projects._get_latest_projects.get_latest_projects(req) + projects_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == HTTPStatus.OK + assert projects_json_data == [] diff --git a/V2/tests/api/azure/time_entry_azure_endpoints_test.py b/V2/tests/api/azure/time_entry_azure_endpoints_test.py index 42e3d5ec..13e3e875 100644 --- a/V2/tests/api/azure/time_entry_azure_endpoints_test.py +++ b/V2/tests/api/azure/time_entry_azure_endpoints_test.py @@ -46,10 +46,11 @@ def test__time_entry_azure_endpoint__creates_an_time_entry__when_time_entry_has_ def test__delete_time_entries_azure_endpoint__returns_an_time_entry_with_true_deleted__when_its_id_is_found( - test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"]) + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) inserted_time_entry = insert_time_entry(time_entry_body, test_db) req = func.HttpRequest( @@ -82,10 +83,11 @@ def test__delete_time_entries_azure_endpoint__returns_a_status_code_400__when_ti def test__time_entry_azure_endpoint__returns_all_time_entries( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest(method="GET", body=None, url=TIME_ENTRY_URL) @@ -99,10 +101,11 @@ def test__time_entry_azure_endpoint__returns_all_time_entries( def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matches_its_id( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest( @@ -120,10 +123,11 @@ def test__time_entry_azure_endpoint__returns_an_time_entry__when_time_entry_matc def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_id( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), test_db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) insert_time_entry(time_entries_to_insert, test_db).__dict__ req = func.HttpRequest( @@ -139,12 +143,53 @@ def test__get_time_entries_azure_endpoint__returns_a_status_code_400__when_time_ assert response.get_body() == b'Invalid Format ID' +def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, insert_project +): + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": inserted_time_entry["owner_id"]}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + time_entry_json_data = json.loads(response.get_body().decode("utf-8")) + + assert response.status_code == 200 + assert time_entry_json_data == [inserted_time_entry] + + +def test__get_latest_entries_azure_endpoint__returns_no_time_entries_found__when_recieve_an_invalid_owner_id( + test_db, insert_activity, activity_factory, +): + insert_activity(activity_factory(), test_db) + + req = func.HttpRequest( + method='GET', + body=None, + url=TIME_ENTRY_URL+"latest/", + params={"owner_id": Faker().pyint()}, + ) + + response = azure_time_entries._get_latest_entries.get_latest_entries(req) + + assert response.status_code == 404 + assert response.get_body() == b'Not found' + + def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an_time_entry_to_update( - test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity + test_db, time_entry_factory, insert_time_entry, activity_factory, insert_activity, insert_project ): - inserted_activity = insert_activity(activity_factory(), test_db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = insert_time_entry(existent_time_entries, test_db).__dict__ + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db).__dict__ + time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], project_id=inserted_project.id) + inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ time_entry_body = {"description": Faker().sentence()} @@ -152,15 +197,15 @@ def test__update_time_entry_azure_endpoint__returns_an_time_entry__when_found_an method='PUT', body=json.dumps(time_entry_body).encode("utf-8"), url=TIME_ENTRY_URL, - route_params={"id": inserted_time_entries["id"]}, + route_params={"id": inserted_time_entry["id"]}, ) response = azure_time_entries._update_time_entry.update_time_entry(req) activitiy_json_data = response.get_body().decode("utf-8") - inserted_time_entries.update(time_entry_body) + inserted_time_entry.update(time_entry_body) assert response.status_code == 200 - assert activitiy_json_data == json.dumps(inserted_time_entries) + assert activitiy_json_data == json.dumps(inserted_time_entry) def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_time_entry_recive_invalid_format_id(): @@ -211,27 +256,6 @@ def test__update_time_entries_azure_endpoint__returns_a_status_code_400__when_ti assert response.get_body() == b'Incorrect time entry body' -def test__get_latest_entries_azure_endpoint__returns_a_list_of_latest_time_entries__when_an_owner_id_match( - test_db, time_entry_factory, insert_time_entry, insert_activity, activity_factory, -): - inserted_activity = insert_activity(activity_factory(), test_db).__dict__ - time_entry_body = time_entry_factory(activity_id=inserted_activity["id"], technologies="[jira,sql]") - inserted_time_entry = insert_time_entry(time_entry_body, test_db).__dict__ - - req = func.HttpRequest( - method='GET', - body=None, - url=TIME_ENTRY_URL+"latest/", - params={"owner_id": inserted_time_entry["owner_id"]}, - ) - - response = azure_time_entries._get_latest_entries.get_latest_entries(req) - time_entry_json_data = json.loads(response.get_body().decode("utf-8")) - - assert response.status_code == HTTPStatus.OK - assert time_entry_json_data == [inserted_time_entry] - - def test__get_latest_entries_azure_endpoint__returns_not_found__when_recieve_an_invalid_owner_id( test_db, insert_activity, activity_factory, ): diff --git a/V2/tests/conftest.py b/V2/tests/conftest.py index ff67203c..c11fc951 100644 --- a/V2/tests/conftest.py +++ b/V2/tests/conftest.py @@ -1,5 +1,5 @@ # flake8: noqa from fixtures import _activity_factory, _test_db, _insert_activity -from fixtures import _time_entry_factory +from fixtures import _time_entry_factory, _insert_time_entry from fixtures import _customer_factory, _insert_customer -from fixtures import _project_factory +from fixtures import _project_factory, _insert_project diff --git a/V2/tests/fixtures.py b/V2/tests/fixtures.py index e4e52fb3..82391ebf 100644 --- a/V2/tests/fixtures.py +++ b/V2/tests/fixtures.py @@ -3,10 +3,12 @@ import time_tracker.activities._domain as activities_domain import time_tracker.time_entries._domain as time_entries_domain +import time_tracker.time_entries._infrastructure as time_entries_infrastructure import time_tracker.customers._domain as customers_domain import time_tracker.activities._infrastructure as activities_infrastructure import time_tracker.customers._infrastructure as customers_infrastructure import time_tracker.projects._domain as projects_domain +import time_tracker.projects._infrastructure as projects_infrastructure from time_tracker._infrastructure import DB @@ -108,7 +110,8 @@ def _make_project( customer_id=Faker().pyint(), status=Faker().pyint(), deleted=False, - technologies=str(Faker().pylist()) + technologies=str(Faker().pylist()), + customer=None ): project = projects_domain.Project( id=id, @@ -118,7 +121,8 @@ def _make_project( customer_id=customer_id, status=status, deleted=deleted, - technologies=technologies + technologies=technologies, + customer=customer ) return project return _make_project @@ -131,3 +135,34 @@ def _new_customer(customer: customers_domain.Customer, database: DB): new_customer = dao.create(customer) return new_customer return _new_customer + + +@pytest.fixture(name='insert_project') +def _insert_project(test_db, insert_customer, project_factory, customer_factory) -> projects_domain.Project: + inserted_customer = insert_customer(customer_factory(), test_db) + + def _new_project(): + project_to_insert = project_factory(id=None, customer_id=inserted_customer.id, deleted=False) + dao = projects_infrastructure.ProjectsSQLDao(test_db) + inserted_project = dao.create(project_to_insert) + return inserted_project + return _new_project + + +@pytest.fixture(name='insert_time_entry') +def _insert_time_entry( + test_db, insert_project, activity_factory, insert_activity, time_entry_factory +) -> time_entries_domain.TimeEntry: + + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), test_db) + + def _new_time_entry(owner_id: int = Faker().pyint()): + dao = time_entries_infrastructure.TimeEntriesSQLDao(test_db) + time_entries_to_insert = time_entry_factory( + activity_id=inserted_activity.id, project_id=inserted_project.id, owner_id=owner_id + ) + + inserted_time_entries = dao.create(time_entries_to_insert) + return inserted_time_entries + return _new_time_entry diff --git a/V2/tests/integration/daos/projects_dao_test.py b/V2/tests/integration/daos/projects_dao_test.py index 64837e37..01f5a1a3 100644 --- a/V2/tests/integration/daos/projects_dao_test.py +++ b/V2/tests/integration/daos/projects_dao_test.py @@ -7,18 +7,6 @@ from time_tracker._infrastructure import DB -@pytest.fixture(name='insert_project') -def _insert_project(customer_factory, test_db, insert_customer, create_fake_dao, project_factory) -> domain.Project: - inserted_customer = insert_customer(customer_factory(), test_db) - - def _new_project(): - project_to_insert = project_factory(customer_id=inserted_customer.id) - inserted_project = create_fake_dao.create(project_to_insert) - return inserted_project - - return _new_project - - @pytest.fixture(name='create_fake_dao') def _create_fake_dao() -> domain.ProjectsDao: db_fake = DB() @@ -44,6 +32,9 @@ def test__create_project__returns_a_project_dto__when_saves_correctly_with_sql_d inserted_project = dao.create(project_to_insert) + expected_project = project_to_insert.__dict__ + expected_project.update({"customer": inserted_customer.__dict__}) + assert isinstance(inserted_project, domain.Project) assert inserted_project == project_to_insert @@ -85,6 +76,7 @@ def test__get_all__returns_a_list_of_project_dto_objects__when_one_or_more_proje ] projects = dao.get_all() + assert isinstance(projects, typing.List) assert projects == inserted_projects @@ -147,3 +139,15 @@ def test_delete__returns_none__when_no_project_matching_its_id_is_found_with_sql results = dao.delete(project_to_insert.id) assert results is None + + +def test_get_latest_projects__returns_a_list_of_project_dto_objects__when_find_projects_in_the_latest_time_entries( + create_fake_dao, insert_time_entry +): + dao = create_fake_dao + owner_id = Faker().pyint() + inserted_time_entries = insert_time_entry(owner_id) + latest_projects = dao.get_latest(owner_id) + + assert isinstance(latest_projects, typing.List) + assert latest_projects[0].id == inserted_time_entries.project_id diff --git a/V2/tests/integration/daos/time_entries_dao_test.py b/V2/tests/integration/daos/time_entries_dao_test.py index e48241cc..3c17f7e9 100644 --- a/V2/tests/integration/daos/time_entries_dao_test.py +++ b/V2/tests/integration/daos/time_entries_dao_test.py @@ -26,12 +26,12 @@ def _clean_database(): def test__time_entry__returns_a_time_entry_dto__when_saves_correctly_with_sql_database( - test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory + test_db, time_entry_factory, create_fake_dao, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) inserted_activity = insert_activity(activity_factory(), dao.db) - - time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id) + inserted_project = insert_project() + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entry = dao.create(time_entry_to_insert) @@ -51,12 +51,13 @@ def test__time_entry__returns_None__when_not_saves_correctly( def test_delete__returns_an_time_entry_with_true_deleted__when_an_time_entry_matching_its_id_is_found( - create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory + create_fake_dao, test_db, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entry = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entry = dao.create(existent_time_entry) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert) result = dao.delete(inserted_time_entry.id) @@ -73,42 +74,60 @@ def test_delete__returns_none__when_no_time_entry_matching_its_id_is_found( assert result is None +def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( + create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db, insert_project +): + dao = create_fake_dao(test_db) + inserted_project = insert_project() + inserted_activity = insert_activity(activity_factory(), dao.db) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + result = dao.get_latest_entries(int(inserted_time_entry["owner_id"])) + + assert result == [inserted_time_entry] + + def test_update__returns_an_time_entry_dto__when_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = dao.create(existent_time_entries).__dict__ - time_entry_id = inserted_time_entries["id"] - inserted_time_entries.update({"description": "description updated"}) + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ + + time_entry_id = inserted_time_entry["id"] + inserted_time_entry.update({"description": "description updated"}) - time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entries) + time_entry = dao.update(time_entry_id=time_entry_id, time_entry_data=inserted_time_entry) assert time_entry.id == time_entry_id - assert time_entry.description == inserted_time_entries.get("description") + assert time_entry.description == inserted_time_entry.get("description") def test_update__returns_none__when_doesnt_found_one_time_entry_to_update( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - existent_time_entries = time_entry_factory(activity_id=inserted_activity.id) - inserted_time_entries = dao.create(existent_time_entries).__dict__ + time_entry_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) + inserted_time_entry = dao.create(time_entry_to_insert).__dict__ - time_entry = dao.update(0, inserted_time_entries) + time_entry = dao.update(0, inserted_time_entry) assert time_entry is None def test__get_all__returns_a_list_of_time_entries_dto_objects__when_one_or_more_time_entries_are_found_in_sql_database( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = [dao.create(time_entries_to_insert)] time_entry = dao.get_all() @@ -128,11 +147,12 @@ def test__get_all__returns_an_empty_list__when_doesnt_found_any_time_entries_in_ def test__get_by_id__returns_a_time_entry_dto__when_found_one_time_entry_that_match_id_with_sql_database( - test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory + test_db, create_fake_dao, time_entry_factory, insert_activity, activity_factory, insert_project ): dao = create_fake_dao(test_db) + inserted_project = insert_project() inserted_activity = insert_activity(activity_factory(), dao.db) - time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id) + time_entries_to_insert = time_entry_factory(activity_id=inserted_activity.id, project_id=inserted_project.id) inserted_time_entries = dao.create(time_entries_to_insert) time_entry = dao.get_by_id(time_entries_to_insert.id) @@ -155,21 +175,6 @@ def test__get_by_id__returns_none__when_no_time_entry_matches_by_id( assert time_entry is None -def test_get_latest_entries__returns_a_list_of_latest_time_entries__when_an_owner_id_match( - create_fake_dao, time_entry_factory, insert_activity, activity_factory, test_db -): - dao = create_fake_dao(test_db) - inserted_activity = insert_activity(activity_factory(), dao.db) - time_entry_to_insert = time_entry_factory( - activity_id=inserted_activity.id, - technologies="[jira,sql]") - inserted_time_entry = dao.create(time_entry_to_insert) - - result = dao.get_latest_entries(int(inserted_time_entry.owner_id)) - - assert result == [inserted_time_entry.__dict__] - - def test_get_latest_entries__returns_none__when_an_owner_id_is_not_found( create_fake_dao, test_db, insert_activity, activity_factory ): diff --git a/V2/tests/unit/services/project_service_test.py b/V2/tests/unit/services/project_service_test.py index 9baf657e..913bd40f 100644 --- a/V2/tests/unit/services/project_service_test.py +++ b/V2/tests/unit/services/project_service_test.py @@ -72,3 +72,18 @@ def test__create_project__uses_the_project_dao__to_create_an_project(mocker, pro assert project_dao.create.called assert expected_project == actual_project + + +def test__get_latest_projects__uses_the_project_dao__to_get_last_projects( + mocker, +): + expected_latest_projects = mocker.Mock() + project_dao = mocker.Mock( + get_latest=mocker.Mock(return_value=expected_latest_projects) + ) + + project_service = ProjectService(project_dao) + latest_projects = project_service.get_latest(Faker().pyint()) + + assert expected_latest_projects == latest_projects + assert project_dao.get_latest.called diff --git a/V2/tests/unit/use_cases/projects_use_case_test.py b/V2/tests/unit/use_cases/projects_use_case_test.py index 22167418..9f5d5f5c 100644 --- a/V2/tests/unit/use_cases/projects_use_case_test.py +++ b/V2/tests/unit/use_cases/projects_use_case_test.py @@ -78,3 +78,16 @@ def test__update_project_function__uses_the_projects_service__to_update_an_proje assert project_service.update.called assert expected_project == updated_project + + +def test__get_latest_projects_function__uses_the_project_service__to_get_latest_project( + mocker: MockFixture, +): + expected_latest_projects = mocker.Mock() + project_service = mocker.Mock(get_latest=mocker.Mock(return_value=expected_latest_projects)) + + project_use_case = _use_cases.GetLatestProjectsUseCase(project_service) + latest_projects = project_use_case.get_latest(Faker().pyint()) + + assert project_service.get_latest.called + assert expected_latest_projects == latest_projects diff --git a/V2/time_tracker/projects/_application/__init__.py b/V2/time_tracker/projects/_application/__init__.py index 6b48fb8a..96a6f985 100644 --- a/V2/time_tracker/projects/_application/__init__.py +++ b/V2/time_tracker/projects/_application/__init__.py @@ -2,4 +2,5 @@ from ._projects import create_project from ._projects import delete_project from ._projects import get_projects -from ._projects import update_project \ No newline at end of file +from ._projects import update_project +from ._projects import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/__init__.py b/V2/time_tracker/projects/_application/_projects/__init__.py index 9f87eef2..b7500f9b 100644 --- a/V2/time_tracker/projects/_application/_projects/__init__.py +++ b/V2/time_tracker/projects/_application/_projects/__init__.py @@ -2,4 +2,5 @@ from ._create_project import create_project from ._delete_project import delete_project from ._get_projects import get_projects -from ._update_project import update_project \ No newline at end of file +from ._update_project import update_project +from ._get_latest_projects import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/projects/_application/_projects/_create_project.py b/V2/time_tracker/projects/_application/_projects/_create_project.py index 559ba864..1397284a 100644 --- a/V2/time_tracker/projects/_application/_projects/_create_project.py +++ b/V2/time_tracker/projects/_application/_projects/_create_project.py @@ -31,7 +31,8 @@ def create_project(req: func.HttpRequest) -> func.HttpResponse: customer_id=project_data["customer_id"], status=project_data["status"], deleted=False, - technologies=project_data["technologies"] + technologies=project_data["technologies"], + customer=None ) created_project = use_case.create_project(project_to_create) diff --git a/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py new file mode 100644 index 00000000..0aa9badc --- /dev/null +++ b/V2/time_tracker/projects/_application/_projects/_get_latest_projects.py @@ -0,0 +1,26 @@ +import json +from http import HTTPStatus + +import azure.functions as func + +from ... import _domain +from ... import _infrastructure +from time_tracker._infrastructure import DB as database + + +def get_latest_projects(req: func.HttpRequest) -> func.HttpResponse: + project_dao = _infrastructure.ProjectsSQLDao(database()) + project_service = _domain.ProjectService(project_dao) + use_case = _domain._use_cases.GetLatestProjectsUseCase(project_service) + + owner_id = req.params.get('owner_id') + response = [ + project.__dict__ + for project in use_case.get_latest(owner_id) + ] + + return func.HttpResponse( + body=json.dumps(response), + status_code=HTTPStatus.OK, + mimetype="application/json", + ) diff --git a/V2/time_tracker/projects/_domain/__init__.py b/V2/time_tracker/projects/_domain/__init__.py index c90dbcaf..6cdbe548 100644 --- a/V2/time_tracker/projects/_domain/__init__.py +++ b/V2/time_tracker/projects/_domain/__init__.py @@ -7,5 +7,6 @@ DeleteProjectUseCase, GetProjectsUseCase, GetProjectUseCase, - UpdateProjectUseCase + UpdateProjectUseCase, + GetLatestProjectsUseCase ) \ No newline at end of file diff --git a/V2/time_tracker/projects/_domain/_entities/_project.py b/V2/time_tracker/projects/_domain/_entities/_project.py index 0b2ffe1a..75361db8 100644 --- a/V2/time_tracker/projects/_domain/_entities/_project.py +++ b/V2/time_tracker/projects/_domain/_entities/_project.py @@ -12,3 +12,5 @@ class Project: status: int deleted: Optional[bool] technologies: List[str] + + customer: Optional[dict] diff --git a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py index f38c8ebd..ef0bb10f 100644 --- a/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py +++ b/V2/time_tracker/projects/_domain/_persistence_contracts/_projects_dao.py @@ -1,4 +1,5 @@ import abc +import typing from .. import Project @@ -9,7 +10,7 @@ def create(self, time_entry_data: Project) -> Project: pass @abc.abstractmethod - def get_all(self) -> Project: + def get_all(self) -> typing.List[Project]: pass @abc.abstractmethod @@ -23,3 +24,7 @@ def update(self, id: int, project_data: dict) -> Project: @abc.abstractmethod def delete(self, id: int) -> Project: pass + + @abc.abstractmethod + def get_latest(self, owner_id: int) -> typing.List[Project]: + pass diff --git a/V2/time_tracker/projects/_domain/_services/_project.py b/V2/time_tracker/projects/_domain/_services/_project.py index 0f99dafb..70dfe9c0 100644 --- a/V2/time_tracker/projects/_domain/_services/_project.py +++ b/V2/time_tracker/projects/_domain/_services/_project.py @@ -22,3 +22,6 @@ def update(self, id: int, project_data: dict) -> Project: def delete(self, id: int) -> Project: return self.project_dao.delete(id) + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_dao.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_domain/_use_cases/__init__.py b/V2/time_tracker/projects/_domain/_use_cases/__init__.py index defb127d..f2a7dfce 100644 --- a/V2/time_tracker/projects/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/projects/_domain/_use_cases/__init__.py @@ -4,3 +4,4 @@ from ._get_project_by_id_use_case import GetProjectUseCase from ._get_projects_use_case import GetProjectsUseCase from ._update_project_use_case import UpdateProjectUseCase +from ._get_latest_projects_use_case import GetLatestProjectsUseCase diff --git a/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py new file mode 100644 index 00000000..b26d484c --- /dev/null +++ b/V2/time_tracker/projects/_domain/_use_cases/_get_latest_projects_use_case.py @@ -0,0 +1,11 @@ +import typing + +from .. import Project, ProjectService + + +class GetLatestProjectsUseCase: + def __init__(self, project_service: ProjectService): + self.project_service = project_service + + def get_latest(self, owner_id: int) -> typing.List[Project]: + return self.project_service.get_latest(owner_id) diff --git a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py index 2ec61186..63e65972 100644 --- a/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py +++ b/V2/time_tracker/projects/_infrastructure/_data_persistence/_projects_dao.py @@ -5,6 +5,8 @@ from ... import _domain as domain from time_tracker._infrastructure import _db +from time_tracker.time_entries._infrastructure._data_persistence import TimeEntriesSQLDao +from time_tracker.customers._infrastructure._data_persistence import CustomersSQLDao class ProjectsSQLDao(domain.ProjectsDao): @@ -31,13 +33,12 @@ def __init__(self, database: _db.DB): def create(self, project_data: domain.Project) -> domain.Project: try: - new_project = project_data.__dict__ - new_project.pop('id', None) + validated_project = {key: value for (key, value) in project_data.__dict__.items() if value is not None} + + query = self.project.insert().values(validated_project).return_defaults() - query = self.project.insert().values(new_project).return_defaults() project = self.db.get_session().execute(query) - new_project.update({"id": project.inserted_primary_key[0]}) - return self.__create_project_dto(new_project) + return self.get_by_id(project.inserted_primary_key[0]) except sq.exc.SQLAlchemyError: return None @@ -45,14 +46,29 @@ def create(self, project_data: domain.Project) -> domain.Project: def get_by_id(self, id: int) -> domain.Project: query = sq.sql.select(self.project).where(self.project.c.id == id) project = self.db.get_session().execute(query).one_or_none() - return self.__create_project_dto(dict(project)) if project else None + if project: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + + return self.__create_project_dto(project) if project else None def get_all(self) -> typing.List[domain.Project]: query = sq.sql.select(self.project) - result = self.db.get_session().execute(query) + result = self.db.get_session().execute(query).all() + projects = [] + + for project in result: + customer_dao = CustomersSQLDao(self.db) + customer = customer_dao.get_by_id(project["customer_id"]) + project = dict(project) + project.update({"customer": customer.__dict__ if customer else None}) + projects.append(project) + return [ - self.__create_project_dto(dict(project)) - for project in result + self.__create_project_dto(project) + for project in projects ] def delete(self, id: int) -> domain.Project: @@ -72,6 +88,17 @@ def update(self, id: int, project_data: dict) -> domain.Project: except sq.exc.SQLAlchemyError as error: raise Exception(error.orig) + def get_latest(self, owner_id: int) -> typing.List[domain.Project]: + time_entries_dao = TimeEntriesSQLDao(self.db) + latest_time_entries = time_entries_dao.get_latest_entries(owner_id) + latest_projects = [] + + if latest_time_entries: + filter_project = typing.Counter(time_entry['project_id'] for time_entry in latest_time_entries) + latest_projects = [self.get_by_id(project_id) for project_id in filter_project] + + return latest_projects + def __create_project_dto(self, project: dict) -> domain.Project: project = {key: project.get(key) for key in self.project_key} return domain.Project(**project) diff --git a/V2/time_tracker/projects/interface.py b/V2/time_tracker/projects/interface.py index 2fb3244b..a0312258 100644 --- a/V2/time_tracker/projects/interface.py +++ b/V2/time_tracker/projects/interface.py @@ -2,4 +2,5 @@ from ._application import create_project from ._application import delete_project from ._application import get_projects -from ._application import update_project \ No newline at end of file +from ._application import update_project +from ._application import get_latest_projects \ No newline at end of file diff --git a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py index 29631650..9b48eb2a 100644 --- a/V2/time_tracker/time_entries/_application/_time_entries/__init__.py +++ b/V2/time_tracker/time_entries/_application/_time_entries/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from ._create_time_entry import create_time_entry from ._delete_time_entry import delete_time_entry +from ._get_latest_entries import get_latest_entries from ._update_time_entry import update_time_entry from ._get_time_entries import get_time_entries from ._get_latest_entries import get_latest_entries diff --git a/V2/time_tracker/time_entries/_domain/__init__.py b/V2/time_tracker/time_entries/_domain/__init__.py index 513877bf..f0aec6d0 100644 --- a/V2/time_tracker/time_entries/_domain/__init__.py +++ b/V2/time_tracker/time_entries/_domain/__init__.py @@ -5,6 +5,7 @@ from ._use_cases import ( CreateTimeEntryUseCase, DeleteTimeEntryUseCase, + GetLastestTimeEntryUseCase, UpdateTimeEntryUseCase, GetTimeEntriesUseCase, GetTimeEntryUseCase, diff --git a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py index 055cd850..0dd05666 100644 --- a/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py +++ b/V2/time_tracker/time_entries/_domain/_use_cases/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from ._create_time_entry_use_case import CreateTimeEntryUseCase from ._delete_time_entry_use_case import DeleteTimeEntryUseCase +from ._get_latest_entries_use_case import GetLastestTimeEntryUseCase from ._update_time_entry_use_case import UpdateTimeEntryUseCase from ._get_time_entry_use_case import GetTimeEntriesUseCase from ._get_time_entry_by_id_use_case import GetTimeEntryUseCase diff --git a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py index 5d368e26..59988205 100644 --- a/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py +++ b/V2/time_tracker/time_entries/_infrastructure/_data_persistence/_time_entries_sql_dao.py @@ -29,7 +29,7 @@ def __init__(self, database: _db.DB): sqlalchemy.Column('end_date', sqlalchemy.DateTime().with_variant(sqlalchemy.String, "sqlite")), sqlalchemy.Column('deleted', sqlalchemy.Boolean), sqlalchemy.Column('timezone_offset', sqlalchemy.String), - sqlalchemy.Column('project_id', sqlalchemy.Integer), + sqlalchemy.Column('project_id', sqlalchemy.Integer, sqlalchemy.ForeignKey('project.id')), extend_existing=True, ) diff --git a/V2/time_tracker/time_entries/interface.py b/V2/time_tracker/time_entries/interface.py index 87876204..1b6c1826 100644 --- a/V2/time_tracker/time_entries/interface.py +++ b/V2/time_tracker/time_entries/interface.py @@ -1,6 +1,7 @@ # flake8: noqa from ._application import create_time_entry from ._application import delete_time_entry +from ._application import get_latest_entries from ._application import update_time_entry from ._application import get_time_entries from ._application import get_latest_entries From e5f3d1c75ebb7b6a70d9f3c6b0930740ae302678 Mon Sep 17 00:00:00 2001 From: Jipson Murillo <38593785+Jobzi@users.noreply.github.com> Date: Fri, 3 Dec 2021 19:14:05 -0500 Subject: [PATCH 65/74] ci: TT-384 add new secret ci (#352) Co-authored-by: Alexander --- .../workflows/time-tracker-v1-on-pull-request-workflow.yml | 5 +++-- .github/workflows/time-tracker-v1-on-push-workflow.yml | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index 766f09bf..0610385c 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -35,7 +35,7 @@ jobs: uses: Azure/get-keyvault-secrets@v1 with: keyvault: "time-tracker-secrets" - secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" id: timeTrackerAzureVault - name: Run tests @@ -51,8 +51,9 @@ jobs: DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | pytest tests - name: Test the build of the app run: | - docker build . \ No newline at end of file + docker build . diff --git a/.github/workflows/time-tracker-v1-on-push-workflow.yml b/.github/workflows/time-tracker-v1-on-push-workflow.yml index 095712b9..1c4c1b18 100644 --- a/.github/workflows/time-tracker-v1-on-push-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-push-workflow.yml @@ -35,7 +35,7 @@ jobs: uses: Azure/get-keyvault-secrets@v1 with: keyvault: "time-tracker-secrets" - secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY" + secrets: "MS-CLIENT-ID, MS-AUTHORITY, MS-SCOPE, MS-SECRET, MS-ENDPOINT, USERID, AZURE-APP-CONFIGURATION-CONNECTION-STRING, DATABASE-ACCOUNT-URI, DATABASE-MASTER-KEY, DATABASE-NAME, AZURE-STORAGE-ACCOUNT-KEY, AZURE-STORAGE-CONNECTION-STRING" id: timeTrackerAzureVault - name: Run tests @@ -51,6 +51,7 @@ jobs: DATABASE_MASTER_KEY: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-MASTER-KEY }} DATABASE_NAME: ${{ steps.timeTrackerAzureVault.outputs.DATABASE-NAME }} AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} + AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | pytest tests - name: Login to docker registry @@ -62,4 +63,4 @@ jobs: - name: Build and push image run: | docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} \ No newline at end of file + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/timetrackerapi:${{ github.sha }} From 95ae3af1990680581a0e90a9674189b0d8552a75 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 4 Dec 2021 00:36:12 +0000 Subject: [PATCH 66/74] 0.45.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 20 ++++++++++++++++++++ time_tracker_api/version.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6b74353..c1b05983 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,26 @@ +## v0.45.0 (2021-12-04) +### Feature +* TT-414 get latest projects ([#363](https://github.com/ioet/time-tracker-backend/issues/363)) ([`aedf3d2`](https://github.com/ioet/time-tracker-backend/commit/aedf3d24d1cae9f40dcfb61196c619c15a1ac35c)) +* TT-418 crud customer v2 ([#361](https://github.com/ioet/time-tracker-backend/issues/361)) ([`1db51d6`](https://github.com/ioet/time-tracker-backend/commit/1db51d68824a429730fedd1cbf58ee334ee00fa0)) +* TT-429 created enums for response messages ([#362](https://github.com/ioet/time-tracker-backend/issues/362)) ([`fd0bc98`](https://github.com/ioet/time-tracker-backend/commit/fd0bc986fcc074bd5f6d6e9b7b602951375f2aee)) +* TT-404 GET Time Entries ([#341](https://github.com/ioet/time-tracker-backend/issues/341)) ([`c8a3134`](https://github.com/ioet/time-tracker-backend/commit/c8a31341b120792f46442815fad2d463262302ab)) +* TT-417-crud-v2-projects ([#360](https://github.com/ioet/time-tracker-backend/issues/360)) ([`10ec2bb`](https://github.com/ioet/time-tracker-backend/commit/10ec2bb9e2b5f67358c00b549a376b7f610041de)) +* TT-402 put v2 time entries ([#347](https://github.com/ioet/time-tracker-backend/issues/347)) ([`48f6411`](https://github.com/ioet/time-tracker-backend/commit/48f641170a968c7f12bc60f7882b0f4eda6cede2)) +* TT-418 CRUD customer v2 ([#359](https://github.com/ioet/time-tracker-backend/issues/359)) ([`d6c4c4d`](https://github.com/ioet/time-tracker-backend/commit/d6c4c4d67e72db867f197af8c7f8147839d6c178)) +* TT-403 delete v2 time entries ([#346](https://github.com/ioet/time-tracker-backend/issues/346)) ([`60a0dc7`](https://github.com/ioet/time-tracker-backend/commit/60a0dc7015f98b24a3429b1ceabf31e722741649)) +* TT-401-Post-v2-time-entries ([#344](https://github.com/ioet/time-tracker-backend/issues/344)) ([`5f107f3`](https://github.com/ioet/time-tracker-backend/commit/5f107f33cb640f7fa8e498db2157efb2d11f401d)) +* TT-399 Config use makefile to executing tests ([#350](https://github.com/ioet/time-tracker-backend/issues/350)) ([`32ee36f`](https://github.com/ioet/time-tracker-backend/commit/32ee36f39e81866c2f0767cf243c61afde6841c9)) + +### Fix +* TT-401 change in activity database instance and refactor test ([#355](https://github.com/ioet/time-tracker-backend/issues/355)) ([`b81319f`](https://github.com/ioet/time-tracker-backend/commit/b81319fe12bff57816dac1d0354000bfc6674c1c)) + +### Documentation +* TT-419 update readme V2 ([#357](https://github.com/ioet/time-tracker-backend/issues/357)) ([`6dd8505`](https://github.com/ioet/time-tracker-backend/commit/6dd85055b666888c7a22ffa1635b2e53903e7942)) +* TT-399 Readme update how to use makefile ([#354](https://github.com/ioet/time-tracker-backend/issues/354)) ([`10cc426`](https://github.com/ioet/time-tracker-backend/commit/10cc4269e4e60c6eff77bf1cf02cdf0d31dac86f)) + ## v0.44.0 (2021-11-15) ### Feature * TT-357 Create V2 Activities Azure DAO ([#334](https://github.com/ioet/time-tracker-backend/issues/334)) ([`3a99add`](https://github.com/ioet/time-tracker-backend/commit/3a99add39a3130c540d86b02c5a69dbda8536e8e)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index a262ca73..952f957f 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.44.0' +__version__ = '0.45.0' From fd39f660dbd895fcc17d6767ca453bcc2b91ab7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Rafael=20Arcos=20G=C3=B3mez?= <37599693+ararcos@users.noreply.github.com> Date: Sat, 4 Dec 2021 12:12:22 -0500 Subject: [PATCH 67/74] feat: TT-384 add read file from blob storage 12.1 (#366) --- commons/data_access_layer/file.py | 31 +++++++++++++ commons/data_access_layer/file_stream.py | 27 ----------- requirements/time_tracker_api/dev.txt | 2 +- requirements/time_tracker_api/prod.txt | 2 +- .../data_access_layer/file_stream_test.py | 14 +++--- .../activities/activities_namespace_test.py | 7 ++- .../activities/activities_model.py | 45 +++++++++++++++---- utils/azure_users.py | 7 ++- utils/extend_model.py | 15 ++++++- 9 files changed, 103 insertions(+), 47 deletions(-) create mode 100644 commons/data_access_layer/file.py delete mode 100644 commons/data_access_layer/file_stream.py diff --git a/commons/data_access_layer/file.py b/commons/data_access_layer/file.py new file mode 100644 index 00000000..879970dc --- /dev/null +++ b/commons/data_access_layer/file.py @@ -0,0 +1,31 @@ +import os +from azure.storage.blob import BlobServiceClient +from utils.azure_users import AzureConnection + + +class FileStream(): + CONNECTION_STRING = AzureConnection().get_blob_storage_connection_string() + container_name: str + + def __init__(self, container_name: str): + """ + Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. + `container_name`: The name of the Azure Storage container. + """ + self.container_name = container_name + + def get_file_stream(self, file_name: str): + if self.CONNECTION_STRING is None: + print("No connection string") + return None + + try: + account = BlobServiceClient.from_connection_string( + self.CONNECTION_STRING) + value = account.get_blob_client(self.container_name, file_name) + file = value.download_blob().readall() + print("Connection string is valid") + return file + except Exception as e: + print(f'Error: {e}') + return None diff --git a/commons/data_access_layer/file_stream.py b/commons/data_access_layer/file_stream.py deleted file mode 100644 index a705c061..00000000 --- a/commons/data_access_layer/file_stream.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -from azure.storage.blob.blockblobservice import BlockBlobService - -ACCOUNT_KEY = os.environ.get('AZURE_STORAGE_ACCOUNT_KEY') - -class FileStream: - def __init__(self, account_name:str, container_name:str): - """ - Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. - `account_name`: The name of the Azure Storage account. - `container_name`: The name of the Azure Storage container. - """ - self.account_name = account_name - self.container_name = container_name - self.blob_service = BlockBlobService(account_name=self.account_name, account_key=ACCOUNT_KEY) - - def get_file_stream(self, filename:str): - import tempfile - try: - local_file = tempfile.NamedTemporaryFile() - self.blob_service.get_blob_to_stream(self.container_name, filename, stream=local_file) - - local_file.seek(0) - return local_file - except Exception as e: - print(e) - return None \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index b7a6d667..4580007e 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -22,4 +22,4 @@ pyfiglet==0.7 factory_boy==3.2.0 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index dd6df0df..2bfaea68 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -47,4 +47,4 @@ pytz==2019.3 python-dateutil==2.8.1 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index a3119774..c2a5f5d8 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,15 +1,17 @@ import json -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + +fs = FileStream("tt-common-files") -fs = FileStream("storageaccounteystr82c5","tt-common-files") def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") - - assert len(json.load(result)) == 15 + + assert len(json.loads(result)) == 15 + def test__get_file_stream__return_None__when_not_enter_file_name_or_incorrect_name(): result = fs.get_file_stream("") - - assert result == None \ No newline at end of file + + assert result == None diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 86e34691..17efe406 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -19,6 +19,7 @@ fake_activity = ({"id": fake.random_int(1, 9999)}).update(valid_activity_data) + def test__get_all_activities__return_response__when_send_activities_get_request( client: FlaskClient, valid_header: dict ): @@ -28,6 +29,7 @@ def test__get_all_activities__return_response__when_send_activities_get_request( assert HTTPStatus.OK == response.status_code + def test_create_activity_should_succeed_with_valid_request( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -64,6 +66,7 @@ def test_create_activity_should_reject_bad_request( assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -90,6 +93,7 @@ def test_list_all_active( max_count=ANY, ) + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active_activities( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -118,7 +122,7 @@ def test_list_all_active_activities( max_count=ANY, ) - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -141,6 +145,7 @@ def test_get_activity_should_succeed_with_valid_id( repository_find_mock.assert_called_once_with(str(valid_id), ANY) +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_return_not_found_with_invalid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..0810521c 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -7,13 +7,15 @@ CosmosDBModel, CosmosDBDao, CosmosDBRepository, + CustomError, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + class ActivityDao(CRUDDao): pass @@ -118,16 +120,27 @@ def find_all_from_blob_storage( self, event_context: EventContext, mapper: Callable = None, + activity_id: str = None, file_name: str = "activity.json", - ): + ): tenant_id_value = self.find_partition_key_value(event_context) function_mapper = self.get_mapper_or_dict(mapper) if tenant_id_value is None: - return [] - - fs = FileStream("storageaccounteystr82c5","tt-common-files") + return [{"result": "error", "message": "tenant_id is None"}] + + fs = FileStream("tt-common-files") result = fs.get_file_stream(file_name) - return list(map(function_mapper, json.load(result))) if result is not None else [] + result_json = list(map(function_mapper, json.loads( + result))) if result is not None else [] + if activity_id is not None: + result_json = [ + activity + for activity in result_json + if activity.id == activity_id + ] + + return result_json + class ActivityCosmosDBDao(APICosmosDBDao, ActivityDao): def __init__(self, repository): @@ -143,7 +156,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,11 +175,25 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, **kwargs) -> list: event_ctx = self.create_event_context("read-many") - activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx + ) return activities + def get(self, id: str = None) -> list: + event_ctx = self.create_event_context("read-many") + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx, + activity_id=id + ) + + if len(activities) > 0: + return activities[0] + else: + raise CustomError(404, "It was not found") + def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') activity_payload['status'] = Status.ACTIVE.value diff --git a/utils/azure_users.py b/utils/azure_users.py index 45a1a0f3..e38507ee 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -13,7 +13,8 @@ class MSConfig: 'MS_SECRET', 'MS_SCOPE', 'MS_ENDPOINT', - 'USERID' + 'USERID', + 'AZURE_STORAGE_CONNECTION_STRING' ] check_variables_are_defined(ms_variables) @@ -24,6 +25,7 @@ class MSConfig: SCOPE = os.environ.get('MS_SCOPE') ENDPOINT = os.environ.get('MS_ENDPOINT') USERID = os.environ.get('USERID') + AZURE_STORAGE_CONNECTION_STRING = os.environ.get('AZURE_STORAGE_CONNECTION_STRING') class BearerAuth(requests.auth.AuthBase): @@ -67,6 +69,9 @@ def __init__(self, config=MSConfig): self.client = self.get_msal_client() self.access_token = self.get_token() self.groups_and_users = None + + def get_blob_storage_connection_string(self) -> str: + return self.config.AZURE_STORAGE_CONNECTION_STRING def get_msal_client(self): client = msal.ConfidentialClientApplication( diff --git a/utils/extend_model.py b/utils/extend_model.py index ce39d5b7..9040895f 100644 --- a/utils/extend_model.py +++ b/utils/extend_model.py @@ -96,7 +96,7 @@ def add_project_info_to_time_entries(time_entries, projects): setattr(time_entry, 'customer_name', project.customer_name) -def add_activity_name_to_time_entries(time_entries, activities): +def add_activity_name_to_time_entries_v1(time_entries, activities): for time_entry in time_entries: for activity in activities: if time_entry.activity_id == activity.id: @@ -107,6 +107,19 @@ def add_activity_name_to_time_entries(time_entries, activities): ) setattr(time_entry, 'activity_name', name) +def add_activity_name_to_time_entries(time_entries, activities): + for time_entry in time_entries: + result = [x for x in activities if time_entry.activity_id == x.id] + if result: + name = ( + result[0].name + " (archived)" + if result[0].is_deleted() + else result[0].name + ) + setattr(time_entry, 'activity_name', name) + else: + setattr(time_entry, 'activity_name', "activity") + def add_user_email_to_time_entries(time_entries, users): for time_entry in time_entries: From d33ffe888a4134e25b26afc28d93ac8ca9448717 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 4 Dec 2021 17:53:58 +0000 Subject: [PATCH 68/74] 0.46.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1b05983..6cc5c526 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.46.0 (2021-12-04) +### Feature +* TT-384 add read file from blob storage 12.1 ([#366](https://github.com/ioet/time-tracker-backend/issues/366)) ([`fd39f66`](https://github.com/ioet/time-tracker-backend/commit/fd39f660dbd895fcc17d6767ca453bcc2b91ab7b)) + ## v0.45.0 (2021-12-04) ### Feature * TT-414 get latest projects ([#363](https://github.com/ioet/time-tracker-backend/issues/363)) ([`aedf3d2`](https://github.com/ioet/time-tracker-backend/commit/aedf3d24d1cae9f40dcfb61196c619c15a1ac35c)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 952f957f..50fa61e7 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.45.0' +__version__ = '0.46.0' From 628da5c165c434bfc2a47ffc00222710cdc379b3 Mon Sep 17 00:00:00 2001 From: Sandro Castillo Date: Thu, 20 Jan 2022 05:56:16 -0500 Subject: [PATCH 69/74] fix: TT-507 Error in time-entries list for different time zone (#367) --- utils/time.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils/time.py b/utils/time.py index 37082d76..aa82dab3 100644 --- a/utils/time.py +++ b/utils/time.py @@ -60,10 +60,10 @@ def to_utc(date: datetime) -> datetime: localized = _tz.localize(date) return localized - from dateutil.parser import isoparse + from dateutil import parser - no_timezone_info = isoparse(value).tzinfo is None + no_timezone_info = parser.parse(value).tzinfo is None if no_timezone_info: - return to_utc(isoparse(value)) + return to_utc(parser.parse(value)) else: - return isoparse(value) + return parser.parse(value) From eb5256f0eb1ab843f55afa71838f2a3d8eeb7999 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 20 Jan 2022 11:14:19 +0000 Subject: [PATCH 70/74] 0.46.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 4 ++++ time_tracker_api/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cc5c526..0141b02d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ +## v0.46.1 (2022-01-20) +### Fix +* TT-507 Error in time-entries list for different time zone ([#367](https://github.com/ioet/time-tracker-backend/issues/367)) ([`628da5c`](https://github.com/ioet/time-tracker-backend/commit/628da5c165c434bfc2a47ffc00222710cdc379b3)) + ## v0.46.0 (2021-12-04) ### Feature * TT-384 add read file from blob storage 12.1 ([#366](https://github.com/ioet/time-tracker-backend/issues/366)) ([`fd39f66`](https://github.com/ioet/time-tracker-backend/commit/fd39f660dbd895fcc17d6767ca453bcc2b91ab7b)) diff --git a/time_tracker_api/version.py b/time_tracker_api/version.py index 50fa61e7..dd8e65f1 100644 --- a/time_tracker_api/version.py +++ b/time_tracker_api/version.py @@ -1 +1 @@ -__version__ = '0.46.0' +__version__ = '0.46.1' From 957227eea388bbf9e46f9c37427f3ed9052c64e1 Mon Sep 17 00:00:00 2001 From: Carlos Carvajal <56209390+cxcarvaj@users.noreply.github.com> Date: Wed, 9 Mar 2022 10:12:04 -0500 Subject: [PATCH 71/74] Fix: All ioet users are returned from AD (#369) * Fix: All ioet users are returned from AD * Fix: tests workflows on PR * feat: TT-551 Applying some changes in variable name --- .../time-tracker-v1-on-pull-request-workflow.yml | 2 +- requirements/time_tracker_api/prod.txt | 2 ++ tests/commons/data_access_layer/file_stream_test.py | 3 ++- tests/utils/azure_users_test.py | 2 +- utils/azure_users.py | 8 +++++--- 5 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml index 0610385c..323b8615 100644 --- a/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml +++ b/.github/workflows/time-tracker-v1-on-pull-request-workflow.yml @@ -53,7 +53,7 @@ jobs: AZURE_STORAGE_ACCOUNT_KEY: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-ACCOUNT-KEY }} AZURE_STORAGE_CONNECTION_STRING: ${{ steps.timeTrackerAzureVault.outputs.AZURE-STORAGE-CONNECTION-STRING }} run: | - pytest tests + pytest -v - name: Test the build of the app run: | docker build . diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index 2bfaea68..48bf85a7 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -15,6 +15,8 @@ flake8==3.7.9 WSGIserver==1.3 Werkzeug==0.16.1 Jinja2==2.11.3 +markupsafe==2.0.1 +itsdangerous==2.0.1 #WSGI server gunicorn==20.0.4 diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index c2a5f5d8..a8b4c137 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,10 +1,11 @@ import json +import pytest from commons.data_access_layer.file import FileStream fs = FileStream("tt-common-files") - +@pytest.mark.skip(reason='file not in the repository') def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 22bd8965..90300bd3 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -264,4 +264,4 @@ def test_users_functions_should_returns_all_users( users = AzureConnection().users() - assert len(users) == 0 + assert len(users) == 2 diff --git a/utils/azure_users.py b/utils/azure_users.py index e38507ee..0cf85c96 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -133,9 +133,11 @@ def users(self) -> List[AzureUser]: )[1] endpoint = endpoint + skip_token_attribute + request_token - for i in range(len(users)): - if users[i]['mail'] is None: - valid_users.append(users[i]) + for user in users: + user_emails = user['otherMails'] + email_domain = user_emails[0].split('@')[1] + if(len(user_emails) != 0 and email_domain == 'ioet.com'): + valid_users.append(user) return [self.to_azure_user(user) for user in valid_users] From 6abcf1db2dbd7e30d2d1e1b9ca9567261146a2f3 Mon Sep 17 00:00:00 2001 From: Carlos Carvajal <56209390+cxcarvaj@users.noreply.github.com> Date: Wed, 9 Mar 2022 14:23:12 -0500 Subject: [PATCH 72/74] Hot fix TT-551 all users are returned from AD (#372) * Revert changes in user * TT-551 hot fix --- utils/azure_users.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/utils/azure_users.py b/utils/azure_users.py index 0cf85c96..297b43f5 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -135,8 +135,7 @@ def users(self) -> List[AzureUser]: for user in users: user_emails = user['otherMails'] - email_domain = user_emails[0].split('@')[1] - if(len(user_emails) != 0 and email_domain == 'ioet.com'): + if(len(user_emails) != 0 and user_emails[0].split('@')[1] == 'ioet.com'): valid_users.append(user) return [self.to_azure_user(user) for user in valid_users] From 446c636c87314488465064e6fe55582b2c632cc4 Mon Sep 17 00:00:00 2001 From: alejandra-ponce <101274839+alejandra-ponce@users.noreply.github.com> Date: Mon, 21 Mar 2022 17:56:41 -0500 Subject: [PATCH 73/74] fix: TT-583 unable to set admin role to users (#373) --- tests/utils/azure_users_test.py | 28 ++++++++++++++-------------- utils/azure_users.py | 19 +++++++++++++++---- 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/tests/utils/azure_users_test.py b/tests/utils/azure_users_test.py index 90300bd3..8a888ac8 100644 --- a/tests/utils/azure_users_test.py +++ b/tests/utils/azure_users_test.py @@ -17,10 +17,10 @@ ], ) def test_azure_connection_is_test_user( - get_mock, - field_name, - field_value, - is_test_user_expected_value, + get_mock, + field_name, + field_value, + is_test_user_expected_value, ): response_mock = Mock() response_mock.status_code = 200 @@ -58,7 +58,7 @@ def test_azure_connection_get_test_user_ids(get_mock): @patch('utils.azure_users.AzureConnection.get_test_user_ids') @patch('utils.azure_users.AzureConnection.users') def test_azure_connection_get_non_test_users( - users_mock, get_test_user_ids_mock + users_mock, get_test_user_ids_mock ): test_user = AzureUser('ID1', None, None, [], []) non_test_user = AzureUser('ID2', None, None, [], []) @@ -81,7 +81,7 @@ def test_azure_connection_get_group_id_by_group_name(get_mock): group_id = 'ID1' azure_connection = AzureConnection() assert ( - azure_connection.get_group_id_by_group_name('group_name') == group_id + azure_connection.get_group_id_by_group_name('group_name') == group_id ) @@ -91,7 +91,7 @@ def test_azure_connection_get_group_id_by_group_name(get_mock): @patch('requests.post') @mark.parametrize('expected_value', [True, False]) def test_is_user_in_group( - post_mock, get_group_id_by_group_name_mock, expected_value + post_mock, get_group_id_by_group_name_mock, expected_value ): response_expected = {'value': expected_value} response_mock = Mock() @@ -104,8 +104,8 @@ def test_is_user_in_group( azure_connection = AzureConnection() assert ( - azure_connection.is_user_in_group('user_id', payload_mock) - == response_expected + azure_connection.is_user_in_group('user_id', payload_mock) + == response_expected ) @@ -164,7 +164,7 @@ def test_get_groups_and_users(get_mock): ], ) def test_get_groups_by_user_id( - get_groups_and_users_mock, user_id, groups_expected_value + get_groups_and_users_mock, user_id, groups_expected_value ): get_groups_and_users_mock.return_value = [ ('test-group-1', ['user-id1', 'user-id2']), @@ -180,7 +180,7 @@ def test_get_groups_by_user_id( @patch('utils.azure_users.AzureConnection.get_token', Mock()) @patch('utils.azure_users.AzureConnection.get_groups_and_users') def test_get_groups_and_users_called_once_by_instance( - get_groups_and_users_mock, + get_groups_and_users_mock, ): get_groups_and_users_mock.return_value = [] user_id = 'user-id1' @@ -198,7 +198,7 @@ def test_get_groups_and_users_called_once_by_instance( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.post') def test_add_user_to_group( - post_mock, get_group_id_by_group_name_mock, get_user_mock + post_mock, get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -224,7 +224,7 @@ def test_add_user_to_group( @patch('utils.azure_users.AzureConnection.get_group_id_by_group_name') @patch('requests.delete') def test_remove_user_from_group( - delete_mock, get_group_id_by_group_name_mock, get_user_mock + delete_mock, get_group_id_by_group_name_mock, get_user_mock ): get_group_id_by_group_name_mock.return_value = 'dummy_group' test_user = AzureUser('ID1', None, None, [], []) @@ -247,7 +247,7 @@ def test_remove_user_from_group( @patch('utils.azure_users.AzureConnection.get_groups_and_users') @patch('requests.get') def test_users_functions_should_returns_all_users( - get_mock, get_groups_and_users_mock + get_mock, get_groups_and_users_mock ): first_response = Response() first_response.status_code = 200 diff --git a/utils/azure_users.py b/utils/azure_users.py index 297b43f5..5c97bec7 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -69,7 +69,7 @@ def __init__(self, config=MSConfig): self.client = self.get_msal_client() self.access_token = self.get_token() self.groups_and_users = None - + def get_blob_storage_connection_string(self) -> str: return self.config.AZURE_STORAGE_CONNECTION_STRING @@ -187,7 +187,15 @@ def add_user_to_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is None: + self.groups_and_users = [(group_name, [user_id])] + elif group_name not in [gn for (gn, ul) in self.groups_and_users]: + self.groups_and_users.append((group_name, [user_id])) + else: + for (cache_group_name, user_ids) in self.groups_and_users: + if group_name == cache_group_name: + if user_id not in user_ids: + user_ids.append(user_id) return self.get_user(user_id) def remove_user_from_group(self, user_id, group_name): @@ -201,7 +209,11 @@ def remove_user_from_group(self, user_id, group_name): headers=HTTP_PATCH_HEADERS, ) assert 204 == response.status_code - + if self.groups_and_users is not None: + for (cache_group_name, user_ids) in self.groups_and_users: + if group_name == cache_group_name: + if user_id in user_ids: + user_ids.remove(user_id) return self.get_user(user_id) def get_non_test_users(self) -> List[AzureUser]: @@ -271,7 +283,6 @@ def get_groups_and_users(self): result = list(map(parse_item, response.json()['value'])) users_id = self.config.USERID.split(",") result[0][1].extend(users_id) - return result def is_user_in_group(self, user_id, data: dict): From 61678e099a2492cb716454cd2a69c76f030b36bf Mon Sep 17 00:00:00 2001 From: David Cadena Date: Mon, 11 Apr 2022 20:55:21 -0500 Subject: [PATCH 74/74] hotfix: created a variable that contains the original endpoint (#375) --- utils/azure_users.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/utils/azure_users.py b/utils/azure_users.py index 5c97bec7..84f590a4 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -108,6 +108,7 @@ def users(self) -> List[AzureUser]: role_fields_params=role_fields_params, ) + final_endpoint = endpoint exists_users = True users = [] valid_users = [] @@ -115,8 +116,8 @@ def users(self) -> List[AzureUser]: while exists_users: response = requests.get( - endpoint, auth=BearerAuth(self.access_token) - ) + final_endpoint, auth=BearerAuth(self.access_token) + ) json_response = response.json() assert 200 == response.status_code assert 'value' in json_response @@ -131,8 +132,8 @@ def users(self) -> List[AzureUser]: request_token = remaining_users_link.split( skip_token_attribute )[1] - endpoint = endpoint + skip_token_attribute + request_token - + final_endpoint = endpoint + skip_token_attribute + request_token + for user in users: user_emails = user['otherMails'] if(len(user_emails) != 0 and user_emails[0].split('@')[1] == 'ioet.com'):