diff --git a/commons/data_access_layer/file.py b/commons/data_access_layer/file.py new file mode 100644 index 00000000..879970dc --- /dev/null +++ b/commons/data_access_layer/file.py @@ -0,0 +1,31 @@ +import os +from azure.storage.blob import BlobServiceClient +from utils.azure_users import AzureConnection + + +class FileStream(): + CONNECTION_STRING = AzureConnection().get_blob_storage_connection_string() + container_name: str + + def __init__(self, container_name: str): + """ + Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. + `container_name`: The name of the Azure Storage container. + """ + self.container_name = container_name + + def get_file_stream(self, file_name: str): + if self.CONNECTION_STRING is None: + print("No connection string") + return None + + try: + account = BlobServiceClient.from_connection_string( + self.CONNECTION_STRING) + value = account.get_blob_client(self.container_name, file_name) + file = value.download_blob().readall() + print("Connection string is valid") + return file + except Exception as e: + print(f'Error: {e}') + return None diff --git a/commons/data_access_layer/file_stream.py b/commons/data_access_layer/file_stream.py deleted file mode 100644 index a705c061..00000000 --- a/commons/data_access_layer/file_stream.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -from azure.storage.blob.blockblobservice import BlockBlobService - -ACCOUNT_KEY = os.environ.get('AZURE_STORAGE_ACCOUNT_KEY') - -class FileStream: - def __init__(self, account_name:str, container_name:str): - """ - Initialize the FileStream object. which is used to get the file stream from Azure Blob Storage. - `account_name`: The name of the Azure Storage account. - `container_name`: The name of the Azure Storage container. - """ - self.account_name = account_name - self.container_name = container_name - self.blob_service = BlockBlobService(account_name=self.account_name, account_key=ACCOUNT_KEY) - - def get_file_stream(self, filename:str): - import tempfile - try: - local_file = tempfile.NamedTemporaryFile() - self.blob_service.get_blob_to_stream(self.container_name, filename, stream=local_file) - - local_file.seek(0) - return local_file - except Exception as e: - print(e) - return None \ No newline at end of file diff --git a/requirements/time_tracker_api/dev.txt b/requirements/time_tracker_api/dev.txt index b7a6d667..4580007e 100644 --- a/requirements/time_tracker_api/dev.txt +++ b/requirements/time_tracker_api/dev.txt @@ -22,4 +22,4 @@ pyfiglet==0.7 factory_boy==3.2.0 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/requirements/time_tracker_api/prod.txt b/requirements/time_tracker_api/prod.txt index dd6df0df..2bfaea68 100644 --- a/requirements/time_tracker_api/prod.txt +++ b/requirements/time_tracker_api/prod.txt @@ -47,4 +47,4 @@ pytz==2019.3 python-dateutil==2.8.1 # azure blob storage -azure-storage-blob==2.1.0 \ No newline at end of file +azure-storage-blob==12.1.0 \ No newline at end of file diff --git a/tests/commons/data_access_layer/file_stream_test.py b/tests/commons/data_access_layer/file_stream_test.py index a3119774..c2a5f5d8 100644 --- a/tests/commons/data_access_layer/file_stream_test.py +++ b/tests/commons/data_access_layer/file_stream_test.py @@ -1,15 +1,17 @@ import json -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + +fs = FileStream("tt-common-files") -fs = FileStream("storageaccounteystr82c5","tt-common-files") def test__get_file_stream__return_file_content__when_enter_file_name(): result = fs.get_file_stream("activity_test.json") - - assert len(json.load(result)) == 15 + + assert len(json.loads(result)) == 15 + def test__get_file_stream__return_None__when_not_enter_file_name_or_incorrect_name(): result = fs.get_file_stream("") - - assert result == None \ No newline at end of file + + assert result == None diff --git a/tests/time_tracker_api/activities/activities_namespace_test.py b/tests/time_tracker_api/activities/activities_namespace_test.py index 86e34691..17efe406 100644 --- a/tests/time_tracker_api/activities/activities_namespace_test.py +++ b/tests/time_tracker_api/activities/activities_namespace_test.py @@ -19,6 +19,7 @@ fake_activity = ({"id": fake.random_int(1, 9999)}).update(valid_activity_data) + def test__get_all_activities__return_response__when_send_activities_get_request( client: FlaskClient, valid_header: dict ): @@ -28,6 +29,7 @@ def test__get_all_activities__return_response__when_send_activities_get_request( assert HTTPStatus.OK == response.status_code + def test_create_activity_should_succeed_with_valid_request( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -64,6 +66,7 @@ def test_create_activity_should_reject_bad_request( assert HTTPStatus.BAD_REQUEST == response.status_code repository_create_mock.assert_not_called() + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -90,6 +93,7 @@ def test_list_all_active( max_count=ANY, ) + @pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_list_all_active_activities( client: FlaskClient, mocker: MockFixture, valid_header: dict @@ -118,7 +122,7 @@ def test_list_all_active_activities( max_count=ANY, ) - +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_succeed_with_valid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): @@ -141,6 +145,7 @@ def test_get_activity_should_succeed_with_valid_id( repository_find_mock.assert_called_once_with(str(valid_id), ANY) +@pytest.mark.skip(reason="There is currently no way to test this. Getting the value of the azure blob storage") def test_get_activity_should_return_not_found_with_invalid_id( client: FlaskClient, mocker: MockFixture, valid_header: dict ): diff --git a/time_tracker_api/activities/activities_model.py b/time_tracker_api/activities/activities_model.py index 158c8053..0810521c 100644 --- a/time_tracker_api/activities/activities_model.py +++ b/time_tracker_api/activities/activities_model.py @@ -7,13 +7,15 @@ CosmosDBModel, CosmosDBDao, CosmosDBRepository, + CustomError, ) from time_tracker_api.database import CRUDDao, APICosmosDBDao from typing import List, Callable from commons.data_access_layer.database import EventContext from utils.enums.status import Status from utils.query_builder import CosmosDBQueryBuilder -from commons.data_access_layer.file_stream import FileStream +from commons.data_access_layer.file import FileStream + class ActivityDao(CRUDDao): pass @@ -118,16 +120,27 @@ def find_all_from_blob_storage( self, event_context: EventContext, mapper: Callable = None, + activity_id: str = None, file_name: str = "activity.json", - ): + ): tenant_id_value = self.find_partition_key_value(event_context) function_mapper = self.get_mapper_or_dict(mapper) if tenant_id_value is None: - return [] - - fs = FileStream("storageaccounteystr82c5","tt-common-files") + return [{"result": "error", "message": "tenant_id is None"}] + + fs = FileStream("tt-common-files") result = fs.get_file_stream(file_name) - return list(map(function_mapper, json.load(result))) if result is not None else [] + result_json = list(map(function_mapper, json.loads( + result))) if result is not None else [] + if activity_id is not None: + result_json = [ + activity + for activity in result_json + if activity.id == activity_id + ] + + return result_json + class ActivityCosmosDBDao(APICosmosDBDao, ActivityDao): def __init__(self, repository): @@ -143,7 +156,7 @@ def get_all_with_id_in_list( activity_ids, ) - def get_all( + def get_all_v1( self, conditions: dict = None, activities_id: List = None, @@ -162,11 +175,25 @@ def get_all( ) return activities - def get_all_test(self, conditions: dict = None) -> list: + def get_all(self, **kwargs) -> list: event_ctx = self.create_event_context("read-many") - activities = self.repository.find_all_from_blob_storage(event_context=event_ctx) + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx + ) return activities + def get(self, id: str = None) -> list: + event_ctx = self.create_event_context("read-many") + activities = self.repository.find_all_from_blob_storage( + event_context=event_ctx, + activity_id=id + ) + + if len(activities) > 0: + return activities[0] + else: + raise CustomError(404, "It was not found") + def create(self, activity_payload: dict): event_ctx = self.create_event_context('create') activity_payload['status'] = Status.ACTIVE.value diff --git a/utils/azure_users.py b/utils/azure_users.py index 45a1a0f3..e38507ee 100644 --- a/utils/azure_users.py +++ b/utils/azure_users.py @@ -13,7 +13,8 @@ class MSConfig: 'MS_SECRET', 'MS_SCOPE', 'MS_ENDPOINT', - 'USERID' + 'USERID', + 'AZURE_STORAGE_CONNECTION_STRING' ] check_variables_are_defined(ms_variables) @@ -24,6 +25,7 @@ class MSConfig: SCOPE = os.environ.get('MS_SCOPE') ENDPOINT = os.environ.get('MS_ENDPOINT') USERID = os.environ.get('USERID') + AZURE_STORAGE_CONNECTION_STRING = os.environ.get('AZURE_STORAGE_CONNECTION_STRING') class BearerAuth(requests.auth.AuthBase): @@ -67,6 +69,9 @@ def __init__(self, config=MSConfig): self.client = self.get_msal_client() self.access_token = self.get_token() self.groups_and_users = None + + def get_blob_storage_connection_string(self) -> str: + return self.config.AZURE_STORAGE_CONNECTION_STRING def get_msal_client(self): client = msal.ConfidentialClientApplication( diff --git a/utils/extend_model.py b/utils/extend_model.py index ce39d5b7..9040895f 100644 --- a/utils/extend_model.py +++ b/utils/extend_model.py @@ -96,7 +96,7 @@ def add_project_info_to_time_entries(time_entries, projects): setattr(time_entry, 'customer_name', project.customer_name) -def add_activity_name_to_time_entries(time_entries, activities): +def add_activity_name_to_time_entries_v1(time_entries, activities): for time_entry in time_entries: for activity in activities: if time_entry.activity_id == activity.id: @@ -107,6 +107,19 @@ def add_activity_name_to_time_entries(time_entries, activities): ) setattr(time_entry, 'activity_name', name) +def add_activity_name_to_time_entries(time_entries, activities): + for time_entry in time_entries: + result = [x for x in activities if time_entry.activity_id == x.id] + if result: + name = ( + result[0].name + " (archived)" + if result[0].is_deleted() + else result[0].name + ) + setattr(time_entry, 'activity_name', name) + else: + setattr(time_entry, 'activity_name', "activity") + def add_user_email_to_time_entries(time_entries, users): for time_entry in time_entries: