|
1 | | -import logging |
2 | | - |
3 | | -from dataall.modules.s3_datasets.indexers.dataset_indexer import DatasetIndexer |
4 | | -from dataall.base.context import get_context |
5 | | -from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService |
6 | | -from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService |
7 | | - |
# TODO: re-enable invocation throttling once ResourceThresholdService is available:
# from dataall.core.resource_threshold.services.resource_threshold_service import ResourceThresholdService
10 | | -from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository |
11 | | -from dataall.base.db.exceptions import ResourceAlreadyExists |
12 | | -from dataall.modules.s3_datasets.services.dataset_service import DatasetService |
13 | | -from dataall.modules.s3_datasets.aws.s3_location_client import S3LocationClient |
14 | | -from dataall.modules.s3_datasets.db.dataset_location_repositories import DatasetLocationRepository |
15 | | -from dataall.modules.s3_datasets.indexers.location_indexer import DatasetLocationIndexer |
16 | | -from dataall.modules.s3_datasets.services.dataset_permissions import ( |
17 | | - UPDATE_DATASET_FOLDER, |
18 | | - MANAGE_DATASETS, |
19 | | - CREATE_DATASET_FOLDER, |
20 | | - LIST_DATASET_FOLDERS, |
21 | | - DELETE_DATASET_FOLDER, |
22 | | -) |
23 | | -from dataall.modules.s3_datasets.services.dataset_permissions import DATASET_FOLDER_READ, GET_DATASET_FOLDER |
24 | | -from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository |
25 | | -from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, S3Dataset |
26 | | -from dataall.modules.s3_datasets.aws.bedrock_metadata_client import BedrockClient |
27 | | -from dataall.modules.s3_datasets.aws.s3_dataset_client import S3DatasetClient |
28 | | - |
# Module-level logger named after this module's import path.
log = logging.getLogger(__name__)
30 | | - |
31 | | - |
class DatasetLocationService:
    """Business-logic layer for dataset folders (S3 storage locations).

    Each public method opens its own scoped DB session; access is guarded
    by the tenant- and resource-permission decorators on the methods.
    """

    @staticmethod
    def _get_dataset_uri(session, uri):
        # Resolve a folder URI to its parent dataset URI; used as the
        # `parent_resource` hook by the permission decorators below.
        folder = DatasetLocationRepository.get_location_by_uri(session, uri)
        return folder.datasetUri

    @staticmethod
    @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
    @ResourcePolicyService.has_resource_permission(CREATE_DATASET_FOLDER)
    def create_storage_location(uri: str, data: dict):
        """Create a folder on dataset `uri`: persist it, grant read
        permissions, link optional glossary terms, create the S3 prefix
        and index folder and dataset.

        Raises ResourceAlreadyExists when `data['prefix']` is already
        registered on the dataset.
        """
        with get_context().db_engine.scoped_session() as session:
            if DatasetLocationRepository.exists(session, uri, data['prefix']):
                raise ResourceAlreadyExists(
                    action='Create Folder',
                    message=f'Folder: {data["prefix"]} already exist on dataset {uri}',
                )

            dataset = DatasetRepository.get_dataset_by_uri(session, uri)
            folder = DatasetLocationRepository.create_dataset_location(session, dataset, data)
            DatasetLocationService._attach_dataset_folder_read_permission(session, dataset, folder.locationUri)

            if 'terms' in data:
                DatasetLocationService._create_glossary_links(session, folder, data['terms'])

            # Provision the physical S3 prefix before indexing the new folder.
            S3LocationClient(folder, dataset).create_bucket_prefix()

            DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri)
            DatasetIndexer.upsert(session, dataset.datasetUri)
            return folder

    @staticmethod
    @ResourcePolicyService.has_resource_permission(LIST_DATASET_FOLDERS)
    def list_dataset_locations(uri: str, filter: dict = None):
        """Return the (optionally filtered) folders of dataset `uri`."""
        with get_context().db_engine.scoped_session() as session:
            return DatasetLocationRepository.list_dataset_locations(session=session, uri=uri, data=filter)

    @staticmethod
    def get_storage_location(uri):
        """Fetch a single folder by its URI."""
        with get_context().db_engine.scoped_session() as session:
            return DatasetLocationRepository.get_location_by_uri(session, uri)

    @staticmethod
    @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
    @ResourcePolicyService.has_resource_permission(UPDATE_DATASET_FOLDER, parent_resource=_get_dataset_uri)
    def update_storage_location(uri: str, data: dict):
        """Copy every key/value of `data` onto the folder record, refresh
        glossary links when 'terms' is supplied, and re-index both the
        folder and its dataset."""
        with get_context().db_engine.scoped_session() as session:
            folder = DatasetLocationRepository.get_location_by_uri(session, uri)
            for attribute, value in data.items():
                setattr(folder, attribute, value)

            if 'terms' in data:
                DatasetLocationService._create_glossary_links(session, folder, data['terms'])

            DatasetLocationIndexer.upsert(session, folder_uri=folder.locationUri)
            DatasetIndexer.upsert(session, folder.datasetUri)

            return folder

    @staticmethod
    @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
    @ResourcePolicyService.has_resource_permission(DELETE_DATASET_FOLDER, parent_resource=_get_dataset_uri)
    def remove_storage_location(uri: str = None):
        """Delete a folder: run the dataset pre-/post-delete hooks, revoke
        the read permissions, drop glossary links and remove the search
        index document. Returns True on success."""
        with get_context().db_engine.scoped_session() as session:
            folder = DatasetLocationRepository.get_location_by_uri(session, uri)
            DatasetService.check_before_delete(session, folder.locationUri, action=DELETE_DATASET_FOLDER)
            DatasetService.execute_on_delete(session, folder.locationUri, action=DELETE_DATASET_FOLDER)
            dataset = DatasetRepository.get_dataset_by_uri(session, folder.datasetUri)
            DatasetLocationService._delete_dataset_folder_read_permission(session, dataset, folder.locationUri)
            DatasetLocationRepository.delete(session, folder)
            GlossaryRepository.delete_glossary_terms_links(
                session,
                target_uri=folder.locationUri,
                target_type='DatasetStorageLocation',
            )
            DatasetLocationIndexer.delete_doc(doc_id=folder.locationUri)
            return True

    @staticmethod
    def _create_glossary_links(session, location, terms):
        # Link glossary terms to the folder on behalf of the current user.
        GlossaryRepository.set_glossary_terms_links(
            session, get_context().username, location.locationUri, 'Folder', terms
        )

    @staticmethod
    def _attach_dataset_folder_read_permission(session, dataset: S3Dataset, location_uri):
        """Grant DATASET_FOLDER_READ on the folder to the dataset admin
        group and the stewards group (admins double as stewards when no
        stewards group is set)."""
        stewards = dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName
        for group in {dataset.SamlAdminGroupName, stewards}:
            ResourcePolicyService.attach_resource_policy(
                session=session,
                group=group,
                permissions=DATASET_FOLDER_READ,
                resource_uri=location_uri,
                resource_type=DatasetStorageLocation.__name__,
            )

    @staticmethod
    def _delete_dataset_folder_read_permission(session, dataset: S3Dataset, location_uri):
        """Revoke the folder resource policies of the dataset admin and
        stewards groups."""
        stewards = dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName
        for group in {dataset.SamlAdminGroupName, stewards}:
            ResourcePolicyService.delete_resource_policy(session=session, group=group, resource_uri=location_uri)

    @staticmethod
    @ResourcePolicyService.has_resource_permission(GET_DATASET_FOLDER)
    def get_folder_restricted_information(uri: str, folder: DatasetStorageLocation):
        """Return the parent dataset record holding the folder's restricted
        details."""
        with get_context().db_engine.scoped_session() as session:
            return DatasetRepository.get_dataset_by_uri(session, folder.datasetUri)

    @staticmethod
    @ResourcePolicyService.has_resource_permission(UPDATE_DATASET_FOLDER)
    # TODO: re-enable invocation throttling once ResourceThresholdService is available:
    # @ResourceThresholdService.check_invocation_count(
    #     'metadata', 'modules.s3_datasets.features.generate_metadata_ai.max_count_per_day'
    # )
    def generate_metadata_for_folder(uri, metadata_types):
        """Ask Bedrock to generate the requested metadata types for the
        folder, based on the object keys under its S3 prefix."""
        with get_context().db_engine.scoped_session() as session:
            folder = DatasetLocationRepository.get_location_by_uri(session, uri)
            dataset = DatasetRepository.get_dataset_by_uri(session, folder.datasetUri)
            objects = S3DatasetClient(dataset).list_bucket_files(folder.S3BucketName, folder.S3Prefix)
            metadata = BedrockClient().invoke_model_folder_metadata(
                metadata_types=metadata_types, folder=folder, files=[obj['Key'] for obj in objects]
            )
            return [{'targetUri': uri, 'targetType': 'Folder'} | metadata]