Skip to content

Commit 64f2342

Browse files
committed
fx rename
1 parent c7fb554 commit 64f2342

2 files changed

Lines changed: 12 additions & 16 deletions

File tree

functions-python/tasks_executor/src/tasks/validation_reports/rebuild_missing_validation_reports.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def rebuild_missing_validation_reports(
148148

149149
# Apply limit inside the GCS blob check so we stop as soon as we have
150150
# enough valid datasets, without discarding candidates that would pass.
151-
valid_datasets = _filter_datasets_with_existing_blob(datasets, limit=limit)
151+
valid_datasets = _filter_out_datasets_without_blob(datasets, limit=limit)
152152
logging.info(
153153
"%s datasets have a GCS blob and will be triggered", len(valid_datasets)
154154
)
@@ -283,7 +283,7 @@ def _get_datasets_for_validation(
283283
return query.all()
284284

285285

286-
def _filter_datasets_with_existing_blob(
286+
def _filter_out_datasets_without_blob(
287287
datasets: List[tuple],
288288
limit: Optional[int] = None,
289289
) -> List[tuple]:

functions-python/tasks_executor/tests/tasks/validation_reports/test_rebuild_missing_validation_reports.py

Lines changed: 10 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ def _make_session_mock(self, datasets=None):
113113
return session
114114

115115
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
116-
@patch(f"{_MODULE}._filter_datasets_with_existing_blob", return_value=[])
116+
@patch(f"{_MODULE}._filter_out_datasets_without_blob", return_value=[])
117117
@patch(f"{_MODULE}.TaskExecutionTracker")
118118
def test_dry_run_returns_count_without_triggering(
119119
self, tracker_cls, filter_blob_mock, version_mock
@@ -141,7 +141,7 @@ def test_dry_run_returns_count_without_triggering(
141141
)
142142

143143
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
144-
@patch(f"{_MODULE}._filter_datasets_with_existing_blob")
144+
@patch(f"{_MODULE}._filter_out_datasets_without_blob")
145145
@patch(f"{_MODULE}.execute_workflows", return_value=["ds-1", "ds-2"])
146146
@patch(f"{_MODULE}.TaskExecutionTracker")
147147
def test_triggers_workflows_when_not_dry_run(
@@ -162,7 +162,7 @@ def test_triggers_workflows_when_not_dry_run(
162162
self.assertFalse(result["params"]["dry_run"])
163163

164164
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
165-
@patch(f"{_MODULE}._filter_datasets_with_existing_blob")
165+
@patch(f"{_MODULE}._filter_out_datasets_without_blob")
166166
@patch(f"{_MODULE}.execute_workflows", return_value=["ds-1"])
167167
@patch(f"{_MODULE}.TaskExecutionTracker")
168168
def test_limit_slices_datasets(
@@ -187,9 +187,7 @@ def test_limit_slices_datasets(
187187
self.assertEqual(result["total_in_call"], 5)
188188

189189
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
190-
@patch(
191-
f"{_MODULE}._filter_datasets_with_existing_blob", return_value=[("f", "ds-1")]
192-
)
190+
@patch(f"{_MODULE}._filter_out_datasets_without_blob", return_value=[("f", "ds-1")])
193191
@patch(f"{_MODULE}.execute_workflows", return_value=["ds-1"])
194192
@patch(f"{_MODULE}.TaskExecutionTracker")
195193
def test_bypass_db_update_passed_explicitly(
@@ -206,9 +204,7 @@ def test_bypass_db_update_passed_explicitly(
206204
self.assertTrue(call_kwargs["bypass_db_update"])
207205

208206
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
209-
@patch(
210-
f"{_MODULE}._filter_datasets_with_existing_blob", return_value=[("f", "ds-1")]
211-
)
207+
@patch(f"{_MODULE}._filter_out_datasets_without_blob", return_value=[("f", "ds-1")])
212208
@patch(f"{_MODULE}.execute_workflows", return_value=["ds-1"])
213209
@patch(f"{_MODULE}.TaskExecutionTracker")
214210
def test_bypass_db_update_defaults_to_false(
@@ -248,7 +244,7 @@ def test_handler_passes_all_params(self, rebuild_mock):
248244
)
249245

250246
@patch(f"{_MODULE}._get_validator_version", return_value="7.0.0")
251-
@patch(f"{_MODULE}._filter_datasets_with_existing_blob", return_value=[])
247+
@patch(f"{_MODULE}._filter_out_datasets_without_blob", return_value=[])
252248
@patch(f"{_MODULE}.TaskExecutionTracker")
253249
def test_default_op_status_filters_published(
254250
self, tracker_cls, filter_blob_mock, version_mock
@@ -271,7 +267,7 @@ class TestFilterDatasetsWithExistingBlob(unittest.TestCase):
271267
def test_stops_at_limit(self, storage_mock):
272268
"""Should stop checking GCS as soon as limit valid datasets are found."""
273269
from tasks.validation_reports.rebuild_missing_validation_reports import (
274-
_filter_datasets_with_existing_blob,
270+
_filter_out_datasets_without_blob,
275271
)
276272

277273
bucket_mock = MagicMock()
@@ -283,7 +279,7 @@ def test_stops_at_limit(self, storage_mock):
283279
bucket_mock.blob.return_value = blob_mock
284280

285281
datasets = [(f"feed-{i}", f"ds-{i}") for i in range(20)]
286-
result = _filter_datasets_with_existing_blob(datasets, limit=3)
282+
result = _filter_out_datasets_without_blob(datasets, limit=3)
287283

288284
self.assertEqual(len(result), 3)
289285
# Only 3 GCS calls should have been made
@@ -293,7 +289,7 @@ def test_stops_at_limit(self, storage_mock):
293289
def test_skips_missing_blobs_and_continues(self, storage_mock):
294290
"""Should skip datasets with no blob and keep going until limit is reached."""
295291
from tasks.validation_reports.rebuild_missing_validation_reports import (
296-
_filter_datasets_with_existing_blob,
292+
_filter_out_datasets_without_blob,
297293
)
298294

299295
bucket_mock = MagicMock()
@@ -306,7 +302,7 @@ def test_skips_missing_blobs_and_continues(self, storage_mock):
306302
bucket_mock.blob.return_value = blob_mock
307303

308304
datasets = [(f"feed-{i}", f"ds-{i}") for i in range(7)]
309-
result = _filter_datasets_with_existing_blob(datasets, limit=3)
305+
result = _filter_out_datasets_without_blob(datasets, limit=3)
310306

311307
self.assertEqual(len(result), 3)
312308
# Must have checked 5 items: 2 missing + 3 valid

0 commit comments

Comments (0)