Skip to content

Commit 427f953

Browse files
committed
Rename api_client to custom_storage for consistency across Foundry samples
1 parent 6e4ee4c commit 427f953

3 files changed

Lines changed: 19 additions & 19 deletions

File tree

functions/csv-import/main.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -63,8 +63,8 @@ def import_csv_handler(request: Request, config: Dict[str, object] | None, logge
6363

6464
def _process_import_request(request: Request, collection_name: str, logger: Logger) -> Response:
6565
"""Process the import request and return response."""
66-
# Initialize API client with app headers baked in
67-
api_client = CustomStorage(ext_headers=_app_headers())
66+
# Initialize custom storage with app headers baked in
67+
custom_storage = CustomStorage(ext_headers=_app_headers())
6868

6969
# Read CSV data
7070
csv_data_result = _read_csv_data(request, logger)
@@ -76,7 +76,7 @@ def _process_import_request(request: Request, collection_name: str, logger: Logg
7676
transformed_records = _process_dataframe(df, source_filename, import_timestamp)
7777

7878
# Import records to Collection with batch processing
79-
import_results = batch_import_records(api_client, transformed_records, collection_name)
79+
import_results = batch_import_records(custom_storage, transformed_records, collection_name)
8080

8181
return _create_success_response({
8282
"df": df,
@@ -221,7 +221,7 @@ def validate_record(record: Dict[str, Any]) -> None:
221221

222222

223223
def batch_import_records(
224-
api_client: CustomStorage,
224+
custom_storage: CustomStorage,
225225
records: List[Dict[str, Any]],
226226
collection_name: str,
227227
batch_size: int = 50
@@ -239,7 +239,7 @@ def batch_import_records(
239239
time.sleep(0.5)
240240

241241
batch_context = {
242-
"api_client": api_client,
242+
"custom_storage": custom_storage,
243243
"batch": batch,
244244
"collection_name": collection_name,
245245
"batch_number": i // batch_size + 1
@@ -257,7 +257,7 @@ def batch_import_records(
257257

258258
def _process_batch(batch_context: Dict[str, Any]) -> Dict[str, int]:
259259
"""Process a single batch of records."""
260-
api_client = batch_context["api_client"]
260+
custom_storage = batch_context["custom_storage"]
261261
batch = batch_context["batch"]
262262
collection_name = batch_context["collection_name"]
263263
batch_number = batch_context["batch_number"]
@@ -267,7 +267,7 @@ def _process_batch(batch_context: Dict[str, Any]) -> Dict[str, int]:
267267

268268
for record in batch:
269269
try:
270-
response = api_client.PutObject(body=record,
270+
response = custom_storage.PutObject(body=record,
271271
collection_name=collection_name,
272272
object_key=record["event_id"])
273273

functions/log-event/main.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,10 @@ def on_post(request: Request) -> Response:
4848
"timestamp": int(time.time())
4949
}
5050

51-
api_client = CustomStorage(ext_headers=_app_headers())
51+
custom_storage = CustomStorage(ext_headers=_app_headers())
5252
collection_name = "event_logs"
5353

54-
response = api_client.PutObject(body=json_data,
54+
response = custom_storage.PutObject(body=json_data,
5555
collection_name=collection_name,
5656
object_key=event_id)
5757

@@ -66,7 +66,7 @@ def on_post(request: Request) -> Response:
6666
)
6767

6868
# Query the collection to retrieve the event by id
69-
query_response = api_client.SearchObjects(filter=f"event_id:'{event_id}'",
69+
query_response = custom_storage.SearchObjects(filter=f"event_id:'{event_id}'",
7070
collection_name=collection_name,
7171
limit=5)
7272

functions/process-events/main.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def process_events_handler(request: Request, config: Dict[str, object] | None, l
2424
_ = config
2525

2626
try:
27-
# Initialize API client and workflow
27+
# Initialize custom storage and workflow
2828
workflow_context = _initialize_workflow(request, logger)
2929

3030
# Get checkpoint data
@@ -51,16 +51,16 @@ def process_events_handler(request: Request, config: Dict[str, object] | None, l
5151

5252

5353
def _initialize_workflow(request: Request, logger: Logger) -> Dict[str, Any]:
54-
"""Initialize workflow context with API client and configuration."""
55-
api_client = CustomStorage(ext_headers=_app_headers())
54+
"""Initialize workflow context with custom storage and configuration."""
55+
custom_storage = CustomStorage(ext_headers=_app_headers())
5656

5757
checkpoint_collection = "processing_checkpoints"
5858
workflow_id = request.body.get("workflow_id", "default")
5959

6060
logger.info(f"Processing workflow ID: {workflow_id}")
6161

6262
return {
63-
"api_client": api_client,
63+
"custom_storage": custom_storage,
6464
"checkpoint_collection": checkpoint_collection,
6565
"workflow_id": workflow_id,
6666
"logger": logger
@@ -77,13 +77,13 @@ def _app_headers() -> Dict[str, str]:
7777

7878
def _get_checkpoint(workflow_context: Dict[str, Any]) -> Dict[str, Any]:
7979
"""Retrieve the last checkpoint for the workflow."""
80-
api_client = workflow_context["api_client"]
80+
custom_storage = workflow_context["custom_storage"]
8181
checkpoint_collection = workflow_context["checkpoint_collection"]
8282
workflow_id = workflow_context["workflow_id"]
8383
logger = workflow_context["logger"]
8484

8585
# Retrieve the most recent checkpoint for this workflow
86-
checkpoint_response = api_client.SearchObjects(filter=f"workflow_id:'{workflow_id}'",
86+
checkpoint_response = custom_storage.SearchObjects(filter=f"workflow_id:'{workflow_id}'",
8787
collection_name=checkpoint_collection,
8888
sort="last_processed_timestamp.desc",
8989
limit=1)
@@ -97,7 +97,7 @@ def _get_checkpoint(workflow_context: Dict[str, Any]) -> Dict[str, Any]:
9797
logger.debug(f"last_checkpoint: {last_checkpoint}")
9898

9999
# SearchObjects returns metadata, not actual objects, so use GetObject for details
100-
object_details = api_client.GetObject(collection_name=checkpoint_collection,
100+
object_details = custom_storage.GetObject(collection_name=checkpoint_collection,
101101
object_key=last_checkpoint["object_key"])
102102

103103
# GetObject returns bytes; convert to JSON
@@ -113,7 +113,7 @@ def _get_checkpoint(workflow_context: Dict[str, Any]) -> Dict[str, Any]:
113113

114114
def _process_and_update(workflow_context: Dict[str, Any], checkpoint_data: Dict[str, Any]) -> Response:
115115
"""Process events and update checkpoint."""
116-
api_client = workflow_context["api_client"]
116+
custom_storage = workflow_context["custom_storage"]
117117
checkpoint_collection = workflow_context["checkpoint_collection"]
118118
workflow_id = workflow_context["workflow_id"]
119119
logger = workflow_context["logger"]
@@ -141,7 +141,7 @@ def _process_and_update(workflow_context: Dict[str, Any], checkpoint_data: Dict[
141141

142142
logger.debug(f"Sending data to PutObject: {checkpoint_update}")
143143

144-
api_client.PutObject(body=checkpoint_update,
144+
custom_storage.PutObject(body=checkpoint_update,
145145
collection_name=checkpoint_collection,
146146
object_key=f"checkpoint_{workflow_id}")
147147

0 commit comments

Comments (0)