Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions e2e/src/pages/UserPreferencesExtensionPage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,29 @@ export class UserPreferencesExtensionPage extends HostPanelExtensionPage {
await expect(iframe.locator(`text=${config.falconUsername}`)).toBeVisible({ timeout: 10000 });
await expect(iframe.getByRole('button', { name: /Save Preferences/i })).toBeVisible({ timeout: 10000 });
}

/**
 * Drive the extension UI through a preference save, exercising the
 * collection write path end to end.
 * Assumes the extension is already expanded and iframe is visible
 * (call verifyExtensionRenders first).
 */
async savePreference(): Promise<void> {
  return this.withTiming(
    async () => {
      const frame = this.page.frameLocator('iframe');
      // Locators are lazy, so one definition serves both the visibility
      // check and the click below.
      const saveButton = frame.getByRole('button', { name: /Save Preferences/i });

      // A visible Save button confirms the form has finished loading.
      await expect(saveButton).toBeVisible({ timeout: 10000 });
      this.logger.info('Extension form loaded');

      // Trigger the collection write.
      await saveButton.click();
      this.logger.info('Clicked Save Preferences');

      // The success alert proves the write round-tripped.
      await expect(frame.locator('text=/Preferences saved successfully/i')).toBeVisible({ timeout: 10000 });
      this.logger.success('Preferences saved successfully - collection write verified');
    },
    'Save preference via extension'
  );
}
}
77 changes: 77 additions & 0 deletions e2e/src/pages/WorkflowsPage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -245,4 +245,81 @@ export class WorkflowsPage extends BasePage {
`Execute and verify workflow: ${workflowName}`
);
}

/**
 * Check the actual execution status by viewing the execution details.
 * Navigates to the execution log, waits for the execution to complete,
 * expands the execution row, and checks the status.
 *
 * @param timeoutMs Maximum time to wait for the execution to reach a terminal state.
 * @throws Error when the execution fails, or is still in progress after timeoutMs.
 */
async verifyWorkflowExecutionCompleted(timeoutMs = 120000): Promise<void> {
  return this.withTiming(
    async () => {
      this.logger.info('Checking workflow execution status in detail view');

      // The "View" link opens in a new tab - capture it
      const viewLink = this.page.getByRole('link', { name: /^view$/i });
      await viewLink.waitFor({ state: 'visible', timeout: 10000 });

      const [executionPage] = await Promise.all([
        this.page.context().waitForEvent('page'),
        viewLink.click(),
      ]);

      // try/finally guarantees the extra tab is closed even when a wait or
      // assertion below throws; the original leaked the page on failure.
      try {
        // Wait for the new tab to load (execution pages can be slow to render).
        // 'networkidle' already implies 'domcontentloaded' has fired, so a
        // separate wait for it is unnecessary.
        await executionPage.waitForLoadState('networkidle');
        this.logger.info('Execution page opened in new tab');

        // Wait for "Execution status" to appear (proves execution details loaded)
        const statusLabel = executionPage.getByText('Execution status');
        await statusLabel.waitFor({ state: 'visible', timeout: 60000 });
        this.logger.info('Execution details visible');

        // Poll until execution reaches a terminal state
        this.logger.info(`Waiting up to ${timeoutMs / 1000}s for execution to complete...`);

        const startTime = Date.now();
        while (Date.now() - startTime < timeoutMs) {
          // Re-find status label each iteration (DOM recreated on reload)
          const currentStatusLabel = executionPage.getByText('Execution status');
          await currentStatusLabel.waitFor({ state: 'visible', timeout: 15000 });
          const statusContainer = currentStatusLabel.locator('..');
          const statusText = await statusContainer.textContent() || '';
          const currentStatus = statusText.replace('Execution status', '').trim();
          this.logger.info(`Current status: ${currentStatus}`);

          if (currentStatus.toLowerCase().includes('failed')) {
            // Capture error details embedded in the page body, if present
            const pageContent = await executionPage.textContent('body') || '';
            const messageMatch = pageContent.match(/"message":\s*"([^"]+)"/);
            const errorMessage = messageMatch ? messageMatch[1] : 'Workflow action failed';

            this.logger.error(`Workflow execution failed: ${errorMessage}`);
            throw new Error(`Workflow execution failed: ${errorMessage}`);
          }

          if (!currentStatus.toLowerCase().includes('in progress')) {
            // Terminal state that isn't "Failed"
            this.logger.success(`Workflow execution completed with status: ${currentStatus}`);
            return;
          }

          await executionPage.waitForTimeout(5000);

          // Reload to get updated status - the page doesn't auto-refresh
          await executionPage.reload({ waitUntil: 'networkidle' });
        }

        throw new Error('Workflow execution timed out - still in progress');
      } finally {
        await executionPage.close();
      }
    },
    'Verify workflow execution completed'
  );
}
}
16 changes: 15 additions & 1 deletion e2e/tests/foundry.spec.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { test, expect } from '../src/fixtures';
import { test } from '../src/fixtures';

test.describe.configure({ mode: 'serial' });

Expand All @@ -9,29 +9,43 @@ test.describe('Collections Toolkit - E2E Tests', () => {
await userPreferencesExtensionPage.verifyExtensionRenders();
});

// End-to-end write path: render the extension, then save a preference into the collection.
test('should save preferences via User Preferences extension', async ({ userPreferencesExtensionPage }) => {
await userPreferencesExtensionPage.navigateToExtension();
await userPreferencesExtensionPage.verifyExtensionRenders();
await userPreferencesExtensionPage.savePreference();
});

// Smoke test: the Collections CRUD extension iframe renders its UI.
test('should render Collections CRUD extension', async ({ collectionsCRUDExtensionPage }) => {
await collectionsCRUDExtensionPage.navigateToExtension();
await collectionsCRUDExtensionPage.verifyExtensionRenders();
});

// Workflow Tests - All workflows are self-contained (no external API credentials required)
// Each test raises its timeout to 180s: executeAndVerifyWorkflow triggers the run, then
// verifyWorkflowExecutionCompleted polls the execution detail page (120s by default).
test('should execute Test log_event_handler function workflow', async ({ workflowsPage }) => {
test.setTimeout(180000);
await workflowsPage.navigateToWorkflows();
await workflowsPage.executeAndVerifyWorkflow('Test log_event_handler function');
await workflowsPage.verifyWorkflowExecutionCompleted();
});

test('should execute Test process_events_handler function workflow', async ({ workflowsPage }) => {
test.setTimeout(180000);
await workflowsPage.navigateToWorkflows();
await workflowsPage.executeAndVerifyWorkflow('Test process_events_handler function');
await workflowsPage.verifyWorkflowExecutionCompleted();
});

test('should execute Test user_preferences collection workflow', async ({ workflowsPage }) => {
test.setTimeout(180000);
await workflowsPage.navigateToWorkflows();
await workflowsPage.executeAndVerifyWorkflow('Test user_preferences collection');
await workflowsPage.verifyWorkflowExecutionCompleted();
});

test('should execute Paginate security_events collection workflow', async ({ workflowsPage }) => {
test.setTimeout(180000);
await workflowsPage.navigateToWorkflows();
await workflowsPage.executeAndVerifyWorkflow('Paginate security_events collection');
await workflowsPage.verifyWorkflowExecutionCompleted();
});
});
38 changes: 16 additions & 22 deletions functions/csv-import/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

import pandas as pd
from crowdstrike.foundry.function import Function, Request, Response, APIError
from falconpy import APIHarnessV2
from falconpy import CustomStorage

FUNC = Function.instance()

Expand Down Expand Up @@ -63,9 +63,8 @@ def import_csv_handler(request: Request, config: Dict[str, object] | None, logge

def _process_import_request(request: Request, collection_name: str, logger: Logger) -> Response:
"""Process the import request and return response."""
# Initialize API client and headers
api_client = APIHarnessV2()
headers = _get_headers()
# Initialize custom storage with app headers baked in
custom_storage = CustomStorage(ext_headers=_app_headers())

# Read CSV data
csv_data_result = _read_csv_data(request, logger)
Expand All @@ -77,7 +76,7 @@ def _process_import_request(request: Request, collection_name: str, logger: Logg
transformed_records = _process_dataframe(df, source_filename, import_timestamp)

# Import records to Collection with batch processing
import_results = batch_import_records(api_client, transformed_records, collection_name, headers)
import_results = batch_import_records(custom_storage, transformed_records, collection_name)

return _create_success_response({
"df": df,
Expand All @@ -89,12 +88,12 @@ def _process_import_request(request: Request, collection_name: str, logger: Logg
})


def _get_headers() -> Dict[str, str]:
"""Get headers for API requests."""
headers = {}
if os.environ.get("APP_ID"):
headers = {"X-CS-APP-ID": os.environ.get("APP_ID")}
return headers
def _app_headers() -> Dict[str, str]:
"""Build app headers for CustomStorage construction."""
app_id = os.environ.get("APP_ID")
if app_id:
return {"X-CS-APP-ID": app_id}
return {}


def _read_csv_data(request: Request, logger: Logger) -> Dict[str, Any]:
Expand Down Expand Up @@ -222,10 +221,9 @@ def validate_record(record: Dict[str, Any]) -> None:


def batch_import_records(
api_client: APIHarnessV2,
custom_storage: CustomStorage,
records: List[Dict[str, Any]],
collection_name: str,
headers: Dict[str, str],
batch_size: int = 50
) -> Dict[str, int]:
"""Import records to Collection in batches with rate limiting."""
Expand All @@ -241,10 +239,9 @@ def batch_import_records(
time.sleep(0.5)

batch_context = {
"api_client": api_client,
"custom_storage": custom_storage,
"batch": batch,
"collection_name": collection_name,
"headers": headers,
"batch_number": i // batch_size + 1
}

Expand All @@ -260,22 +257,19 @@ def batch_import_records(

def _process_batch(batch_context: Dict[str, Any]) -> Dict[str, int]:
"""Process a single batch of records."""
api_client = batch_context["api_client"]
custom_storage = batch_context["custom_storage"]
batch = batch_context["batch"]
collection_name = batch_context["collection_name"]
headers = batch_context["headers"]
batch_number = batch_context["batch_number"]

success_count = 0
error_count = 0

for record in batch:
try:
response = api_client.command("PutObject",
body=record,
collection_name=collection_name,
object_key=record["event_id"],
headers=headers)
response = custom_storage.PutObject(body=record,
collection_name=collection_name,
object_key=record["event_id"])

if response["status_code"] == 200:
success_count += 1
Expand Down
37 changes: 16 additions & 21 deletions functions/log-event/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,19 @@
import uuid

from crowdstrike.foundry.function import Function, Request, Response, APIError
from falconpy import APIHarnessV2
from falconpy import CustomStorage

FUNC = Function.instance()


def _app_headers() -> dict:
"""Build app headers for CustomStorage construction."""
app_id = os.environ.get("APP_ID")
if app_id:
return {"X-CS-APP-ID": app_id}
return {}


@FUNC.handler(method="POST", path="/log-event")
def on_post(request: Request) -> Response:
"""
Expand Down Expand Up @@ -40,22 +48,12 @@ def on_post(request: Request) -> Response:
"timestamp": int(time.time())
}

# Allow setting APP_ID as an env variable for local testing
headers = {}
if os.environ.get("APP_ID"):
headers = {
"X-CS-APP-ID": os.environ.get("APP_ID")
}

api_client = APIHarnessV2()
custom_storage = CustomStorage(ext_headers=_app_headers())
collection_name = "event_logs"

response = api_client.command("PutObject",
body=json_data,
collection_name=collection_name,
object_key=event_id,
headers=headers
)
response = custom_storage.PutObject(body=json_data,
collection_name=collection_name,
object_key=event_id)

if response["status_code"] != 200:
error_message = response.get("error", {}).get("message", "Unknown error")
Expand All @@ -68,12 +66,9 @@ def on_post(request: Request) -> Response:
)

# Query the collection to retrieve the event by id
query_response = api_client.command("SearchObjects",
filter=f"event_id:'{event_id}'",
collection_name=collection_name,
limit=5,
headers=headers
)
query_response = custom_storage.SearchObjects(filter=f"event_id:'{event_id}'",
collection_name=collection_name,
limit=5)

return Response(
body={
Expand Down
Loading
Loading