From f17e081a3a0ff122997eba82ed9c19cc82f7bd8d Mon Sep 17 00:00:00 2001
From: pballai
Date: Wed, 27 Aug 2025 21:31:23 -0400
Subject: [PATCH] Update Recipe Portal - Complete QuickStarts API Toolkit
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- Advanced API Explorer with smart parameter detection
- Token management and secure key storage
- Comprehensive recipe collection (members, teams, workbooks, connections, embedding)
- Download and export functionality
- Complete authentication system
- Request analyzer and smart parameter forms
This replaces the previous incomplete version with the full-featured toolkit.
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-Authored-By: Claude
---
recipe-portal/.gitignore | 39 +-
recipe-portal/README.md | 129 ++
recipe-portal/app/api/call/route.ts | 141 ++
recipe-portal/app/api/code/route.ts | 6 +-
.../app/api/download-stream/route.ts | 695 +++++++++
recipe-portal/app/api/download/route.ts | 408 ++++++
recipe-portal/app/api/env/route.ts | 2 +-
recipe-portal/app/api/execute/route.ts | 62 +-
recipe-portal/app/api/keys/route.ts | 148 ++
recipe-portal/app/api/open-folder/route.ts | 43 +
recipe-portal/app/api/readme/route.ts | 125 +-
recipe-portal/app/api/resources/route.ts | 281 ++++
recipe-portal/app/api/token/clear/route.ts | 65 +
recipe-portal/app/api/token/route.ts | 87 ++
recipe-portal/app/layout.tsx | 9 +-
recipe-portal/app/page.tsx | 255 +++-
recipe-portal/app/test/page.tsx | 8 +
recipe-portal/bug.png | Bin 0 -> 121989 bytes
recipe-portal/components/CodeViewer.tsx | 1285 ++++++++++++++---
recipe-portal/components/QuickApiExplorer.tsx | 287 ++++
recipe-portal/components/QuickApiModal.tsx | 259 ++++
recipe-portal/components/RecipeCard.tsx | 81 +-
.../components/SmartParameterForm.tsx | 455 ++++++
recipe-portal/lib/keyStorage.ts | 271 ++++
recipe-portal/lib/recipeScanner.ts | 28 +-
recipe-portal/lib/requestAnalyzer.ts | 224 +++
recipe-portal/lib/smartParameters.ts | 488 +++++++
recipe-portal/public/crane.png | Bin 0 -> 11810 bytes
recipe-portal/recipes/.member-emails | 3 +
.../connections/list_all_connections.js | 64 +
.../connections/list_all_connections.md | 22 +
.../recipes/connections/sync_schema.js | 176 +++
.../recipes/connections/sync_schema.md | 27 +
.../embedding/generate_workbook_embed_path.js | 92 ++
.../embedding/generate_workbook_embed_path.md | 25 +
recipe-portal/recipes/get-access-token.js | 60 +
recipe-portal/recipes/launch.json | 13 +
.../recipes/members/bulk-create-members.js | 183 +++
.../recipes/members/bulk-create-members.md | 73 +
.../recipes/members/bulk-deactivate.js | 115 ++
.../recipes/members/bulk-deactivate.md | 25 +
.../members/create-connection-permission.js | 52 +
.../members/create-connection-permission.md | 24 +
recipe-portal/recipes/members/create-new.js | 112 ++
recipe-portal/recipes/members/create-new.md | 28 +
.../members/create-workspace-permission.js | 94 ++
.../members/create-workspace-permission.md | 28 +
.../recipes/members/create-workspace.js | 100 ++
.../recipes/members/create-workspace.md | 24 +
.../recipes/members/deactivate-existing.js | 37 +
.../recipes/members/deactivate-existing.md | 24 +
.../recipes/members/get-member-details.js | 58 +
.../recipes/members/get-member-details.md | 25 +
recipe-portal/recipes/members/list-all.js | 133 ++
recipe-portal/recipes/members/list-all.md | 67 +
.../recipes/members/master-script.js | 145 ++
.../recipes/members/master-script.md | 24 +
.../recipes/members/recent-workbooks.js | 78 +
.../recipes/members/recent-workbooks.md | 24 +
recipe-portal/recipes/members/update.js | 52 +
recipe-portal/recipes/members/update.md | 24 +
recipe-portal/recipes/members_output.json | 92 ++
recipe-portal/recipes/package-lock.json | 304 ++++
recipe-portal/recipes/package.json | 10 +
.../recipes/teams/add-member-to-team.js | 44 +
.../recipes/teams/add-member-to-team.md | 24 +
.../recipes/teams/bulk-assign-team.js | 77 +
.../recipes/teams/bulk-assign-team.md | 34 +
.../recipes/teams/bulk-remove-team.js | 81 ++
.../recipes/teams/bulk-remove-team.md | 34 +
.../PlugsSalesPerformanceDashboard.pdf.temp | Bin 0 -> 61867 bytes
.../recipes/workbooks/all-input-tables.js | 84 ++
.../recipes/workbooks/all-input-tables.md | 26 +
.../recipes/workbooks/copy-workbook-folder.js | 104 ++
.../recipes/workbooks/copy-workbook-folder.md | 26 +
.../workbooks/export-workbook-element-csv.js | 291 ++++
.../workbooks/export-workbook-element-csv.md | 62 +
.../recipes/workbooks/export-workbook-pdf.js | 180 +++
.../recipes/workbooks/export-workbook-pdf.md | 51 +
.../workbooks/get-column-names-all-pages | 125 ++
.../workbooks/get-workbooks-name-url-TABLE.js | 69 +
.../workbooks/get-workbooks-name-url-TABLE.md | 25 +
.../workbooks/initiate-materialization.js | 137 ++
.../workbooks/initiate-materialization.md | 35 +
recipe-portal/recipes/workbooks/list-all.js | 121 ++
recipe-portal/recipes/workbooks/list-all.md | 52 +
.../workbooks/list-workbooks-by-owner.js | 71 +
.../workbooks/list-workbooks-by-owner.md | 30 +
recipe-portal/recipes/workbooks/pagination.js | 98 ++
recipe-portal/recipes/workbooks/pagination.md | 39 +
.../recipes/workbooks/shared-with-memberId.js | 76 +
.../recipes/workbooks/shared-with-memberId.md | 25 +
recipe-portal/recipes/workbooks/test-export | 25 +
.../recipes/workbooks/test-export.pdf | Bin 0 -> 61867 bytes
recipe-portal/recipes/workbooks/test.temp | 135 ++
.../recipes/workbooks/update-owner.js | 44 +
.../recipes/workbooks/update-owner.md | 25 +
97 files changed, 10186 insertions(+), 382 deletions(-)
create mode 100644 recipe-portal/README.md
create mode 100644 recipe-portal/app/api/call/route.ts
create mode 100644 recipe-portal/app/api/download-stream/route.ts
create mode 100644 recipe-portal/app/api/download/route.ts
create mode 100644 recipe-portal/app/api/keys/route.ts
create mode 100644 recipe-portal/app/api/open-folder/route.ts
create mode 100644 recipe-portal/app/api/resources/route.ts
create mode 100644 recipe-portal/app/api/token/clear/route.ts
create mode 100644 recipe-portal/app/api/token/route.ts
create mode 100644 recipe-portal/app/test/page.tsx
create mode 100644 recipe-portal/bug.png
create mode 100644 recipe-portal/components/QuickApiExplorer.tsx
create mode 100644 recipe-portal/components/QuickApiModal.tsx
create mode 100644 recipe-portal/components/SmartParameterForm.tsx
create mode 100644 recipe-portal/lib/keyStorage.ts
create mode 100644 recipe-portal/lib/requestAnalyzer.ts
create mode 100644 recipe-portal/lib/smartParameters.ts
create mode 100644 recipe-portal/public/crane.png
create mode 100644 recipe-portal/recipes/.member-emails
create mode 100644 recipe-portal/recipes/connections/list_all_connections.js
create mode 100644 recipe-portal/recipes/connections/list_all_connections.md
create mode 100644 recipe-portal/recipes/connections/sync_schema.js
create mode 100644 recipe-portal/recipes/connections/sync_schema.md
create mode 100644 recipe-portal/recipes/embedding/generate_workbook_embed_path.js
create mode 100644 recipe-portal/recipes/embedding/generate_workbook_embed_path.md
create mode 100644 recipe-portal/recipes/get-access-token.js
create mode 100644 recipe-portal/recipes/launch.json
create mode 100644 recipe-portal/recipes/members/bulk-create-members.js
create mode 100644 recipe-portal/recipes/members/bulk-create-members.md
create mode 100644 recipe-portal/recipes/members/bulk-deactivate.js
create mode 100644 recipe-portal/recipes/members/bulk-deactivate.md
create mode 100644 recipe-portal/recipes/members/create-connection-permission.js
create mode 100644 recipe-portal/recipes/members/create-connection-permission.md
create mode 100644 recipe-portal/recipes/members/create-new.js
create mode 100644 recipe-portal/recipes/members/create-new.md
create mode 100644 recipe-portal/recipes/members/create-workspace-permission.js
create mode 100644 recipe-portal/recipes/members/create-workspace-permission.md
create mode 100644 recipe-portal/recipes/members/create-workspace.js
create mode 100644 recipe-portal/recipes/members/create-workspace.md
create mode 100644 recipe-portal/recipes/members/deactivate-existing.js
create mode 100644 recipe-portal/recipes/members/deactivate-existing.md
create mode 100644 recipe-portal/recipes/members/get-member-details.js
create mode 100644 recipe-portal/recipes/members/get-member-details.md
create mode 100644 recipe-portal/recipes/members/list-all.js
create mode 100644 recipe-portal/recipes/members/list-all.md
create mode 100644 recipe-portal/recipes/members/master-script.js
create mode 100644 recipe-portal/recipes/members/master-script.md
create mode 100644 recipe-portal/recipes/members/recent-workbooks.js
create mode 100644 recipe-portal/recipes/members/recent-workbooks.md
create mode 100644 recipe-portal/recipes/members/update.js
create mode 100644 recipe-portal/recipes/members/update.md
create mode 100644 recipe-portal/recipes/members_output.json
create mode 100644 recipe-portal/recipes/package-lock.json
create mode 100644 recipe-portal/recipes/package.json
create mode 100644 recipe-portal/recipes/teams/add-member-to-team.js
create mode 100644 recipe-portal/recipes/teams/add-member-to-team.md
create mode 100644 recipe-portal/recipes/teams/bulk-assign-team.js
create mode 100644 recipe-portal/recipes/teams/bulk-assign-team.md
create mode 100644 recipe-portal/recipes/teams/bulk-remove-team.js
create mode 100644 recipe-portal/recipes/teams/bulk-remove-team.md
create mode 100644 recipe-portal/recipes/workbooks/PlugsSalesPerformanceDashboard.pdf.temp
create mode 100644 recipe-portal/recipes/workbooks/all-input-tables.js
create mode 100644 recipe-portal/recipes/workbooks/all-input-tables.md
create mode 100644 recipe-portal/recipes/workbooks/copy-workbook-folder.js
create mode 100644 recipe-portal/recipes/workbooks/copy-workbook-folder.md
create mode 100644 recipe-portal/recipes/workbooks/export-workbook-element-csv.js
create mode 100644 recipe-portal/recipes/workbooks/export-workbook-element-csv.md
create mode 100644 recipe-portal/recipes/workbooks/export-workbook-pdf.js
create mode 100644 recipe-portal/recipes/workbooks/export-workbook-pdf.md
create mode 100644 recipe-portal/recipes/workbooks/get-column-names-all-pages
create mode 100644 recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.js
create mode 100644 recipe-portal/recipes/workbooks/get-workbooks-name-url-TABLE.md
create mode 100644 recipe-portal/recipes/workbooks/initiate-materialization.js
create mode 100644 recipe-portal/recipes/workbooks/initiate-materialization.md
create mode 100644 recipe-portal/recipes/workbooks/list-all.js
create mode 100644 recipe-portal/recipes/workbooks/list-all.md
create mode 100644 recipe-portal/recipes/workbooks/list-workbooks-by-owner.js
create mode 100644 recipe-portal/recipes/workbooks/list-workbooks-by-owner.md
create mode 100644 recipe-portal/recipes/workbooks/pagination.js
create mode 100644 recipe-portal/recipes/workbooks/pagination.md
create mode 100644 recipe-portal/recipes/workbooks/shared-with-memberId.js
create mode 100644 recipe-portal/recipes/workbooks/shared-with-memberId.md
create mode 100644 recipe-portal/recipes/workbooks/test-export
create mode 100644 recipe-portal/recipes/workbooks/test-export.pdf
create mode 100644 recipe-portal/recipes/workbooks/test.temp
create mode 100644 recipe-portal/recipes/workbooks/update-owner.js
create mode 100644 recipe-portal/recipes/workbooks/update-owner.md
diff --git a/recipe-portal/.gitignore b/recipe-portal/.gitignore
index 8d8b4022..e659078e 100644
--- a/recipe-portal/.gitignore
+++ b/recipe-portal/.gitignore
@@ -1,35 +1,42 @@
-# Dependencies
-node_modules/
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
/.pnp
.pnp.js
-# Testing
+# testing
/coverage
-# Next.js
+# next.js
/.next/
/out/
-# Production
+# production
/build
-# Misc
+# misc
.DS_Store
-*.tsbuildinfo
-next-env.d.ts
+*.pem
-# Debug
+# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
-# Local env files
+# local env files
.env*.local
-# IDE
-.vscode/
-.idea/
+# vercel
+.vercel
-# OS
-.DS_Store
-Thumbs.db
\ No newline at end of file
+# typescript
+*.tsbuildinfo
+next-env.d.ts
+
+# Sigma API encrypted credentials (security)
+.sigma-portal/
+sigma-portal-keys.json
+
+# Environment files
+.env
\ No newline at end of file
diff --git a/recipe-portal/README.md b/recipe-portal/README.md
new file mode 100644
index 00000000..ce05f86e
--- /dev/null
+++ b/recipe-portal/README.md
@@ -0,0 +1,129 @@
+# QuickStarts API Toolkit
+Experiment with Sigma API calls and learn common request flows
+
+## Features
+
+### Recipes:
+- **Smart Parameter Detection**: Automatically detects and provides dropdown selection for Sigma resources (teams, members, workbooks, etc.)
+- **Interactive Execution**: Run recipes directly in the browser with real-time results
+- **Parameter Summary**: View which parameters were used in each request
+- **Code Viewing**: Browse the actual JavaScript code for each recipe
+
+### Quick API Explorer:
+- **Common Endpoints**: Curated list of the most useful Sigma API endpoints
+- **Zero Setup**: List endpoints require no parameters - perfect for quick exploration
+- **One Parameter**: Detail endpoints need just one ID to get specific resource information
+- **Alphabetical Organization**: Easy to find the endpoint you need
+
+## Authentication & Config Management
+
+### Smart Config System:
+- **Complete Configuration Storage**: Server endpoints + API credentials stored together as named "configs"
+- **Multi-Environment Support**: Easily switch between Production, Staging, Development environments
+- **One-Click Environment Switching**: Load complete configurations instantly
+- **Encrypted Local Storage**: AES-256 encryption for credential security
+
+### Config Management Features:
+- **Quick Start**: Load saved configs with one click - no manual entry needed
+- **Create New Configs**: Mix and match server endpoints with credentials
+- **Update Existing Configs**: Modify and save changes to existing configurations
+- **Delete Configs**: Remove configs you no longer need
+- **Auto-Save**: Configs saved automatically during authentication when enabled
+- **Manual Save**: Explicit save button for immediate config storage
+
+### Token Management:
+- **File-Based Storage**: Authentication tokens cached in system temp directory
+- **Persistent Sessions**: Tokens survive browser/server restarts for the full hour
+- **Automatic Expiration**: Tokens expire after 1 hour (Sigma's standard lifetime)
+- **Auto-Cleanup**: Expired tokens automatically detected and removed
+- **Manual Session End**: Clear authentication anytime with 🚪 End Session button
+
+### Storage Locations
+
+**Config Storage (encrypted)**:
+- **macOS**: `~/Library/Application Support/.sigma-portal/encrypted-keys.json`
+- **Windows**: `%APPDATA%\.sigma-portal\encrypted-keys.json`
+- **Linux**: `~/.config/.sigma-portal/encrypted-keys.json`
+
+**Token Cache (temporary)**:
+- **macOS**: `/var/folders/.../sigma-portal-token.json`
+- **Windows**: `%TEMP%\sigma-portal-token.json`
+- **Linux**: `/tmp/sigma-portal-token.json`
+
+### Developer Experience Benefits
+- **Environment Switching**: Instant switch between Production → Staging → Development
+- **Zero Re-entry**: Load complete configs without typing credentials repeatedly
+- **Secure Storage**: Military-grade AES-256 encryption for stored credentials
+- **Clean Separation**: Configs stored outside project directory (never committed to git)
+- **Visual Feedback**: Clear indicators show saved/unsaved state and notifications
+- **Flexible Workflow**: Session-only credentials OR persistent named configs
+
+### Config Workflow
+1. **First Time**: Enter server endpoint + credentials → Save as named config (e.g., "Production")
+2. **Daily Use**: Quick Start → Select "Production" → Instantly loaded and ready
+3. **Environment Switch**: Quick Start → Select "Staging" → Switched in one click
+4. **New Environment**: "✨ New Config" → Enter details → Save with new name
+
+## Getting Started
+Sigma_QuickStart_Public_Repo
+
+
+1. **Setup**: `npm install && npm run dev`
+2. **First-Time Config**: Open any recipe → **Config** tab → Enter server endpoint + credentials → Save as named config
+3. **Daily Use**: **Quick Start** section → Select your saved config → Ready to go!
+4. **Explore**: Use the ⚡ Quick API tab to explore common endpoints with smart parameters
+5. **Run Recipes**: Browse recipes by category and execute them with real-time results
+
+### Config Tab Features
+- **Quick Start**: Load saved configs instantly (appears when configs exist)
+- **Server Endpoint**: Choose your Sigma organization's server location
+- **API Credentials**: Enter Client ID and Client Secret
+- **Config Storage**: Save complete configurations with names like "Production", "Staging"
+- **Save Config**: Manual save button for immediate storage
+- **New Config**: Clear form to create fresh configurations
+- **Delete**: Remove configs you no longer need (🗑️ button when config selected)
+
+## Requirements
+- Node.js 18+
+- Sigma API credentials (Client ID and Secret)
+- Valid Sigma organization access
+
+## Development
+```bash
+npm install
+npm run dev
+```
+
+Navigate to `http://localhost:3001` to start exploring the Sigma API.
+
+## Project Structure
+```
+recipe-portal/
+├── app/                    # Next.js app directory
+│   ├── api/                # API routes
+│   │   ├── execute/        # Recipe execution
+│   │   ├── resources/      # Resource fetching for dropdowns
+│   │   ├── keys/           # Config management (CRUD operations)
+│   │   ├── token/          # Token management & caching
+│   │   └── call/           # Quick API endpoint calls
+├── components/             # React components
+│   ├── QuickApiExplorer.tsx   # Quick API exploration interface
+│   ├── QuickApiModal.tsx      # API endpoint execution modal
+│   ├── SmartParameterForm.tsx # Smart parameter detection & forms
+│   ├── CodeViewer.tsx         # Recipe viewer with Config tab
+│   ├── AuthRecipeCard.tsx     # Authentication recipe card
+│   └── RecipeCard.tsx         # Standard recipe cards
+├── lib/                    # Utilities
+│   ├── smartParameters.ts     # Parameter detection logic
+│   ├── keyStorage.ts          # Encrypted config storage
+│   └── recipeScanner.ts       # Recipe discovery & analysis
+└── recipes/                # Self-contained recipe files (copied from sigma-api-recipes)
+    ├── connections/        # Connection-related recipes
+    ├── members/            # Member management recipes
+    ├── teams/              # Team management recipes
+    ├── workbooks/          # Workbook operations
+    ├── embedding/          # Embedding examples
+    └── get-access-token.js # Authentication helper
+```
+
+For setup instructions and API credential creation, visit the QuickStart: [Sigma REST API Recipes](https://quickstarts.sigmacomputing.com/guide/developers_api_code_samples/index.html?index=..%2F..index#0)
\ No newline at end of file
diff --git a/recipe-portal/app/api/call/route.ts b/recipe-portal/app/api/call/route.ts
new file mode 100644
index 00000000..ca483b8e
--- /dev/null
+++ b/recipe-portal/app/api/call/route.ts
@@ -0,0 +1,141 @@
+import { NextResponse } from 'next/server';
+import axios from 'axios';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+const TOKEN_CACHE_FILE = path.join(os.tmpdir(), 'sigma-portal-token.json');
+
+function getCachedToken(): string | null {
+ try {
+ if (fs.existsSync(TOKEN_CACHE_FILE)) {
+ const tokenData = JSON.parse(fs.readFileSync(TOKEN_CACHE_FILE, 'utf8'));
+ const now = Date.now();
+
+ // Check if token is still valid (not expired)
+ if (tokenData.expiresAt && now < tokenData.expiresAt) {
+ return tokenData.token;
+ } else {
+ // Token expired, remove file
+ fs.unlinkSync(TOKEN_CACHE_FILE);
+ }
+ }
+ } catch (error) {
+ // Ignore errors, just return null
+ }
+ return null;
+}
+
+export async function POST(request: Request) {
+ try {
+ const { endpoint, method, parameters = {}, requestBody } = await request.json();
+
+ if (!endpoint) {
+ return NextResponse.json(
+ { error: 'Endpoint is required' },
+ { status: 400 }
+ );
+ }
+
+ // Get cached token
+ const token = getCachedToken();
+ if (!token) {
+ return NextResponse.json(
+ {
+ error: 'Authentication required',
+ message: 'No valid authentication token found. Please authenticate first.'
+ },
+ { status: 401 }
+ );
+ }
+
+ // Build the full URL
+ const baseURL = process.env.SIGMA_BASE_URL || 'https://aws-api.sigmacomputing.com/v2';
+ let url = `${baseURL}${endpoint}`;
+
+ // Add query parameters
+ if (parameters.query && Object.keys(parameters.query).length > 0) {
+ const queryParams = new URLSearchParams();
+ Object.entries(parameters.query).forEach(([key, value]) => {
+ if (value !== undefined && value !== '') {
+ queryParams.append(key, String(value));
+ }
+ });
+ if (queryParams.toString()) {
+ url += `?${queryParams.toString()}`;
+ }
+ }
+
+ // Prepare headers
+ const headers: Record<string, string> = {
+ 'Authorization': `Bearer ${token}`,
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json'
+ };
+
+ // Add header parameters
+ if (parameters.header) {
+ Object.entries(parameters.header).forEach(([key, value]) => {
+ if (value !== undefined && value !== '') {
+ headers[key] = String(value);
+ }
+ });
+ }
+
+ // Make the API call
+ const response = await axios({
+ method: method.toLowerCase(),
+ url,
+ headers,
+ data: requestBody,
+ timeout: 30000 // 30 second timeout
+ });
+
+ // Return successful response
+ return NextResponse.json({
+ output: JSON.stringify(response.data, null, 2),
+ error: '',
+ success: true,
+ timestamp: new Date().toISOString(),
+ httpStatus: response.status,
+ httpStatusText: response.statusText,
+ requestUrl: url,
+ requestMethod: method
+ });
+
+ } catch (error: any) {
+ console.error('API call error:', error);
+
+ let errorMessage = 'Unknown error occurred';
+ let httpStatus = 500;
+ let httpStatusText = 'Internal Server Error';
+
+ if (axios.isAxiosError(error)) {
+ if (error.response) {
+ // Server responded with error status
+ httpStatus = error.response.status;
+ httpStatusText = error.response.statusText;
+ errorMessage = error.response.data?.message || error.response.data?.error || `HTTP ${httpStatus}: ${httpStatusText}`;
+ } else if (error.request) {
+ // Request made but no response
+ errorMessage = 'No response received from server';
+ httpStatus = 0;
+ httpStatusText = 'Network Error';
+ } else {
+ // Error setting up request
+ errorMessage = error.message;
+ }
+ } else {
+ errorMessage = error.message || 'Unknown error';
+ }
+
+ return NextResponse.json({
+ output: '',
+ error: errorMessage,
+ success: false,
+ timestamp: new Date().toISOString(),
+ httpStatus,
+ httpStatusText
+ });
+ }
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/code/route.ts b/recipe-portal/app/api/code/route.ts
index 3679adf8..68d68863 100644
--- a/recipe-portal/app/api/code/route.ts
+++ b/recipe-portal/app/api/code/route.ts
@@ -14,14 +14,14 @@ export async function GET(request: Request) {
);
}
- // Security check: ensure the file is within the sigma-api-recipes directory
- const recipesPath = path.join(process.cwd(), '..', 'sigma-api-recipes');
+ // Security check: ensure the file is within the recipes directory
+ const recipesPath = path.join(process.cwd(), 'recipes');
const resolvedPath = path.resolve(filePath);
const resolvedRecipesPath = path.resolve(recipesPath);
if (!resolvedPath.startsWith(resolvedRecipesPath)) {
return NextResponse.json(
- { error: 'Access denied: File must be within sigma-api-recipes directory' },
+ { error: 'Access denied: File must be within recipes directory' },
{ status: 403 }
);
}
diff --git a/recipe-portal/app/api/download-stream/route.ts b/recipe-portal/app/api/download-stream/route.ts
new file mode 100644
index 00000000..a8716590
--- /dev/null
+++ b/recipe-portal/app/api/download-stream/route.ts
@@ -0,0 +1,695 @@
+import { NextResponse } from 'next/server';
+import { spawn } from 'child_process';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+export async function POST(request: Request) {
+ try {
+ // Log request details for debugging
+ console.log('Download stream request received from:', request.headers.get('referer'));
+ console.log('User agent:', request.headers.get('user-agent'));
+
+ // Check if request has a body
+ const body = await request.text();
+ if (!body || body.trim() === '') {
+ console.warn('Empty request body to download-stream endpoint');
+ return NextResponse.json(
+ { error: 'Request body is empty. This endpoint is only for file download scripts.' },
+ { status: 400 }
+ );
+ }
+
+ let parsedBody;
+ try {
+ parsedBody = JSON.parse(body);
+ } catch (parseError) {
+ return NextResponse.json(
+ { error: 'Invalid JSON in request body. This endpoint is only for file download scripts.' },
+ { status: 400 }
+ );
+ }
+
+ const { filePath, envVariables, filename, contentType } = parsedBody;
+
+ if (!filePath) {
+ return NextResponse.json(
+ { error: 'File path is required' },
+ { status: 400 }
+ );
+ }
+
+ // Security check: ensure the file is within the recipes directory
+ const recipesPath = path.join(process.cwd(), 'recipes');
+ const resolvedPath = path.resolve(filePath);
+ const resolvedRecipesPath = path.resolve(recipesPath);
+
+ if (!resolvedPath.startsWith(resolvedRecipesPath)) {
+ return NextResponse.json(
+ { error: 'Access denied: File must be within recipes directory' },
+ { status: 403 }
+ );
+ }
+
+ // Check if file exists
+ if (!fs.existsSync(resolvedPath)) {
+ return NextResponse.json(
+ { error: 'File not found' },
+ { status: 404 }
+ );
+ }
+
+ // Create a readable stream for server-sent events
+ const stream = new ReadableStream({
+ start(controller) {
+ executeDownloadWithProgress(resolvedPath, envVariables, controller);
+ }
+ });
+
+ return new Response(stream, {
+ headers: {
+ 'Content-Type': 'text/event-stream',
+ 'Cache-Control': 'no-cache',
+ 'Connection': 'keep-alive',
+ },
+ });
+
+ } catch (error) {
+ console.error('Error executing download stream:', error);
+ return NextResponse.json(
+ { error: 'Failed to start download stream' },
+ { status: 500 }
+ );
+ }
+}
+
+async function executeDownloadWithProgress(
+ scriptPath: string,
+ envVariables: Record<string, string>,
+ controller: ReadableStreamDefaultController
+) {
+ const scriptDir = path.dirname(scriptPath);
+ const recipesRoot = path.join(scriptDir, '..');
+
+ // Create temporary .env file
+ const tempEnvPath = path.join(os.tmpdir(), `.env-${Date.now()}`);
+ let envContent = '';
+
+ if (envVariables && typeof envVariables === 'object') {
+ for (const [key, value] of Object.entries(envVariables)) {
+ if (typeof value === 'string') {
+ envContent += `${key}=${value}\n`;
+ }
+ }
+ }
+
+ // Add common variables if not provided
+ if (envVariables && !envVariables.authURL && (envVariables.CLIENT_ID || envVariables.SECRET)) {
+ envContent += `authURL=https://aws-api.sigmacomputing.com/v2/auth/token\n`;
+ }
+ if (envVariables && !envVariables.baseURL && (envVariables.CLIENT_ID || envVariables.SECRET)) {
+ envContent += `baseURL=https://aws-api.sigmacomputing.com/v2\n`;
+ }
+
+ envContent += `ENV_FILE_PATH=${tempEnvPath}\n`;
+
+
+ fs.writeFileSync(tempEnvPath, envContent);
+
+ const sendProgress = (type: string, message: string, data?: any) => {
+ // Handle large content safely for JSON stringification
+ let safeData = data;
+ if (data && data.content && typeof data.content === 'string' && data.content.length > 10000) {
+ // For large content, create a truncated version for the JSON but keep the full content accessible
+ safeData = {
+ ...data,
+ content: '[Large content: ' + data.content.length + ' characters]',
+ _fullContent: data.content, // Store full content separately
+ _isLargeContent: true
+ };
+ }
+
+ try {
+ const event = `data: ${JSON.stringify({ type, message, data: safeData, timestamp: new Date().toISOString() })}\n\n`;
+ controller.enqueue(new TextEncoder().encode(event));
+ } catch (error) {
+ // Fallback for JSON stringification errors
+ const fallbackEvent = `data: ${JSON.stringify({ type, message: message + ' (JSON error)', timestamp: new Date().toISOString() })}\n\n`;
+ controller.enqueue(new TextEncoder().encode(fallbackEvent));
+ }
+ };
+
+ try {
+ sendProgress('info', 'Using cached authentication token');
+
+ // Create wrapper script for streaming progress
+ const scriptName = path.basename(scriptPath);
+ const wrapperScript = `
+process.chdir('${recipesRoot}');
+
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+
+// Set up environment variables
+const envContent = fs.readFileSync('${tempEnvPath}', 'utf-8');
+const envLines = envContent.split('\\n');
+
+envLines.forEach(line => {
+ const match = line.match(/^([^=]+)=(.*)$/);
+ if (match) {
+ process.env[match[1]] = match[2];
+ }
+});
+
+// Token caching
+const TOKEN_CACHE_FILE = path.join(os.tmpdir(), 'sigma-portal-token.json');
+
+function getCachedToken() {
+ try {
+ if (fs.existsSync(TOKEN_CACHE_FILE)) {
+ const tokenData = JSON.parse(fs.readFileSync(TOKEN_CACHE_FILE, 'utf8'));
+ const now = Date.now();
+ if (tokenData.expiresAt && now < tokenData.expiresAt) {
+ return tokenData.token;
+ } else {
+ fs.unlinkSync(TOKEN_CACHE_FILE);
+ }
+ }
+ } catch (error) {}
+ return null;
+}
+
+// Global variables for capture
+global.DOWNLOAD_CONTENT = null;
+global.DOWNLOAD_FILENAME = null;
+global.STREAM_FINISHED = false;
+global.CAPTURE_IN_PROGRESS = false;
+
+// Override console.log to capture progress
+const originalConsoleLog = console.log;
+console.log = function(...args) {
+ const message = args.map(arg => String(arg)).join(' ');
+
+ // For debugging - show ALL messages for now
+ process.stdout.write('PROGRESS:debug:' + message + '\\n');
+
+ // Also call original for any other logging
+ originalConsoleLog.apply(console, args);
+};
+
+// File capture system
+const originalWriteFileSync = fs.writeFileSync;
+const originalCreateWriteStream = fs.createWriteStream;
+
+fs.writeFileSync = function(filePath, data, options) {
+ if (filePath.endsWith('.json')) {
+ global.DOWNLOAD_CONTENT = typeof data === 'string' ? data : JSON.stringify(data, null, 2);
+ global.DOWNLOAD_FILENAME = path.basename(filePath);
+
+ // Write download data to temp file instead of stdout to avoid truncation
+ const downloadData = {
+ content: global.DOWNLOAD_CONTENT,
+ filename: global.DOWNLOAD_FILENAME
+ };
+
+ try {
+ const resultFile = require('path').join(require('os').tmpdir(), \`download-result-\${Date.now()}.json\`);
+ require('fs').writeFileSync(resultFile, JSON.stringify(downloadData));
+ process.stdout.write('DOWNLOAD_FILE:' + resultFile + '\\n');
+ } catch (err) {
+ process.stdout.write(\`PROGRESS:error:Failed to write download result: \${err.message}\\n\`);
+ }
+
+ process.exit(0); // Exit immediately after successful capture
+ return;
+ }
+ return originalWriteFileSync.call(this, filePath, data, options);
+};
+
+fs.createWriteStream = function(filePath, options) {
+ global.DOWNLOAD_FILENAME = path.basename(filePath);
+ global.DOWNLOAD_FILEPATH = filePath;
+ const tempFilePath = filePath + '.temp';
+
+ const realStream = originalCreateWriteStream.call(this, tempFilePath, options);
+ let totalBytesWritten = 0;
+ let writeCount = 0;
+ let lastWriteTime = Date.now();
+ let inactivityTimer = null;
+
+  // Fallback completion path, driven by the write-inactivity timer: end the
+  // real stream, wait briefly for the OS to flush, then capture the temp file
+  // as base64 and hand the result to the parent process via a temp JSON file
+  // (the DOWNLOAD_FILE: protocol) so large payloads never truncate on stdout.
+  const finishDownload = () => {
+    if (global.CAPTURE_IN_PROGRESS) return; // Prevent multiple captures
+    global.CAPTURE_IN_PROGRESS = true;
+
+    process.stdout.write('PROGRESS:info:Finishing download, reading file...\\n');
+    try {
+      realStream.end();
+      // NOTE(review): assumes 500ms is enough for the stream's data to be
+      // flushed to disk after end() - confirm on slow filesystems.
+      setTimeout(() => {
+        process.stdout.write(\`PROGRESS:debug:Looking for temp file at: \${tempFilePath}\\n\`);
+        if (fs.existsSync(tempFilePath)) {
+          const fileData = fs.readFileSync(tempFilePath);
+          process.stdout.write(\`PROGRESS:debug:Successfully read \${fileData.length} bytes from temp file\\n\`);
+          global.DOWNLOAD_CONTENT = fileData.toString('base64');
+          global.STREAM_FINISHED = true;
+
+          // Write download data to temp file instead of stdout to avoid truncation
+          const downloadData = {
+            content: global.DOWNLOAD_CONTENT,
+            filename: global.DOWNLOAD_FILENAME || 'export.pdf'
+          };
+
+          try {
+            const os = require('os');
+            const path = require('path');
+            const fs = require('fs');
+            const tempDir = os.tmpdir();
+            // Random suffix avoids collisions when several captures race.
+            const resultFile = path.join(tempDir, \`download-result-\${Date.now()}-\${Math.random().toString(36).substring(7)}.json\`);
+
+            fs.writeFileSync(resultFile, JSON.stringify(downloadData));
+            process.stdout.write('DOWNLOAD_FILE:' + resultFile + '\\n');
+          } catch (err) {
+            process.stdout.write(\`PROGRESS:error:Failed to write download result: \${err.message}\\n\`);
+          }
+
+          try { fs.unlinkSync(tempFilePath); } catch (e) {}
+          process.exit(0); // Exit immediately after successful capture
+        }
+      }, 500);
+    } catch (err) {
+      process.stdout.write(\`PROGRESS:error:Error finishing download: \${err.message}\\n\`);
+    }
+  };
+
+ const mockStream = {
+ write: function(chunk) {
+ writeCount++;
+ totalBytesWritten += chunk.length;
+ lastWriteTime = Date.now();
+
+ // Clear any existing inactivity timer
+ if (inactivityTimer) {
+ clearTimeout(inactivityTimer);
+ }
+
+ // Set a new inactivity timer - if no writes for 3 seconds, consider download complete
+ inactivityTimer = setTimeout(() => {
+ process.stdout.write('PROGRESS:info:Download appears complete (3s inactivity)\\n');
+ finishDownload();
+ }, 3000);
+
+ // Show progress for first write and every 1000 writes to avoid spam
+ if (writeCount === 1 || writeCount % 1000 === 0) {
+ process.stdout.write(\`PROGRESS:info:Downloaded \${Math.round(totalBytesWritten/1024)}KB...\\n\`);
+ }
+
+ return realStream.write(chunk);
+ },
+ end: function(chunk) {
+ if (chunk) {
+ totalBytesWritten += chunk.length;
+ }
+ return realStream.end(chunk);
+ },
+ on: function(event, callback) {
+ if (event === 'finish') {
+ realStream.on('finish', () => {
+ if (global.CAPTURE_IN_PROGRESS) return; // Prevent multiple captures
+ global.CAPTURE_IN_PROGRESS = true;
+
+ process.stdout.write('PROGRESS:info:Stream finished, capturing file...\\n');
+ try {
+ process.stdout.write(\`PROGRESS:debug:Stream finish - looking for temp file at: \${tempFilePath}\\n\`);
+ if (fs.existsSync(tempFilePath)) {
+ const fileData = fs.readFileSync(tempFilePath);
+ process.stdout.write(\`PROGRESS:debug:Stream finish - successfully read \${fileData.length} bytes\\n\`);
+ global.DOWNLOAD_CONTENT = fileData.toString('base64');
+
+ // Write download data to temp file instead of stdout to avoid truncation
+ const downloadData = {
+ content: global.DOWNLOAD_CONTENT,
+ filename: global.DOWNLOAD_FILENAME || 'export.pdf'
+ };
+
+ try {
+ const os = require('os');
+ const path = require('path');
+ const fs = require('fs');
+ const tempDir = os.tmpdir();
+ const resultFile = path.join(tempDir, \`download-result-\${Date.now()}-\${Math.random().toString(36).substring(7)}.json\`);
+
+ // Validate content first
+ if (!downloadData.content) {
+ throw new Error('Download content is null or undefined');
+ }
+
+ // Log details in one write to reduce race conditions
+ const debugInfo = [
+ 'PROGRESS:debug:About to write result file: ' + resultFile,
+ 'PROGRESS:debug:Content length: ' + downloadData.content.length + ' chars',
+ 'PROGRESS:debug:Filename: ' + downloadData.filename
+ ].join('\\n') + '\\n';
+ process.stdout.write(debugInfo);
+
+ // Create JSON and write file
+ const jsonData = JSON.stringify(downloadData);
+ process.stdout.write('PROGRESS:debug:JSON data size: ' + jsonData.length + ' chars\\n');
+
+ // Write the file synchronously
+ fs.writeFileSync(resultFile, jsonData, 'utf8');
+
+ // Verify the file was written correctly
+ if (!fs.existsSync(resultFile)) {
+ throw new Error('Result file was not created');
+ }
+
+ const fileSize = fs.statSync(resultFile).size;
+ if (fileSize === 0) {
+ throw new Error('Result file is empty');
+ }
+
+ // Success - output file path and success message
+ const successInfo = [
+ 'PROGRESS:debug:Result file written successfully (size: ' + fileSize + ' bytes)',
+ 'DOWNLOAD_FILE:' + resultFile
+ ].join('\\n') + '\\n';
+ process.stdout.write(successInfo);
+
+ } catch (err) {
+ const errorInfo = [
+ 'PROGRESS:error:Failed to write download result: ' + err.message,
+ 'PROGRESS:error:Stack: ' + err.stack,
+ 'PROGRESS:error:Content available: ' + !!downloadData.content,
+ 'PROGRESS:error:Content length: ' + (downloadData.content ? downloadData.content.length : 'N/A')
+ ].join('\\n') + '\\n';
+ process.stdout.write(errorInfo);
+ }
+
+ try { fs.unlinkSync(tempFilePath); } catch (e) {}
+
+ // Ensure stdout is flushed before exit
+ process.stdout.write('', () => {
+ process.exit(0);
+ });
+ }
+ callback();
+ } catch (err) {
+ process.stdout.write(\`PROGRESS:error:Error reading temp file: \${err.message}\\n\`);
+ callback();
+ }
+ });
+ return this;
+ }
+ if (event === 'error') {
+ realStream.on('error', callback);
+ return this;
+ }
+ return realStream.on(event, callback);
+ },
+ once: function(event, callback) {
+ return realStream.once(event, callback);
+ },
+ pipe: function(source) {
+ return source.pipe(realStream);
+ },
+ close: function() {
+ return realStream.close();
+ },
+ destroy: function() {
+ return realStream.destroy();
+ },
+ writable: true,
+ readable: false
+ };
+
+ // Ensure the mock stream has all necessary EventEmitter methods
+ Object.setPrototypeOf(mockStream, realStream);
+
+ return mockStream;
+};
+
+// Get cached token and execute
+const cachedToken = getCachedToken();
+if (cachedToken) {
+
+ let scriptContent = fs.readFileSync('${scriptPath}', 'utf-8');
+
+ const modifiedScript = scriptContent.replace(
+ /const getBearerToken = require\\(['"][^'"]*get-access-token['"]\\);/g,
+ 'const getBearerToken = async () => { return "' + cachedToken + '"; };'
+ ).replace(
+ /if \\(require\\.main === module\\) \\{([\\s\\S]*?)\\}/g,
+ '{ $1 }'
+ ).replace(
+ // Change the 10 second delay to 30 seconds for large datasets
+ /setTimeout\\(resolve, 10000\\)/g,
+ 'setTimeout(resolve, 30000)'
+ ).replace(
+ // Also update any 10000 millisecond delays
+ /await new Promise\\(resolve => setTimeout\\(resolve, 10000\\)\\)/g,
+ 'await new Promise(resolve => setTimeout(resolve, 30000))'
+ );
+
+ const tempScriptPath = '${scriptPath}' + '.stream.js';
+ fs.writeFileSync(tempScriptPath, modifiedScript);
+
+ try {
+
+ delete require.cache[require.resolve(tempScriptPath)];
+ require(tempScriptPath);
+
+ // Check for completion
+ let checkCount = 0;
+ const maxChecks = 30; // 4 minutes max
+
+ const checkForCompletion = () => {
+ checkCount++;
+
+ if (global.DOWNLOAD_CONTENT) {
+ process.stdout.write('DOWNLOAD_RESULT:' + JSON.stringify({
+ content: global.DOWNLOAD_CONTENT,
+ filename: global.DOWNLOAD_FILENAME || 'export.pdf'
+ }) + '\\n');
+ process.exit(0);
+ } else if (checkCount >= maxChecks) {
+ process.stdout.write('PROGRESS:timeout:Download timeout - export may have failed\\n');
+ process.exit(1);
+ } else {
+ setTimeout(checkForCompletion, 8000); // Check every 8 seconds
+ }
+ };
+
+ setTimeout(checkForCompletion, 10000); // Wait 10 seconds for stream to finish before first check
+
+ } finally {
+ try {
+ fs.unlinkSync(tempScriptPath);
+ } catch (err) {}
+ }
+} else {
+ process.stdout.write('PROGRESS:error:No cached authentication token found\\n');
+ process.exit(1);
+}
+`;
+
+ const tempScriptPath = path.join(os.tmpdir(), `temp-stream-wrapper-${Date.now()}.js`);
+ fs.writeFileSync(tempScriptPath, wrapperScript);
+
+ const child = spawn('node', [tempScriptPath], {
+ cwd: recipesRoot,
+ timeout: 600000, // 10 minute timeout for large datasets
+ });
+
+ let fileContent: string | null = null;
+ let filename: string | null = null;
+
+ let downloadResultCapture = false;
+ let capturedFilename = '';
+ let capturedContent = '';
+ let downloadCompleted = false; // Flag to prevent duplicate success messages
+
+  // Buffer partial lines between 'data' events: a single protocol line (for
+  // example a DOWNLOAD_FILE path or a long base64 CONTENT line) can be split
+  // across stdout chunks, so only complete (newline-terminated) lines are
+  // processed and the trailing fragment carries over to the next event.
+  let stdoutLineBuffer = '';
+
+  // Parse the child's stdout protocols:
+  //   PROGRESS:<type>:<message>     - progress/log events
+  //   DOWNLOAD_FILE:<path>          - result JSON written to a temp file
+  //   DOWNLOAD_RESULT:<json>        - inline result (fallback for small files)
+  //   DOWNLOAD_RESULT_START/.../END - multi-line capture protocol
+  child.stdout?.on('data', (data) => {
+    stdoutLineBuffer += data.toString();
+    const lines = stdoutLineBuffer.split('\n');
+    // Last element is '' when the chunk ended on a newline, otherwise a
+    // partial line; keep it buffered either way.
+    stdoutLineBuffer = lines.pop() ?? '';
+
+    for (const line of lines) {
+      if (line === 'DOWNLOAD_RESULT_START') {
+        downloadResultCapture = true;
+        sendProgress('info', 'Capturing download result...');
+      } else if (line === 'DOWNLOAD_RESULT_END') {
+        downloadResultCapture = false;
+        // Process the captured data immediately
+        if (capturedFilename && capturedContent) {
+          try {
+            // Write content to JSON file and use the existing DOWNLOAD_FILE protocol
+            const tempResultPath = path.join(os.tmpdir(), `download-result-${Date.now()}.json`);
+            const downloadData = {
+              content: capturedContent,
+              filename: capturedFilename
+            };
+            fs.writeFileSync(tempResultPath, JSON.stringify(downloadData));
+
+            // Don't use sendProgress for DOWNLOAD_FILE - it needs to be processed differently
+            // Store the file path for later processing
+            fileContent = capturedContent;
+            filename = capturedFilename;
+            sendProgress('debug', `Stored fileContent length: ${fileContent?.length || 0}, filename: ${filename || 'none'}`);
+
+            // Also store as global variables as backup
+            (global as any).FINAL_DOWNLOAD_CONTENT = capturedContent;
+            (global as any).FINAL_DOWNLOAD_FILENAME = capturedFilename;
+
+            sendProgress('success', 'Download completed!', {
+              filename: capturedFilename,
+              size: Math.round(capturedContent.length * 0.75) // Rough base64 to bytes
+            });
+
+            downloadCompleted = true; // Mark as completed to prevent duplicate messages
+
+          } catch (err) {
+            sendProgress('error', 'Failed to process download result: ' + (err instanceof Error ? err.message : String(err)));
+          }
+        }
+      } else if (downloadResultCapture) {
+        if (line.startsWith('FILENAME:')) {
+          capturedFilename = line.substring('FILENAME:'.length);
+          sendProgress('debug', `Captured filename: ${capturedFilename}`);
+        } else if (line.startsWith('CONTENT:')) {
+          capturedContent = line.substring('CONTENT:'.length);
+          sendProgress('debug', `Captured content length: ${capturedContent.length} chars`);
+        } else if (line.trim() && capturedContent) {
+          // Append additional lines that are part of the base64 content
+          capturedContent += line;
+          sendProgress('debug', `Appended content, total length: ${capturedContent.length} chars`);
+        }
+      } else if (line.startsWith('PROGRESS:')) {
+        // Parse "PROGRESS:<type>:<message>" manually. The previous
+        // line.split(':', 3) TRUNCATED the message: split's limit drops the
+        // remainder, so any message containing a colon (very common for
+        // error text like "Failed to write download result: EACCES: ...")
+        // silently lost everything after its first colon.
+        const typeEnd = line.indexOf(':', 'PROGRESS:'.length);
+        if (typeEnd === -1) {
+          // Malformed line with no message separator - surface it as info.
+          sendProgress('info', line.substring('PROGRESS:'.length));
+        } else {
+          const type = line.substring('PROGRESS:'.length, typeEnd);
+          const message = line.substring(typeEnd + 1);
+          sendProgress(type, message);
+        }
+      } else if (line.startsWith('DOWNLOAD_FILE:')) {
+        try {
+          const filePath = line.substring('DOWNLOAD_FILE:'.length);
+          sendProgress('debug', `Reading download file: ${filePath}`);
+
+          if (!fs.existsSync(filePath)) {
+            throw new Error(`Download file does not exist: ${filePath}`);
+          }
+
+          const fileStats = fs.statSync(filePath);
+          sendProgress('debug', `File size: ${fileStats.size} bytes`);
+
+          const downloadData = JSON.parse(fs.readFileSync(filePath, 'utf8'));
+          fileContent = downloadData.content;
+          filename = downloadData.filename;
+
+          sendProgress('debug', `File content length: ${fileContent ? fileContent.length : 'null'}`);
+          // Was a broken "$(unknown)" placeholder - report the real filename.
+          sendProgress('debug', `Filename: ${filename || 'none'}`);
+
+          sendProgress('success', 'Download completed!', {
+            filename,
+            size: Math.round((fileContent?.length || 0) * 0.75) // Rough base64 to bytes
+          });
+
+          // Clean up temp file
+          try { fs.unlinkSync(filePath); } catch (e) {}
+        } catch (e) {
+          sendProgress('error', `Failed to read download result file: ${e instanceof Error ? e.message : String(e)}`);
+        }
+      } else if (line.startsWith('DOWNLOAD_RESULT:')) {
+        // Keep old method as fallback for smaller files
+        try {
+          const jsonString = line.substring('DOWNLOAD_RESULT:'.length);
+          const downloadData = JSON.parse(jsonString);
+          fileContent = downloadData.content;
+          filename = downloadData.filename;
+          sendProgress('success', 'Download completed!', {
+            filename,
+            size: Math.round((fileContent?.length || 0) * 0.75) // Rough base64 to bytes
+          });
+        } catch (e) {
+          sendProgress('error', `Failed to parse download result: ${e instanceof Error ? e.message : String(e)}`);
+        }
+      }
+    }
+  });
+
+  // stderr carries either intentional DIRECT_LOG messages (forwarded as
+  // info-level progress) or genuine child-process errors.
+  child.stderr?.on('data', (chunk) => {
+    const text = chunk.toString();
+    if (!text.includes('DIRECT_LOG:')) {
+      sendProgress('error', `Error: ${text}`);
+      return;
+    }
+    sendProgress('info', text.replace('DIRECT_LOG:', '').trim());
+  });
+
+  // Finalize the stream once the child exits: clean up temp files, recover
+  // the captured content (or its backup globals), emit the final success or
+  // error event, then close the SSE controller.
+  child.on('close', (code) => {
+    // Clean up the generated wrapper script and temp .env regardless of outcome.
+    try {
+      fs.unlinkSync(tempScriptPath);
+      fs.unlinkSync(tempEnvPath);
+    } catch (err) {}
+
+    sendProgress('debug', `Process closed with code: ${code}`);
+    sendProgress('debug', `File content available: ${!!fileContent}`);
+    sendProgress('debug', `Filename: ${filename || 'none'}`);
+
+    // Check backup global variables if local ones are empty
+    if (!fileContent && (global as any).FINAL_DOWNLOAD_CONTENT) {
+      fileContent = (global as any).FINAL_DOWNLOAD_CONTENT;
+      filename = (global as any).FINAL_DOWNLOAD_FILENAME;
+      sendProgress('debug', `Using backup global variables - content length: ${fileContent?.length || 0}, filename: ${filename || 'none'}`);
+    }
+
+    if (downloadCompleted) {
+      // Download was already processed successfully via DOWNLOAD_RESULT protocol
+      sendProgress('debug', 'Download already completed via DOWNLOAD_RESULT protocol');
+    } else if (code === 0 && fileContent) {
+      try {
+        // Read content from temp file if it's a file path, otherwise treat as direct content
+        let actualContent: string;
+        if (fileContent.startsWith('/') && fs.existsSync(fileContent)) {
+          // Read from temp file
+          actualContent = fs.readFileSync(fileContent, 'utf8');
+          // Clean up temp file
+          fs.unlinkSync(fileContent);
+        } else {
+          // Direct content (fallback)
+          actualContent = fileContent;
+        }
+
+        // Simple completion message - file is already saved locally by the
+        // recipe. (Previously emitted a broken "$(unknown)" placeholder
+        // instead of interpolating the filename.)
+        sendProgress('success', `File "${filename || 'download'}" saved successfully!`, {
+          filename: filename,
+          localPath: path.resolve('downloaded-files', filename || 'download'),
+          size: Math.round(actualContent.length * 0.75) // Rough base64 to bytes
+        });
+      } catch (err) {
+        sendProgress('error', `Failed to process download file: ${err instanceof Error ? err.message : String(err)}`);
+      }
+    } else if (code !== 0) {
+      sendProgress('error', `Process exited with code ${code}`);
+    } else if (!fileContent && !downloadCompleted) {
+      sendProgress('error', 'No file content captured');
+    }
+
+    controller.close();
+  });
+
+  // Spawn-level failures (e.g. the node binary cannot start) end the stream
+  // immediately with an error event.
+  child.on('error', (err) => {
+    sendProgress('error', `Execution error: ${err.message}`);
+    controller.close();
+  });
+
+ } catch (error) {
+ sendProgress('error', `Failed to start download: ${error}`);
+ controller.close();
+ }
+}
+
+/**
+ * Map a filename to a MIME content type based on its extension.
+ *
+ * @param filename - File name (or path) whose extension decides the type.
+ * @returns The matching MIME type, or 'application/octet-stream' when the
+ *          extension is not recognized.
+ */
+function getContentTypeFromFilename(filename: string): string {
+  // Compare case-insensitively so "REPORT.PDF" resolves like "report.pdf"
+  // (the previous exact-case check fell through to octet-stream).
+  const lower = filename.toLowerCase();
+  if (lower.endsWith('.pdf')) return 'application/pdf';
+  if (lower.endsWith('.csv')) return 'text/csv';
+  if (lower.endsWith('.json')) return 'application/json';
+  return 'application/octet-stream';
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/download/route.ts b/recipe-portal/app/api/download/route.ts
new file mode 100644
index 00000000..b7818336
--- /dev/null
+++ b/recipe-portal/app/api/download/route.ts
@@ -0,0 +1,408 @@
+import { NextResponse } from 'next/server';
+import { spawn } from 'child_process';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+/**
+ * POST /api/download
+ *
+ * Runs a recipe script that produces a downloadable file and returns the
+ * captured content to the browser.
+ *
+ * Request body: { filePath, envVariables, filename, contentType }
+ * Responses: 400 (missing path), 403 (outside recipes dir), 404 (not found),
+ * 500 (execution failure), 200 with { fileContent, filename, contentType }.
+ */
+export async function POST(request: Request) {
+  try {
+    const { filePath, envVariables, filename, contentType } = await request.json();
+
+    if (!filePath) {
+      return NextResponse.json(
+        { error: 'File path is required' },
+        { status: 400 }
+      );
+    }
+
+    // Security check: ensure the file is within the recipes directory.
+    // Compare against the directory path PLUS a separator: a bare
+    // startsWith() would also accept sibling paths such as ".../recipes-evil".
+    const recipesPath = path.join(process.cwd(), 'recipes');
+    const resolvedPath = path.resolve(filePath);
+    const resolvedRecipesPath = path.resolve(recipesPath);
+
+    if (!resolvedPath.startsWith(resolvedRecipesPath + path.sep)) {
+      return NextResponse.json(
+        { error: 'Access denied: File must be within recipes directory' },
+        { status: 403 }
+      );
+    }
+
+    // Check if file exists
+    if (!fs.existsSync(resolvedPath)) {
+      return NextResponse.json(
+        { error: 'File not found' },
+        { status: 404 }
+      );
+    }
+
+    // Create temporary .env file with provided variables. Values containing
+    // newlines are skipped so a crafted value cannot inject extra env entries.
+    const tempEnvPath = path.join(os.tmpdir(), `.env-${Date.now()}`);
+    let envContent = '';
+
+    if (envVariables && typeof envVariables === 'object') {
+      for (const [key, value] of Object.entries(envVariables)) {
+        if (typeof value === 'string' && !value.includes('\n')) {
+          envContent += `${key}=${value}\n`;
+        }
+      }
+    }
+
+    // Add common variables if not provided
+    if (envVariables && !envVariables.authURL && (envVariables.CLIENT_ID || envVariables.SECRET)) {
+      envContent += `authURL=https://aws-api.sigmacomputing.com/v2/auth/token\n`;
+    }
+    if (envVariables && !envVariables.baseURL && (envVariables.CLIENT_ID || envVariables.SECRET)) {
+      envContent += `baseURL=https://aws-api.sigmacomputing.com/v2\n`;
+    }
+
+    // Add the path to the env file in the content
+    envContent += `ENV_FILE_PATH=${tempEnvPath}\n`;
+
+    fs.writeFileSync(tempEnvPath, envContent);
+
+    // Execute the script and capture file content
+    const result = await executeDownloadScript(resolvedPath, tempEnvPath);
+
+    // Clean up temp file
+    try {
+      fs.unlinkSync(tempEnvPath);
+    } catch (err) {
+      console.warn('Failed to cleanup temp env file:', err);
+    }
+
+    if (result.success && result.fileContent) {
+      // Return the file content for browser download
+      return NextResponse.json({
+        fileContent: result.fileContent,
+        filename: filename || 'download',
+        contentType: contentType || 'application/octet-stream',
+        success: true,
+        output: result.stdout,
+        timestamp: new Date().toISOString()
+      });
+    } else {
+      return NextResponse.json({
+        output: result.stdout,
+        error: result.stderr,
+        success: false,
+        timestamp: new Date().toISOString(),
+        httpStatus: 500,
+        httpStatusText: 'Download Failed'
+      });
+    }
+
+  } catch (error) {
+    console.error('Error executing download script:', error);
+    return NextResponse.json(
+      { error: 'Failed to execute download script' },
+      { status: 500 }
+    );
+  }
+}
+
+function executeDownloadScript(scriptPath: string, envFilePath: string): Promise<{
+ stdout: string;
+ stderr: string;
+ success: boolean;
+ fileContent?: string;
+}> {
+ return new Promise((resolve) => {
+ const scriptDir = path.dirname(scriptPath);
+ const recipesRoot = path.join(scriptDir, '..');
+
+ // Create a wrapper script that captures file content instead of writing to disk
+ const scriptName = path.basename(scriptPath);
+ const wrapperScript = `
+// Change to the recipes directory for proper module resolution
+process.chdir('${recipesRoot}');
+
+// Import required modules
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+
+// Set up environment variables from our temp file
+const envContent = fs.readFileSync('${envFilePath}', 'utf-8');
+const envLines = envContent.split('\\n');
+
+envLines.forEach(line => {
+ const match = line.match(/^([^=]+)=(.*)$/);
+ if (match) {
+ process.env[match[1]] = match[2];
+ }
+});
+
+// File-based token caching
+const TOKEN_CACHE_FILE = path.join(os.tmpdir(), 'sigma-portal-token.json');
+
+// Return the cached bearer token when present and not yet expired; otherwise
+// delete the stale cache file and return null so the caller re-authenticates.
+function getCachedToken() {
+  try {
+    if (fs.existsSync(TOKEN_CACHE_FILE)) {
+      const tokenData = JSON.parse(fs.readFileSync(TOKEN_CACHE_FILE, 'utf8'));
+      const now = Date.now();
+
+      // Honour the token only while its recorded expiry is in the future.
+      if (tokenData.expiresAt && now < tokenData.expiresAt) {
+        return tokenData.token;
+      } else {
+        // Expired - remove the cache file so the next call starts fresh.
+        fs.unlinkSync(TOKEN_CACHE_FILE);
+      }
+    }
+  } catch (error) {
+    // Ignore errors - a corrupt or unreadable cache is treated as a miss.
+  }
+  return null;
+}
+
+// Persist the token to the temp-dir cache with a fixed 1-hour expiry.
+// Failures are logged but non-fatal: caching is best-effort.
+function cacheToken(token) {
+  try {
+    const tokenData = {
+      token: token,
+      expiresAt: Date.now() + (60 * 60 * 1000), // 1 hour from now
+      createdAt: Date.now()
+    };
+    fs.writeFileSync(TOKEN_CACHE_FILE, JSON.stringify(tokenData));
+  } catch (error) {
+    console.error('Failed to cache token:', error.message);
+  }
+}
+
+// Global variable to capture file content for download
+global.DOWNLOAD_CONTENT = null;
+global.DOWNLOAD_FILENAME = null;
+
+// Override file writing functions to capture content
+const originalWriteFileSync = fs.writeFileSync;
+const originalCreateWriteStream = fs.createWriteStream;
+const originalReadFileSync = fs.readFileSync;
+const originalUnlinkSync = fs.unlinkSync;
+
+console.log('WRAPPER: Setting up filesystem overrides');
+
+// Intercept synchronous writes: recipes that save JSON results get their
+// content captured into globals (for the browser download) instead of
+// touching disk. Non-JSON writes fall through to the real implementation.
+fs.writeFileSync = function(filePath, data, options) {
+  // For JSON files, capture the content
+  if (filePath.endsWith('.json')) {
+    global.DOWNLOAD_CONTENT = typeof data === 'string' ? data : JSON.stringify(data, null, 2);
+    global.DOWNLOAD_FILENAME = path.basename(filePath);
+    // The "Download ready" line is scanned by the parent process.
+    console.log(\`Download ready: \${global.DOWNLOAD_FILENAME}\`);
+    return;
+  }
+  // For other files, use original behavior as fallback
+  return originalWriteFileSync.call(this, filePath, data, options);
+};
+
+// Override stream writing for binary files. The recipe's write stream is
+// redirected to a ".temp" sibling file; on 'finish' the temp file is read
+// back and stored in global.DOWNLOAD_CONTENT as base64 for the parent to
+// collect. The returned proxy forwards writes to the real stream.
+fs.createWriteStream = function(filePath, options) {
+  console.log(\`WRAPPER: Intercepted createWriteStream for: \${path.basename(filePath)}\`);
+  global.DOWNLOAD_FILENAME = path.basename(filePath);
+  global.DOWNLOAD_FILEPATH = filePath;
+
+  // Use a temporary file to capture the actual data
+  const tempFilePath = filePath + '.temp';
+  const realStream = originalCreateWriteStream.call(this, tempFilePath, options);
+
+  // Create a proper writable stream proxy that captures completion
+  const mockStream = {
+    write: function(chunk) {
+      return realStream.write(chunk);
+    },
+    end: function(chunk) {
+      if (chunk) realStream.write(chunk);
+      return realStream.end();
+    },
+    destroy: function() {
+      return realStream.destroy();
+    },
+    on: function(event, callback) {
+      if (event === 'finish') {
+        // When the real stream finishes, read the file and store content
+        realStream.on('finish', () => {
+          console.log(\`WRAPPER: Reading completed file...\`);
+          try {
+            // Give the filesystem a moment to flush
+            // NOTE(review): 200ms is an assumption - confirm it is enough
+            // for large exports on slow disks.
+            setTimeout(() => {
+              if (fs.existsSync(tempFilePath)) {
+                // Use the saved original readFileSync/unlinkSync so the
+                // overridden fs functions are not re-entered here.
+                const fileData = originalReadFileSync(tempFilePath);
+                global.DOWNLOAD_CONTENT = fileData.toString('base64');
+                console.log(\`WRAPPER: Successfully captured \${fileData.length} bytes\`);
+                console.log(\`Download ready: \${global.DOWNLOAD_FILENAME}\`);
+                // Clean up temp file
+                try { originalUnlinkSync(tempFilePath); } catch (e) {}
+              } else {
+                console.error(\`WRAPPER: Temp file missing: \${tempFilePath}\`);
+              }
+              // Always call the callback to let the recipe know we're done
+              if (callback) callback();
+            }, 200);
+          } catch (err) {
+            console.error('WRAPPER: Error reading file:', err);
+            if (callback) callback();
+          }
+        });
+        return this;
+      }
+      return realStream.on(event, callback);
+    },
+    // Implement writable stream interface properly
+    writable: true,
+    readable: false,
+    close: function() {
+      return realStream.close();
+    }
+  };
+
+  return mockStream;
+};
+
+// Override getBearerToken function for cached tokens.
+// Returns the cached token when available; otherwise calls the real
+// get-access-token module (with console silenced so its output does not
+// pollute the wrapper's stdout protocol) and caches the new token.
+// NOTE(review): this wrapper-scope function appears unused - the executed
+// recipe gets its token via the regex replacement further below, and the
+// required temp script cannot see this scope. Confirm before relying on it.
+async function getBearerToken() {
+  const cached = getCachedToken();
+  if (cached) {
+    return cached;
+  }
+
+  // Silence console during authentication to keep protocol output clean.
+  const originalConsoleLog = console.log;
+  const originalConsoleError = console.error;
+  console.log = () => {};
+  console.error = () => {};
+
+  const originalGetBearerToken = require('${recipesRoot}/get-access-token');
+  const newToken = await originalGetBearerToken();
+
+  // Restore console before returning.
+  console.log = originalConsoleLog;
+  console.error = originalConsoleError;
+
+  if (newToken) {
+    cacheToken(newToken);
+  }
+
+  return newToken;
+}
+
+// Execute the script
+try {
+ const cachedToken = getCachedToken();
+
+ if (cachedToken) {
+ console.log('Using cached authentication token for download');
+
+ let scriptContent = fs.readFileSync('${scriptPath}', 'utf-8');
+
+ // Replace the getBearerToken import with cached token
+ const modifiedScript = scriptContent.replace(
+ /const getBearerToken = require\\(['"][^'"]*get-access-token['"]\\);/g,
+ 'const getBearerToken = async () => { return "' + cachedToken + '"; };'
+ ).replace(
+ /if \\(require\\.main === module\\) \\{([\\s\\S]*?)\\}/g,
+ '{ $1 }'
+ );
+
+ const tempScriptPath = '${scriptPath}' + '.download.js';
+ fs.writeFileSync(tempScriptPath, modifiedScript);
+
+ try {
+ delete require.cache[require.resolve(tempScriptPath)];
+ require(tempScriptPath);
+
+ // Wait longer for async operations to complete (PDF exports can take time)
+ let checkCount = 0;
+ const maxChecks = 30; // 30 checks * 2 seconds = 60 seconds max
+
+ const checkForCompletion = () => {
+ checkCount++;
+ if (global.DOWNLOAD_CONTENT) {
+ console.log('DOWNLOAD_RESULT:' + JSON.stringify({
+ content: global.DOWNLOAD_CONTENT,
+ filename: global.DOWNLOAD_FILENAME
+ }));
+ process.exit(0);
+ } else if (checkCount >= maxChecks) {
+ console.log('Download timeout - export may have failed or taken too long');
+ process.exit(1);
+ } else {
+ // Check again in 2 seconds
+ setTimeout(checkForCompletion, 2000);
+ }
+ };
+
+ // Start checking after initial delay
+ setTimeout(checkForCompletion, 3000);
+
+ } finally {
+ try {
+ fs.unlinkSync(tempScriptPath);
+ } catch (err) {}
+ }
+ } else {
+ console.log('No cached token found for download script.');
+ process.exit(1);
+ }
+} catch (error) {
+ console.error('Script execution error:', error.message);
+ process.exit(1);
+}
+`;
+
+ const tempScriptPath = path.join(os.tmpdir(), `temp-download-wrapper-${Date.now()}.js`);
+ fs.writeFileSync(tempScriptPath, wrapperScript);
+
+ const child = spawn('node', [tempScriptPath], {
+ cwd: recipesRoot,
+ timeout: 120000, // 120 second timeout for downloads (PDF exports can take time)
+ });
+
+  let stdout = '';
+  let stderr = '';
+  let fileContent: string | null = null;
+
+  // Accumulate stdout and defer DOWNLOAD_RESULT parsing until the process
+  // closes: the payload (a base64-encoded file inside JSON) can exceed a
+  // single 'data' chunk, and the previous per-chunk regex captured a
+  // truncated JSON string whenever the line was split across chunks.
+  child.stdout?.on('data', (data) => {
+    stdout += data.toString();
+  });
+
+  child.stderr?.on('data', (data) => {
+    stderr += data.toString();
+  });
+
+  child.on('close', (code) => {
+    // Look for the download result in the complete output. The child emits
+    // the JSON on a single line, so match everything up to the next newline.
+    const downloadMatch = stdout.match(/DOWNLOAD_RESULT:(.+)/);
+    if (downloadMatch) {
+      try {
+        const downloadData = JSON.parse(downloadMatch[1]);
+        fileContent = downloadData.content;
+      } catch (e) {
+        console.error('Failed to parse download result:', e);
+      }
+    }
+
+    // Clean up temp script file
+    try {
+      fs.unlinkSync(tempScriptPath);
+    } catch (err) {
+      console.warn('Failed to cleanup temp script file:', err);
+    }
+
+    resolve({
+      stdout: stdout || 'Download script executed',
+      stderr: stderr || '',
+      success: code === 0 && fileContent !== null,
+      fileContent: fileContent || undefined
+    });
+  });
+
+ child.on('error', (error) => {
+ // Clean up temp script file
+ try {
+ fs.unlinkSync(tempScriptPath);
+ } catch (err) {
+ console.warn('Failed to cleanup temp script file:', err);
+ }
+
+ resolve({
+ stdout: '',
+ stderr: `Execution error: ${error.message}`,
+ success: false
+ });
+ });
+ });
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/env/route.ts b/recipe-portal/app/api/env/route.ts
index 7139cf70..f2cdde4c 100644
--- a/recipe-portal/app/api/env/route.ts
+++ b/recipe-portal/app/api/env/route.ts
@@ -4,7 +4,7 @@ import path from 'path';
export async function GET() {
try {
- const envFilePath = path.join(process.cwd(), '..', 'sigma-api-recipes', '.env');
+ const envFilePath = path.join(process.cwd(), 'recipes', '.env');
if (!fs.existsSync(envFilePath)) {
return NextResponse.json({
diff --git a/recipe-portal/app/api/execute/route.ts b/recipe-portal/app/api/execute/route.ts
index 826ec13b..3969e0dd 100644
--- a/recipe-portal/app/api/execute/route.ts
+++ b/recipe-portal/app/api/execute/route.ts
@@ -15,14 +15,14 @@ export async function POST(request: Request) {
);
}
- // Security check: ensure the file is within the sigma-api-recipes directory
- const recipesPath = path.join(process.cwd(), '..', 'sigma-api-recipes');
+ // Security check: ensure the file is within the recipes directory
+ const recipesPath = path.join(process.cwd(), 'recipes');
const resolvedPath = path.resolve(filePath);
const resolvedRecipesPath = path.resolve(recipesPath);
if (!resolvedPath.startsWith(resolvedRecipesPath)) {
return NextResponse.json(
- { error: 'Access denied: File must be within sigma-api-recipes directory' },
+ { error: 'Access denied: File must be within recipes directory' },
{ status: 403 }
);
}
@@ -61,7 +61,7 @@ export async function POST(request: Request) {
fs.writeFileSync(tempEnvPath, envContent);
// Execute the script with timeout
- const output = await executeScript(resolvedPath, tempEnvPath);
+ const output = await executeScript(resolvedPath, tempEnvPath, envVariables?.CLIENT_ID);
// Clean up temp file
try {
@@ -89,7 +89,7 @@ export async function POST(request: Request) {
}
}
-function executeScript(scriptPath: string, envFilePath: string): Promise<{
+function executeScript(scriptPath: string, envFilePath: string, clientId: string = null): Promise<{
stdout: string;
stderr: string;
success: boolean;
@@ -100,6 +100,7 @@ function executeScript(scriptPath: string, envFilePath: string): Promise<{
// Create a wrapper script that handles module resolution and environment setup
const scriptName = path.basename(scriptPath);
+ const isMasterScript = scriptPath.includes('master-script.js');
const wrapperScript = `
// Change to the recipes directory for proper module resolution
process.chdir('${recipesRoot}');
@@ -120,11 +121,16 @@ envLines.forEach(line => {
}
});
-// File-based token caching
-const TOKEN_CACHE_FILE = path.join(os.tmpdir(), 'sigma-portal-token.json');
+// Configuration-specific token caching
+function getTokenCacheFile(clientId) {
+ // Create a safe filename using first 8 chars of clientId
+ const configHash = clientId ? clientId.substring(0, 8) : 'default';
+ return path.join(os.tmpdir(), 'sigma-portal-token-' + configHash + '.json');
+}
-function getCachedToken() {
+function getCachedToken(clientId = null) {
try {
+ const TOKEN_CACHE_FILE = getTokenCacheFile(clientId);
if (fs.existsSync(TOKEN_CACHE_FILE)) {
const tokenData = JSON.parse(fs.readFileSync(TOKEN_CACHE_FILE, 'utf8'));
const now = Date.now();
@@ -143,10 +149,12 @@ function getCachedToken() {
return null;
}
-function cacheToken(token) {
+function cacheToken(token, clientId = null) {
try {
+ const TOKEN_CACHE_FILE = getTokenCacheFile(clientId);
const tokenData = {
token: token,
+ clientId: clientId,
expiresAt: Date.now() + (60 * 60 * 1000), // 1 hour from now
createdAt: Date.now()
};
@@ -157,9 +165,9 @@ function cacheToken(token) {
}
// Override getBearerToken function for recipes that use cached tokens
-async function getBearerToken() {
+async function getBearerToken(clientId = null) {
// First check for cached token
- const cached = getCachedToken();
+ const cached = getCachedToken(clientId);
if (cached) {
// Don't log anything about tokens in regular recipes
return cached;
@@ -199,7 +207,7 @@ try {
console.log('HTTP Status: 200 OK - Authentication successful');
// Cache the token for future use
- cacheToken(token);
+ cacheToken(token, '${clientId}');
} else {
console.log('ā Failed to obtain bearer token');
process.exit(1);
@@ -210,7 +218,7 @@ try {
});
` : `
// For regular scripts, check for cached token first
- const cachedToken = getCachedToken();
+ const cachedToken = getCachedToken('${clientId}');
if (cachedToken) {
console.log('Using cached authentication token');
@@ -229,9 +237,29 @@ try {
'{ $1 }' // Remove the require.main check so the script always executes
);
+ // For master-script.js, we need to override the get-access-token module globally
+ // so that when sub-scripts import it, they get the cached token
+ const isMasterScript = '${scriptPath}'.includes('master-script.js');
+ const finalScript = isMasterScript ?
+ '// Override get-access-token module globally for sub-scripts\\n' +
+ 'const Module = require(\\'module\\');\\n' +
+ 'const originalRequire = Module.prototype.require;\\n' +
+ '\\n' +
+ 'Module.prototype.require = function(id) {\\n' +
+ ' if (id === \\'../get-access-token\\' || id.endsWith(\\'get-access-token\\')) {\\n' +
+ ' return async () => {\\n' +
+ ' console.log("Using master script cached token for sub-operation");\\n' +
+ ' return "' + cachedToken + '";\\n' +
+ ' };\\n' +
+ ' }\\n' +
+ ' return originalRequire.apply(this, arguments);\\n' +
+ '};\\n' +
+ '\\n' +
+ modifiedScript : modifiedScript;
+
// Write to a temporary file and require it
const tempScriptPath = '${scriptPath}' + '.cached.js';
- fs.writeFileSync(tempScriptPath, modifiedScript);
+ fs.writeFileSync(tempScriptPath, finalScript);
try {
// Clear require cache to ensure fresh execution
@@ -264,9 +292,13 @@ try {
const tempScriptPath = path.join(os.tmpdir(), `temp-wrapper-${Date.now()}.js`);
fs.writeFileSync(tempScriptPath, wrapperScript);
+ // Set timeout based on script type - materialization takes longer
+ const isMaterializationScript = scriptPath.includes('initiate-materialization.js');
+ const timeout = isMaterializationScript ? 300000 : 30000; // 5 minutes for materialization, 30 seconds for others
+
const child = spawn('node', [tempScriptPath], {
cwd: recipesRoot,
- timeout: 30000, // 30 second timeout
+ timeout: timeout,
});
let stdout = '';
diff --git a/recipe-portal/app/api/keys/route.ts b/recipe-portal/app/api/keys/route.ts
new file mode 100644
index 00000000..7d22e041
--- /dev/null
+++ b/recipe-portal/app/api/keys/route.ts
@@ -0,0 +1,148 @@
+import { NextResponse } from 'next/server';
+import {
+ storeCredentials,
+ getStoredCredentials,
+ hasStoredCredentials,
+ clearStoredCredentials,
+ getStoredCredentialNames,
+ getDefaultCredentialSetName,
+ setDefaultCredentialSet
+} from '../../../lib/keyStorage';
+
+// GET - Check if stored credentials exist and optionally retrieve them
+export async function GET(request: Request) {
+ try {
+ const { searchParams } = new URL(request.url);
+ const retrieve = searchParams.get('retrieve') === 'true';
+ const list = searchParams.get('list') === 'true';
+ const setName = searchParams.get('set');
+
+ const hasKeys = await hasStoredCredentials();
+
+ if (!hasKeys) {
+ return NextResponse.json({
+ hasStoredKeys: false,
+ credentials: null,
+ credentialSets: [],
+ defaultSet: null
+ });
+ }
+
+ const credentialSets = await getStoredCredentialNames();
+ const defaultSet = await getDefaultCredentialSetName();
+
+ if (list) {
+ // Return list of available sets
+ return NextResponse.json({
+ hasStoredKeys: true,
+ credentialSets,
+ defaultSet,
+ credentials: null
+ });
+ }
+
+ if (retrieve) {
+ const credentials = await getStoredCredentials(setName || undefined);
+ return NextResponse.json({
+ hasStoredKeys: true,
+ credentials: credentials || null,
+ credentialSets,
+ defaultSet
+ });
+ }
+
+ return NextResponse.json({
+ hasStoredKeys: true,
+ credentials: null,
+ credentialSets,
+ defaultSet
+ });
+
+ } catch (error) {
+ console.error('Error checking stored keys:', error);
+ return NextResponse.json(
+ { error: 'Failed to check stored credentials' },
+ { status: 500 }
+ );
+ }
+}
+
+// POST - Store configuration (credentials + server settings)
+export async function POST(request: Request) {
+ try {
+ const { clientId, clientSecret, name, setAsDefault, baseURL, authURL } = await request.json();
+
+ if (!clientId || !clientSecret) {
+ return NextResponse.json(
+ { error: 'Client ID and Client Secret are required' },
+ { status: 400 }
+ );
+ }
+
+ if (!name || name.trim() === '') {
+ return NextResponse.json(
+ { error: 'Credential set name is required' },
+ { status: 400 }
+ );
+ }
+ const credentialSetName = name.trim();
+ const success = await storeCredentials(clientId, clientSecret, credentialSetName, baseURL, authURL);
+
+ // Set as default if requested
+ if (success && setAsDefault) {
+ await setDefaultCredentialSet(credentialSetName);
+ }
+
+ if (success) {
+ return NextResponse.json({
+ success: true,
+ message: 'Credentials stored successfully'
+ });
+ } else {
+ return NextResponse.json(
+ { error: 'Failed to store credentials' },
+ { status: 500 }
+ );
+ }
+
+ } catch (error) {
+ console.error('Error storing credentials:', error);
+ return NextResponse.json(
+ { error: 'Failed to store credentials' },
+ { status: 500 }
+ );
+ }
+}
+
+// DELETE - Clear stored credentials (all or specific config)
+export async function DELETE(request: Request) {
+ try {
+ const { searchParams } = new URL(request.url);
+ const configName = searchParams.get('config');
+
+ const success = await clearStoredCredentials(configName || undefined);
+
+ if (success) {
+ const message = configName
+ ? `Config "${configName}" deleted successfully`
+ : 'All stored credentials cleared successfully';
+
+ return NextResponse.json({
+ success: true,
+ message
+ });
+ } else {
+ return NextResponse.json(
+ { error: 'Failed to clear stored credentials' },
+ { status: 500 }
+ );
+ }
+
+ } catch (error) {
+ console.error('Error clearing stored credentials:', error);
+ return NextResponse.json(
+ { error: 'Failed to clear stored credentials' },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/open-folder/route.ts b/recipe-portal/app/api/open-folder/route.ts
new file mode 100644
index 00000000..53a9b454
--- /dev/null
+++ b/recipe-portal/app/api/open-folder/route.ts
@@ -0,0 +1,43 @@
+import { NextResponse } from 'next/server';
+import { exec } from 'child_process';
+import path from 'path';
+
+export async function POST(request: Request) {
+ try {
+ const { folder } = await request.json();
+
+ if (!folder || folder !== 'downloaded-files') {
+ return NextResponse.json(
+ { error: 'Invalid folder specified' },
+ { status: 400 }
+ );
+ }
+
+ const folderPath = path.resolve(folder);
+
+ // Open folder using system command based on OS
+ let command: string;
+ if (process.platform === 'win32') {
+ command = `explorer "${folderPath}"`;
+ } else if (process.platform === 'darwin') {
+ command = `open "${folderPath}"`;
+ } else {
+ command = `xdg-open "${folderPath}"`;
+ }
+
+ exec(command, (error) => {
+ if (error) {
+ console.error('Error opening folder:', error);
+ }
+ });
+
+ return NextResponse.json({ success: true });
+
+ } catch (error) {
+ console.error('Error opening folder:', error);
+ return NextResponse.json(
+ { error: 'Failed to open folder' },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/readme/route.ts b/recipe-portal/app/api/readme/route.ts
index e399d04e..bd5c6eb5 100644
--- a/recipe-portal/app/api/readme/route.ts
+++ b/recipe-portal/app/api/readme/route.ts
@@ -2,10 +2,61 @@ import { NextResponse } from 'next/server';
import fs from 'fs';
import path from 'path';
+function convertMarkdownToHtml(markdown: string): string {
+ // First, normalize line endings and remove excessive whitespace
+ let html = markdown
+ .replace(/\r\n/g, '\n')
+ .replace(/\r/g, '\n')
+ // Remove excessive blank lines
+ .replace(/\n{3,}/g, '\n\n')
+ // Trim each line
+ .split('\n')
+ .map(line => line.trim())
+ .join('\n');
+
+ return html
+ // Code blocks (do this first to preserve their content)
+ .replace(/```[\s\S]*?```/g, (match) => {
+ const code = match.replace(/```\w*\n?/, '').replace(/\n?```$/, '');
+ return `${code.replace(//g, '>')}
`;
+ })
+ // Headers
+ .replace(/^### (.+)$/gm, '$1
')
+ .replace(/^## (.+)$/gm, '$1
')
+ .replace(/^# (.+)$/gm, '$1
')
+ // Inline code
+ .replace(/`([^`]+)`/g, '$1')
+ // Links
+ .replace(/\[([^\]]+)\]\(([^)]+)\)/g, '$1')
+ // Bold text
+ .replace(/\*\*([^*]+)\*\*/g, '$1')
+ // Lists
+ .replace(/^- (.+)$/gm, '$1')
+ .replace(/^\* (.+)$/gm, '$1')
+ .replace(/^(\d+)\. (.+)$/gm, '$2')
+ // Wrap consecutive list items in proper containers
+ .replace(/(.*?<\/li>(?:\s*.*?<\/li>)*)/g, (match) => {
+ const items = match.trim();
+ return ``;
+ })
+ // Convert double line breaks to paragraph breaks
+ .replace(/\n\s*\n/g, '
')
+ // Wrap remaining content in paragraphs
+ .replace(/^(?![<])/gm, '
')
+ // Clean up paragraph wrapping around headers and other elements
+ .replace(/
(<[h123]|
|<\/pre>|<\/ul>)/g, '$1')
+ // Remove trailing paragraph tags
+ .replace(/<\/p>$/g, '')
+ // Remove empty paragraphs
+ .replace(/
<\/p>/g, '');
+}
+
export async function GET(request: Request) {
try {
const { searchParams } = new URL(request.url);
const readmePath = searchParams.get('path');
+ const format = searchParams.get('format'); // Check if HTML format is requested
if (!readmePath) {
return NextResponse.json(
@@ -14,14 +65,16 @@ export async function GET(request: Request) {
);
}
- // Security check: ensure the file is within the sigma-api-recipes directory
- const recipesPath = path.join(process.cwd(), '..', 'sigma-api-recipes');
+ // Security check: ensure the file is within the recipes directory or is the main README
+ const recipesPath = path.join(process.cwd(), 'recipes');
+ const mainReadmePath = path.join(process.cwd(), 'README.md');
const resolvedPath = path.resolve(readmePath);
const resolvedRecipesPath = path.resolve(recipesPath);
+ const resolvedMainReadmePath = path.resolve(mainReadmePath);
- if (!resolvedPath.startsWith(resolvedRecipesPath)) {
+ if (!resolvedPath.startsWith(resolvedRecipesPath) && resolvedPath !== resolvedMainReadmePath) {
return NextResponse.json(
- { error: 'Access denied: File must be within sigma-api-recipes directory' },
+ { error: 'Access denied: File must be within recipes directory or be the main README' },
{ status: 403 }
);
}
@@ -36,6 +89,70 @@ export async function GET(request: Request) {
const content = fs.readFileSync(resolvedPath, 'utf-8');
+ // If accessed directly in browser (no explicit JSON format requested), return HTML
+ if (format !== 'json') {
+ const htmlContent = `
+
+
+ Recipe Instructions
+
+
+
+
+ ā Close
+ ${convertMarkdownToHtml(content)}
+
+`;
+
+ return new NextResponse(htmlContent, {
+ headers: {
+ 'Content-Type': 'text/html; charset=utf-8',
+ },
+ });
+ }
+
+ // Return JSON for API calls
return NextResponse.json({
content,
success: true
diff --git a/recipe-portal/app/api/resources/route.ts b/recipe-portal/app/api/resources/route.ts
new file mode 100644
index 00000000..ac45f521
--- /dev/null
+++ b/recipe-portal/app/api/resources/route.ts
@@ -0,0 +1,281 @@
+import { NextResponse } from 'next/server';
+import axios from 'axios';
+
+// Base resource fetching function
+async function fetchWithAuth(endpoint: string, token: string) {
+ try {
+ const baseURL = process.env.SIGMA_BASE_URL || 'https://aws-api.sigmacomputing.com/v2';
+ const url = `${baseURL}${endpoint}`;
+ console.log(`Fetching: ${url}`);
+ const response = await axios.get(url, {
+ headers: {
+ 'Authorization': `Bearer ${token}`,
+ 'Accept': 'application/json'
+ }
+ });
+ console.log(`Response status for ${endpoint}:`, response.status);
+ return response.data;
+ } catch (error) {
+ console.error(`Error fetching ${endpoint}:`, (error as any).response?.data || (error as any).message);
+ throw error;
+ }
+}
+
/**
 * GET /api/resources?type=...&token=...
 *
 * Proxy endpoint that fetches one category of Sigma resources with the
 * caller-supplied bearer token, normalizes each category to a common
 * { id, name, description?, ... } shape for UI dropdowns, and returns
 * { type, count, data } with data sorted by name.
 *
 * 'workbookElements' and 'materializationSchedules' additionally require
 * a workbookId query parameter and fan out to per-workbook endpoints.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const type = searchParams.get('type');
    const token = searchParams.get('token');

    // The token is passed through verbatim to the Sigma API.
    if (!token) {
      return NextResponse.json(
        { error: 'Authentication token is required' },
        { status: 401 }
      );
    }

    if (!type) {
      return NextResponse.json(
        { error: 'Resource type is required. Use: teams, members, workbooks, connections, workspaces, bookmarks, templates, datasets, dataModels, accountTypes, workbookElements, materializationSchedules' },
        { status: 400 }
      );
    }

    let data: any;
    let transformedData: any[];

    // Each case: fetch the raw list, then map to the UI display shape.
    // `data.entries || data` tolerates both paginated ({entries: []}) and
    // bare-array response bodies.
    switch (type) {
      case 'teams':
        data = await fetchWithAuth('/teams', token);
        transformedData = (data.entries || data).map((team: any) => ({
          id: team.teamId,
          name: team.name,
          description: team.description || '',
          memberCount: team.memberCount || 0
        }));
        break;

      case 'members':
        data = await fetchWithAuth('/members', token);
        // Filter out potentially inactive members and map to display format
        const activeMembers = (data.entries || data).filter((member: any) => {
          // Add filters for inactive members based on patterns you identify
          // For now, keeping all members - you can modify this filter
          return true;
        });

        transformedData = activeMembers.map((member: any) => ({
          id: member.memberId,
          name: `${member.firstName} ${member.lastName}`.trim(),
          email: member.email,
          firstName: member.firstName,
          lastName: member.lastName,
          type: member.memberType
        }));
        break;

      case 'workbooks':
        data = await fetchWithAuth('/workbooks', token);
        transformedData = (data.entries || data).map((workbook: any) => ({
          id: workbook.workbookId,
          name: workbook.name,
          path: workbook.path,
          ownerId: workbook.ownerId,
          createdBy: workbook.createdBy,
          url: workbook.url
        }));
        break;

      case 'connections':
        data = await fetchWithAuth('/connections', token);
        transformedData = (data.entries || data).map((connection: any) => ({
          id: connection.connectionId,
          name: connection.name,
          type: connection.type,
          description: connection.description || ''
        }));
        break;

      case 'workspaces':
        data = await fetchWithAuth('/workspaces', token);
        transformedData = (data.entries || data).map((workspace: any) => ({
          id: workspace.workspaceId,
          name: workspace.name,
          description: workspace.description || ''
        }));
        break;

      case 'bookmarks':
        // Using favorites endpoint since bookmarks API maps to favorites
        data = await fetchWithAuth('/favorites', token);
        transformedData = (data.entries || data).map((favorite: any) => ({
          id: favorite.favoriteId || favorite.inodeId,
          name: favorite.name || favorite.title,
          description: favorite.description || '',
          type: favorite.type || 'favorite',
          url: favorite.url
        }));
        break;

      case 'templates':
        data = await fetchWithAuth('/templates', token);
        transformedData = (data.entries || data).map((template: any) => ({
          id: template.templateId,
          name: template.name,
          description: template.description || '',
          type: template.type
        }));
        break;

      case 'datasets':
        data = await fetchWithAuth('/datasets', token);
        transformedData = (data.entries || data).map((dataset: any) => ({
          id: dataset.datasetId,
          name: dataset.name,
          description: dataset.description || '',
          type: dataset.type
        }));
        break;

      case 'dataModels':
        data = await fetchWithAuth('/dataModels', token);
        transformedData = (data.entries || data).map((dataModel: any) => ({
          id: dataModel.dataModelId,
          name: dataModel.name,
          description: dataModel.description || '',
          type: dataModel.type || 'dataModel'
        }));
        break;

      case 'accountTypes':
        data = await fetchWithAuth('/accountTypes', token);
        console.log('AccountTypes raw data:', JSON.stringify(data, null, 2));
        // Account types have no separate id field; the name doubles as id.
        transformedData = (data.entries || data).map((accountType: any) => ({
          id: accountType.accountTypeName,
          name: accountType.accountTypeName,
          description: accountType.description || '',
          type: accountType.isCustom ? 'custom' : 'built-in',
          isCustom: accountType.isCustom
        }));
        break;

      case 'workbookElements':
        const workbookId = searchParams.get('workbookId');
        if (!workbookId) {
          return NextResponse.json(
            { error: 'workbookId parameter is required for workbookElements' },
            { status: 400 }
          );
        }

        try {
          // First, get all pages from the workbook
          console.log(`Fetching pages for workbook: ${workbookId}`);
          const pagesData = await fetchWithAuth(`/workbooks/${workbookId}/pages`, token);
          console.log('Pages data:', JSON.stringify(pagesData, null, 2));

          const pages = pagesData.entries || pagesData || [];
          let allElements: any[] = [];

          // For each page, get its elements
          for (const page of pages) {
            const pageId = page.pageId || page.id;
            if (pageId) {
              try {
                console.log(`Fetching elements for page: ${pageId}`);
                const elementsData = await fetchWithAuth(`/workbooks/${workbookId}/pages/${pageId}/elements`, token);
                console.log(`Elements data for page ${pageId}:`, JSON.stringify(elementsData, null, 2));

                const pageElements = elementsData.entries || elementsData || [];

                // Add page information to each element
                const elementsWithPageInfo = pageElements.map((element: any) => ({
                  ...element,
                  pageId: pageId,
                  pageName: page.name || page.title || `Page ${pageId}`
                }));

                allElements = allElements.concat(elementsWithPageInfo);
              } catch (pageError) {
                console.warn(`Failed to fetch elements for page ${pageId}:`, pageError);
                // Continue with other pages even if one fails
              }
            }
          }

          console.log('All extracted elements:', allElements);

          // Element payloads vary across API versions; try several field
          // names before falling back to a synthesized display name.
          transformedData = allElements.map((element: any) => ({
            id: element.elementId || element.id || element.elementUid,
            name: element.name || element.title || element.displayName || `${element.pageName} - ${element.name || element.title || element.displayName || 'Unnamed Element'}`,
            type: element.type || element.elementType || 'element',
            description: element.description || `Element on page: ${element.pageName}`,
            pageId: element.pageId,
            pageName: element.pageName
          }));

        } catch (error) {
          // Best-effort: an unreadable workbook yields an empty list, not a 500.
          console.error('Error fetching workbook elements:', error);
          transformedData = [];
        }

        console.log('Final transformed elements data:', transformedData);
        break;

      case 'materializationSchedules':
        const workbookIdForMat = searchParams.get('workbookId');
        if (!workbookIdForMat) {
          return NextResponse.json(
            { error: 'workbookId parameter is required for materializationSchedules' },
            { status: 400 }
          );
        }

        try {
          console.log(`Fetching materialization schedules for workbook: ${workbookIdForMat}`);
          const schedulesData = await fetchWithAuth(`/workbooks/${workbookIdForMat}/materialization-schedules`, token);
          console.log('Materialization schedules data:', JSON.stringify(schedulesData, null, 2));

          const schedules = schedulesData.entries || schedulesData || [];

          transformedData = schedules.map((schedule: any) => ({
            id: schedule.sheetId, // Use sheetId as the value that will be sent to the script
            name: schedule.elementName, // Display the element name to the user
            description: `${schedule.schedule.cronSpec} ${schedule.schedule.timezone}${schedule.paused ? ' - PAUSED' : ''}`,
            type: 'materializationSchedule',
            sheetId: schedule.sheetId,
            elementName: schedule.elementName,
            cronSpec: schedule.schedule.cronSpec,
            timezone: schedule.schedule.timezone,
            paused: schedule.paused
          }));

        } catch (error) {
          // Best-effort: treat fetch failures as "no schedules".
          console.error('Error fetching materialization schedules:', error);
          transformedData = [];
        }

        console.log('Final transformed schedules data:', transformedData);
        break;

      default:
        return NextResponse.json(
          { error: `Unsupported resource type: ${type}` },
          { status: 400 }
        );
    }

    // Alphabetize for dropdown display; missing names sort as empty strings.
    return NextResponse.json({
      type,
      count: transformedData.length,
      data: transformedData.sort((a: any, b: any) => (a.name || '').localeCompare(b.name || ''))
    });

  } catch (error) {
    console.error('Error in resources API:', error);
    return NextResponse.json(
      { error: 'Failed to fetch resources' },
      { status: 500 }
    );
  }
}
\ No newline at end of file
diff --git a/recipe-portal/app/api/token/clear/route.ts b/recipe-portal/app/api/token/clear/route.ts
new file mode 100644
index 00000000..4c0f4b01
--- /dev/null
+++ b/recipe-portal/app/api/token/clear/route.ts
@@ -0,0 +1,65 @@
+import { NextResponse } from 'next/server';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+// Configuration-specific token caching
+function getTokenCacheFile(clientId) {
+ // Create a safe filename using first 8 chars of clientId
+ const configHash = clientId ? clientId.substring(0, 8) : 'default';
+ return path.join(os.tmpdir(), `sigma-portal-token-${configHash}.json`);
+}
+
+export async function POST(request: Request) {
+ try {
+ const { clientId, clearAll } = await request.json();
+
+ console.log('Token clear request:', { clientId, clearAll });
+
+ if (clearAll) {
+ // Clear all token cache files
+ const tempDir = os.tmpdir();
+ const files = fs.readdirSync(tempDir);
+ const tokenFiles = files.filter(file => file.startsWith('sigma-portal-token-') && file.endsWith('.json'));
+
+ console.log('Clearing all tokens:', tokenFiles);
+
+ let clearedCount = 0;
+ for (const file of tokenFiles) {
+ try {
+ fs.unlinkSync(path.join(tempDir, file));
+ clearedCount++;
+ console.log(`Cleared token file: ${file}`);
+ } catch (err) {
+ console.warn(`Failed to delete token file ${file}:`, err);
+ }
+ }
+
+ return NextResponse.json({
+ success: true,
+ message: `Cleared ${clearedCount} authentication token(s)`
+ });
+ } else {
+ // Clear specific configuration's token
+ const TOKEN_CACHE_FILE = getTokenCacheFile(clientId);
+
+ if (fs.existsSync(TOKEN_CACHE_FILE)) {
+ fs.unlinkSync(TOKEN_CACHE_FILE);
+ }
+
+ return NextResponse.json({
+ success: true,
+ message: 'Authentication token cleared successfully'
+ });
+ }
+ } catch (error) {
+ console.error('Error clearing token:', error);
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Failed to clear authentication token'
+ },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/recipe-portal/app/api/token/route.ts b/recipe-portal/app/api/token/route.ts
new file mode 100644
index 00000000..01e78b35
--- /dev/null
+++ b/recipe-portal/app/api/token/route.ts
@@ -0,0 +1,87 @@
+import { NextResponse } from 'next/server';
+import fs from 'fs';
+import path from 'path';
+import os from 'os';
+
+// Configuration-specific token caching
+function getTokenCacheFile(clientId: string) {
+ // Create a safe filename using first 8 chars of clientId
+ const configHash = clientId ? clientId.substring(0, 8) : 'default';
+ return path.join(os.tmpdir(), `sigma-portal-token-${configHash}.json`);
+}
+
/**
 * GET /api/token
 * Scans the OS temp directory for per-configuration cached Sigma tokens,
 * deletes any that have expired, and returns the most recently
 * created/accessed valid token (bumping its lastAccessed timestamp so it
 * stays preferred). Responds { hasValidToken: false, token: null } when
 * nothing valid is found — including on internal errors.
 */
export async function GET() {
  try {
    // Look for the most recent valid token across all configurations
    const tempDir = os.tmpdir();
    const files = fs.readdirSync(tempDir);
    const tokenFiles = files.filter(file => file.startsWith('sigma-portal-token-') && file.endsWith('.json'));

    let mostRecentToken = null;
    let mostRecentTime = 0;

    console.log('Found token files:', tokenFiles);

    for (const file of tokenFiles) {
      try {
        const filePath = path.join(tempDir, file);
        const tokenData = JSON.parse(fs.readFileSync(filePath, 'utf8'));
        const now = Date.now();

        // Check if token is still valid (not expired)
        if (tokenData.expiresAt && now < tokenData.expiresAt) {
          // Use the most recently created/accessed token
          const lastAccessTime = tokenData.lastAccessed || tokenData.createdAt;
          console.log(`Token ${file}: clientId=${tokenData.clientId?.substring(0,8)}, createdAt=${new Date(tokenData.createdAt)}, lastAccessed=${tokenData.lastAccessed ? new Date(tokenData.lastAccessed) : 'none'}, lastAccessTime=${lastAccessTime}`);

          if (lastAccessTime > mostRecentTime) {
            console.log(` -> This is the most recent token so far`);
            mostRecentTime = lastAccessTime;
            mostRecentToken = {
              hasValidToken: true,
              token: tokenData.token,
              expiresAt: tokenData.expiresAt,
              timeRemaining: Math.round((tokenData.expiresAt - now) / 1000 / 60), // minutes
              clientId: tokenData.clientId,
              filePath: filePath // Keep track of which file this came from
            };
          }
        } else {
          // Token expired, remove file
          fs.unlinkSync(filePath);
        }
      } catch (err) {
        // Skip invalid token files
        console.warn(`Failed to read token file ${file}:`, err);
      }
    }

    if (mostRecentToken) {
      console.log(`Selected token: clientId=${mostRecentToken.clientId?.substring(0,8)}`);

      // Update the last accessed time for this token
      try {
        const tokenData = JSON.parse(fs.readFileSync(mostRecentToken.filePath, 'utf8'));
        tokenData.lastAccessed = Date.now();
        fs.writeFileSync(mostRecentToken.filePath, JSON.stringify(tokenData));
      } catch (err) {
        // Best-effort bookkeeping: the token is still returned even if the
        // access-time update fails.
        console.warn('Failed to update token access time:', err);
      }

      // Remove filePath from response
      const { filePath, ...responseData } = mostRecentToken;
      return NextResponse.json(responseData);
    }

    return NextResponse.json({
      hasValidToken: false,
      token: null
    });
  } catch (error) {
    console.error('Error checking token:', error);
    return NextResponse.json({
      hasValidToken: false,
      token: null
    });
  }
}
\ No newline at end of file
diff --git a/recipe-portal/app/layout.tsx b/recipe-portal/app/layout.tsx
index c89b7deb..6b24a7ca 100644
--- a/recipe-portal/app/layout.tsx
+++ b/recipe-portal/app/layout.tsx
@@ -2,8 +2,13 @@ import type { Metadata } from 'next'
import './globals.css'
export const metadata: Metadata = {
- title: 'Sigma API Recipe Portal',
- description: 'Interactive portal for Sigma API recipes and examples',
+ title: 'QuickStarts API Toolkit',
+ description: 'Experiment with Sigma API calls and learn common request flows',
+ icons: {
+ icon: '/crane.png',
+ shortcut: '/crane.png',
+ apple: '/crane.png',
+ },
}
export default function RootLayout({
diff --git a/recipe-portal/app/page.tsx b/recipe-portal/app/page.tsx
index 52b5d35c..101a566a 100644
--- a/recipe-portal/app/page.tsx
+++ b/recipe-portal/app/page.tsx
@@ -1,8 +1,9 @@
'use client';
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useCallback } from 'react';
import { RecipeCard } from '../components/RecipeCard';
import { CodeViewer } from '../components/CodeViewer';
+import { QuickApiExplorer } from '../components/QuickApiExplorer';
interface Recipe {
id: string;
@@ -29,9 +30,36 @@ export default function Home() {
const [recipeData, setRecipeData] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
+ const [activeTopTab, setActiveTopTab] = useState<'recipes' | 'quickapi'>('recipes');
const [activeCategoryTab, setActiveCategoryTab] = useState('');
+ const [authToken, setAuthToken] = useState(null);
const [hasValidToken, setHasValidToken] = useState(false);
const [showAuthModal, setShowAuthModal] = useState(false);
+ const [clearingToken, setClearingToken] = useState(false);
+ const [quickApiKey, setQuickApiKey] = useState(0);
+
+ // Function to check auth status (reusable)
+ const checkAuthStatus = useCallback(async () => {
+ try {
+ const response = await fetch('/api/token');
+ if (response.ok) {
+ const data = await response.json();
+ if (data.hasValidToken) {
+ setHasValidToken(true);
+ setAuthToken(data.token);
+ } else {
+ setHasValidToken(false);
+ setAuthToken(null);
+ }
+ } else {
+ setHasValidToken(false);
+ setAuthToken(null);
+ }
+ } catch (error) {
+ setHasValidToken(false);
+ setAuthToken(null);
+ }
+ }, []);
useEffect(() => {
async function fetchRecipes() {
@@ -53,23 +81,43 @@ export default function Home() {
}
}
- async function checkAuthStatus() {
- try {
- const response = await fetch('/api/token');
- if (response.ok) {
- const data = await response.json();
- if (data.hasValidToken) {
- setHasValidToken(true);
- }
+ fetchRecipes();
+ checkAuthStatus();
+ }, [checkAuthStatus]);
+
+ // Periodically check auth status every 30 seconds
+ useEffect(() => {
+ const interval = setInterval(checkAuthStatus, 30000);
+ return () => clearInterval(interval);
+ }, [checkAuthStatus]);
+
+ const clearToken = async () => {
+ setClearingToken(true);
+ try {
+ const response = await fetch('/api/token/clear', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({ clearAll: true })
+ });
+
+ if (response.ok) {
+ setHasValidToken(false);
+ setAuthToken(null);
+ // If auth modal is open, close it to trigger form reset on next open
+ if (showAuthModal) {
+ setShowAuthModal(false);
}
- } catch (error) {
- // Ignore errors - just means no token is cached
+ } else {
+ console.error('Failed to clear token');
}
+ } catch (error) {
+ console.error('Error clearing token:', error);
+ } finally {
+ setClearingToken(false);
}
-
- fetchRecipes();
- checkAuthStatus();
- }, []);
+ };
if (loading) {
return (
@@ -109,74 +157,138 @@ export default function Home() {