
Commit d164842

Merge pull request #74 from databricks-solutions/73-improving-installsh-experience

73 improving installsh experience

2 parents 64d13c1 + 8bffe6f

4 files changed: 447 additions & 75 deletions

app.yaml

Lines changed: 10 additions & 0 deletions
@@ -98,6 +98,16 @@ env:
   - name: GSO_WAREHOUSE_ID
     valueFrom: sql-warehouse
 
+# ---------------------------------------------------------------------------
+# Resources
+# ---------------------------------------------------------------------------
+resources:
+  - name: sql-warehouse
+    description: "SQL Warehouse for queries and catalog discovery"
+    sql_warehouse:
+      id: "__WAREHOUSE_ID__"
+      permission: CAN_USE
+
 # ---------------------------------------------------------------------------
 # Deployment
 # ---------------------------------------------------------------------------

scripts/deploy-config.sh

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@
 # GENIE_APP_NAME             (optional) Databricks App name [default: genie-workbench]
 # GENIE_DEPLOY_PROFILE       (optional) Databricks CLI profile [default: DEFAULT]
 # GENIE_LLM_MODEL            (optional) LLM serving endpoint [default: databricks-claude-sonnet-4-6]
-# GENIE_LAKEBASE_INSTANCE    (optional) Lakebase instance name [default: <app-name>]
+# GENIE_LAKEBASE_INSTANCE    (optional) Lakebase instance name [default: none]
 # GENIE_MLFLOW_EXPERIMENT_ID (optional) MLflow experiment ID for agent tracing [default: disabled]
 #
 # After sourcing, the following variables are available:

@@ -36,7 +36,7 @@ GSO_SCHEMA="genie_space_optimizer"  # Fixed default — matches GSO convention
 WAREHOUSE_ID="${GENIE_WAREHOUSE_ID:-}"
 PROFILE="${GENIE_DEPLOY_PROFILE:-DEFAULT}"
 LLM_MODEL="${GENIE_LLM_MODEL:-databricks-claude-sonnet-4-6}"
-LAKEBASE_INSTANCE="${GENIE_LAKEBASE_INSTANCE:-$APP_NAME}"
+LAKEBASE_INSTANCE="${GENIE_LAKEBASE_INSTANCE:-}"
 MLFLOW_EXPERIMENT_ID="${GENIE_MLFLOW_EXPERIMENT_ID:-}"
 
 # ── Validate required values ─────────────────────────────────────────────
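With the Lakebase default changed from <app-name> to empty, a deploy that does not set GENIE_LAKEBASE_INSTANCE presumably skips Lakebase provisioning instead of creating an instance named after the app. An illustrative way to source the config (placeholder values; only the variable names are taken from this diff):

export GENIE_WAREHOUSE_ID="abc123def456"   # placeholder warehouse ID
source scripts/deploy-config.sh
echo "app=$APP_NAME profile=$PROFILE model=$LLM_MODEL lakebase=${LAKEBASE_INSTANCE:-<none>}"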

scripts/deploy.sh

Lines changed: 10 additions & 6 deletions
@@ -442,6 +442,7 @@ echo "▸ Step $STEP/$TOTAL_STEPS: Redeploying app with freshest code..."
 echo "  Patching app.yaml on workspace with GSO config..."
 PATCHED_APP_YAML="/tmp/app.yaml.patched"
 cp "$PROJECT_DIR/app.yaml" "$PATCHED_APP_YAML"
+sed -i.bak "s|__WAREHOUSE_ID__|$WAREHOUSE_ID|" "$PATCHED_APP_YAML"
 sed -i.bak "s|__GSO_CATALOG__|$CATALOG|" "$PATCHED_APP_YAML"
 sed -i.bak "s|__LAKEBASE_INSTANCE__|$LAKEBASE_INSTANCE|" "$PATCHED_APP_YAML"
 sed -i.bak "s|__LLM_MODEL__|$LLM_MODEL|" "$PATCHED_APP_YAML"
@@ -461,7 +462,7 @@ fi
 
 databricks workspace import "$WS_PATH/app.yaml" \
   --profile "$PROFILE" --file "$PATCHED_APP_YAML" --format AUTO --overwrite 2>/dev/null && \
-  echo "  ✓ app.yaml patched (GSO_CATALOG=$CATALOG, GSO_JOB_ID=${JOB_ID:-<none>}, LAKEBASE_INSTANCE=$LAKEBASE_INSTANCE, LLM_MODEL=$LLM_MODEL, MLFLOW=${MLFLOW_EXPERIMENT_ID:-<disabled>})" || \
+  echo "  ✓ app.yaml patched (WAREHOUSE=$WAREHOUSE_ID, GSO_CATALOG=$CATALOG, GSO_JOB_ID=${JOB_ID:-<none>}, LAKEBASE_INSTANCE=$LAKEBASE_INSTANCE, LLM_MODEL=$LLM_MODEL, MLFLOW=${MLFLOW_EXPERIMENT_ID:-<disabled>})" || \
   echo "  ⚠ Could not patch app.yaml — config may not be set"
 
 # Sync _metadata.py — required at runtime for the genie_space_optimizer
@@ -550,17 +551,20 @@ except Exception: pass
 fi
 fi
 
-# ── Set app scopes + resources, then deploy ──────────────────────────────
-# Merge existing resources (e.g. manually-added Lakebase) with required ones.
+# ── Configure app scopes and resources ───────────────────────────────────
+# The PATCH API is the mechanism that configures both user_api_scopes and
+# resources on a Databricks App. app.yaml user_api_scopes are documentation
+# only; apps deploy does not apply them.
 echo "  Configuring app scopes and resources..."
 EXISTING_RESOURCES=$(databricks apps get "$APP_NAME" --profile "$PROFILE" -o json 2>/dev/null \
   | python3 -c "import sys,json; print(json.dumps(json.load(sys.stdin).get('resources',[])))" 2>/dev/null || echo "[]")
 
 PATCH_PAYLOAD=$(python3 -c "
 import json
+
 scopes = ['sql', 'dashboards.genie', 'serving.serving-endpoints',
-          'catalog.catalogs:read', 'catalog.schemas:read',
-          'catalog.tables:read', 'files.files']
+          'catalog.catalogs:read', 'catalog.schemas:read',
+          'catalog.tables:read', 'files.files']
 
 # Start with existing resources. The PATCH API replaces all resources,
 # so we must include everything. Preserve all resources that either have
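The elided middle of the PATCH_PAYLOAD snippet builds the by_name map referenced in the final hunk below. A hedged sketch of that merge, assuming a name-keyed upsert as the surrounding comments describe (resource field shapes follow the app.yaml hunk above; the script's exact code may differ):

WAREHOUSE_ID="abc123def456"            # placeholder
EXISTING_RESOURCES='[{"name": "my-lakebase", "description": "manually added"}]'
PATCH_PAYLOAD=$(EXISTING="$EXISTING_RESOURCES" WH="$WAREHOUSE_ID" python3 -c "
import json, os
existing = json.loads(os.environ['EXISTING'])
required = [{'name': 'sql-warehouse',
             'description': 'SQL Warehouse for queries and catalog discovery',
             'sql_warehouse': {'id': os.environ['WH'], 'permission': 'CAN_USE'}}]
by_name = {r.get('name'): r for r in existing}  # start with what already exists
for r in required:
    by_name[r['name']] = r                      # required entries win on name collision
print(json.dumps({'resources': list(by_name.values())}))
")
echo "$PATCH_PAYLOAD"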
@@ -596,7 +600,7 @@ print(json.dumps({'user_api_scopes': scopes, 'resources': list(by_name.values())}))
 ")
 databricks api patch "/api/2.0/apps/$APP_NAME" \
   --profile "$PROFILE" --json "$PATCH_PAYLOAD" 2>/dev/null && \
-  echo "  ✓ App scopes and resources configured" || \
+  echo "  ✓ App scopes and resources configured (sql-warehouse: $WAREHOUSE_ID)" || \
   echo "  ⚠ Could not configure app scopes/resources"
 
 databricks apps deploy "$APP_NAME" --profile "$PROFILE" \
