Skip to content

Commit b885ffb

Browse files
committed
refactor(api, web): update environment variable bindings and enhance database reset commands for improved resource management
1 parent 7f30ae6 commit b885ffb

9 files changed

Lines changed: 34 additions & 49 deletions

File tree

apps/api/package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,13 @@
1212
"fix": "eslint --fix . && pnpm run format",
1313
"format": "prettier --write .",
1414
"format:check": "prettier --check .",
15-
"db:reset": "wrangler d1 execute DB --local --command=\"DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\"",
15+
"db:reset": "wrangler d1 execute DB --local --command=\"DROP TABLE IF EXISTS datasets; DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\"",
1616
"db:migrate": "wrangler d1 migrations apply DB --local",
17-
"db:preview:reset": "wrangler d1 execute DB --remote --preview --command=\"DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\"",
17+
"db:preview:reset": "wrangler d1 execute DB --remote --preview --command=\"DROP TABLE IF EXISTS datasets; DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\"",
1818
"db:preview:migrate": "wrangler d1 migrations apply DB --remote --preview",
1919
"db:generate": "drizzle-kit generate",
2020
"db:prod:migrate": "wrangler d1 migrations apply DB --remote --env production",
21-
"db:prod:reset": "wrangler d1 execute DB --remote --env production --command=\"DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\" && wrangler d1 migrations apply DB --remote --env production"
21+
"db:prod:reset": "wrangler d1 execute DB --remote --env production --command=\"DROP TABLE IF EXISTS datasets; DROP TABLE IF EXISTS executions; DROP TABLE IF EXISTS cron_triggers; DROP TABLE IF EXISTS deployments; DROP TABLE IF EXISTS memberships; DROP TABLE IF EXISTS api_keys; DROP TABLE IF EXISTS workflows; DROP TABLE IF EXISTS users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS d1_migrations;\" && wrangler d1 migrations apply DB --remote --env production"
2222
},
2323
"devDependencies": {
2424
"@cloudflare/puppeteer": "^1.0.2",

apps/api/src/context.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,10 @@ import { RuntimeParams } from "./runtime/runtime";
88
export interface Bindings {
99
DB: D1Database;
1010
EXECUTE: Workflow<RuntimeParams>;
11-
BUCKET: R2Bucket;
11+
RESSOURCES: R2Bucket;
1212
DATASETS: R2Bucket;
1313
DATASETS_AUTORAG: string;
1414
AI: Ai;
15-
KV: KVNamespace;
1615
BROWSER: Fetcher;
1716
COMPUTE: AnalyticsEngineDataset;
1817
WEB_HOST: string;

apps/api/src/routes/objects.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ objectRoutes.get("/", apiKeyOrJwtMiddleware, async (c) => {
3434
}
3535

3636
try {
37-
const objectStore = new ObjectStore(c.env.BUCKET);
37+
const objectStore = new ObjectStore(c.env.RESSOURCES);
3838
const reference: ObjectReference = { id: objectId, mimeType };
3939
const result = await objectStore.readObject(reference);
4040

@@ -111,7 +111,7 @@ objectRoutes.post("/", jwtMiddleware, async (c) => {
111111
}
112112

113113
try {
114-
const objectStore = new ObjectStore(c.env.BUCKET);
114+
const objectStore = new ObjectStore(c.env.RESSOURCES);
115115
const buffer = await file.arrayBuffer();
116116
const data = new Uint8Array(buffer);
117117
const reference = await objectStore.writeObject(
@@ -142,7 +142,7 @@ objectRoutes.delete("/:id", jwtMiddleware, async (c) => {
142142
}
143143

144144
try {
145-
const objectStore = new ObjectStore(c.env.BUCKET);
145+
const objectStore = new ObjectStore(c.env.RESSOURCES);
146146
const reference: ObjectReference = { id: objectId, mimeType };
147147

148148
// Check if the object exists and if the user has permission to delete it
@@ -186,7 +186,7 @@ objectRoutes.get("/metadata/:id", jwtMiddleware, async (c) => {
186186
}
187187

188188
try {
189-
const objectStore = new ObjectStore(c.env.BUCKET);
189+
const objectStore = new ObjectStore(c.env.RESSOURCES);
190190
const reference: ObjectReference = { id: objectId, mimeType };
191191

192192
const result = await objectStore.readObject(reference);
@@ -232,7 +232,7 @@ objectRoutes.get("/list", jwtMiddleware, async (c) => {
232232
}
233233

234234
try {
235-
const objectStore = new ObjectStore(c.env.BUCKET);
235+
const objectStore = new ObjectStore(c.env.RESSOURCES);
236236
const objectList = await objectStore.listObjects(organizationId);
237237

238238
const response: ListObjectsResponse = { objects: objectList };

apps/api/src/routes/public.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ publicRoutes.get("/objects", async (c) => {
3030
}
3131

3232
try {
33-
const objectStore = new ObjectStore(c.env.BUCKET);
33+
const objectStore = new ObjectStore(c.env.RESSOURCES);
3434
const reference: ObjectReference = { id: objectId, mimeType };
3535
const result = await objectStore.readObject(reference);
3636

@@ -139,7 +139,7 @@ publicRoutes.get("/images/:key", async (c) => {
139139
const key = c.req.param("key");
140140

141141
try {
142-
const object = await c.env.BUCKET.get("images/" + key);
142+
const object = await c.env.RESSOURCES.get("images/" + key);
143143
const mimeType = object?.httpMetadata?.contentType;
144144

145145
if (!object || !mimeType) {

apps/api/src/runtime/runtime.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -472,7 +472,7 @@ export class Runtime extends WorkflowEntrypoint<Bindings, RuntimeParams> {
472472
if (!node) throw new Error(`Node ${nodeIdentifier} not found`);
473473

474474
const processed: Record<string, unknown> = {};
475-
const objectStore = new ObjectStore(this.env.BUCKET);
475+
const objectStore = new ObjectStore(this.env.RESSOURCES);
476476

477477
for (const definition of node.inputs) {
478478
const { name, type, required } = definition;
@@ -516,7 +516,7 @@ export class Runtime extends WorkflowEntrypoint<Bindings, RuntimeParams> {
516516
if (!node) throw new Error(`Node ${nodeIdentifier} not found`);
517517

518518
const processed: Record<string, unknown> = {};
519-
const objectStore = new ObjectStore(this.env.BUCKET);
519+
const objectStore = new ObjectStore(this.env.RESSOURCES);
520520

521521
for (const definition of node.outputs) {
522522
const { name, type } = definition;

apps/api/src/utils/og-image-generator.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ export async function generateExecutionOgImage({
4242
});
4343

4444
const key = `images/og-execution-${executionId}.jpeg`;
45-
await env.BUCKET.put(key, screenshotBuffer, {
45+
await env.RESSOURCES.put(key, screenshotBuffer, {
4646
httpMetadata: {
4747
contentType: "image/jpeg",
4848
cacheControl: "public, max-age=31536000",

apps/api/wrangler.jsonc

Lines changed: 18 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -33,42 +33,33 @@
3333
"d1_databases": [
3434
{
3535
"binding": "DB",
36-
"database_name": "workflow-development",
37-
"database_id": "a89e76fb-ac20-49fc-a797-6a0f08a1dfd7",
38-
// TODO: Update this to the correct preview database id
39-
"preview_database_id": "01b80cb4-7115-47e0-a26c-f7970e1a6605",
36+
"database_name": "dafthunk-development",
37+
"database_id": "05b1b8e5-d566-47b8-9085-161de45bbd25",
38+
"preview_database_id": "05b1b8e5-d566-47b8-9085-161de45bbd25",
4039
"migrations_dir": "./src/db/migrations",
4140
},
4241
],
4342
"r2_buckets": [
4443
{
45-
"binding": "BUCKET",
46-
"bucket_name": "workflow-development",
47-
// TODO: Update this to the correct preview bucket name
48-
"preview_bucket_name": "workflow-development-preview",
44+
"binding": "RESSOURCES",
45+
"bucket_name": "dafthunk-ressources-development",
46+
"preview_bucket_name": "dafthunk-ressources-development",
4947
},
5048
{
5149
"binding": "DATASETS",
52-
"bucket_name": "dafthunk-datasets",
53-
},
54-
],
55-
"kv_namespaces": [
56-
{
57-
"binding": "KV",
58-
"id": "868c7fd3732e42c4ad1c6f65939d1bfc",
59-
// TODO: Update this to the correct preview kv namespace id
60-
"preview_id": "e1547dbff039449eb2eb89dbc9a0376c",
50+
"bucket_name": "dafthunk-datasets-development",
51+
"preview_bucket_name": "dafthunk-datasets-development",
6152
},
6253
],
6354
"workflows": [
6455
{
65-
"name": "workflows-starter",
56+
"name": "dafthunk-api",
6657
"binding": "EXECUTE",
6758
"class_name": "Runtime",
6859
},
6960
],
7061
"analytics_engine_datasets": [
71-
{ "binding": "COMPUTE", "dataset": "dafthunk-compute" },
62+
{ "binding": "COMPUTE", "dataset": "dafthunk-compute-development" },
7263
],
7364

7465
/**
@@ -88,7 +79,7 @@
8879
},
8980
"workflows": [
9081
{
91-
"name": "workflows-starter",
82+
"name": "dafthunk-api",
9283
"binding": "EXECUTE",
9384
"class_name": "Runtime",
9485
},
@@ -102,31 +93,24 @@
10293
"d1_databases": [
10394
{
10495
"binding": "DB",
105-
"database_name": "workflow-production",
106-
"database_id": "9c1418f1-4099-4e42-9143-6d4ee2069f60",
107-
"preview_database_id": "9c1418f1-4099-4e42-9143-6d4ee2069f60",
96+
"database_name": "dafthunk-production",
97+
"database_id": "1d40113a-d6ec-4a46-8ba8-7958ee15b79a",
98+
"preview_database_id": "1d40113a-d6ec-4a46-8ba8-7958ee15b79a",
10899
"migrations_dir": "./src/db/migrations",
109100
},
110101
],
111102
"r2_buckets": [
112103
{
113-
"binding": "BUCKET",
114-
"bucket_name": "workflow-production",
104+
"binding": "RESSOURCES",
105+
"bucket_name": "dafthunk-ressources-production",
115106
},
116107
{
117108
"binding": "DATASETS",
118-
"bucket_name": "dafthunk-datasets",
119-
},
120-
],
121-
"kv_namespaces": [
122-
{
123-
"binding": "KV",
124-
"id": "dd17ff67a75c41768dc30078eec37c46",
125-
"preview_id": "dd17ff67a75c41768dc30078eec37c46",
109+
"bucket_name": "dafthunk-datasets-production",
126110
},
127111
],
128112
"analytics_engine_datasets": [
129-
{ "binding": "COMPUTE", "dataset": "dafthunk-compute" },
113+
{ "binding": "COMPUTE", "dataset": "dafthunk-compute-production" },
130114
],
131115
"vars": {
132116
"WEB_HOST": "https://www.dafthunk.com",

apps/web/package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
"fix": "eslint --fix . && pnpm run format",
1212
"test": "vitest run",
1313
"dev": "tsx watch server.ts",
14+
"dev:preview": "tsx watch server.ts",
1415
"preview": "pnpm run build && wrangler pages dev ./dist --port 3000",
1516
"deploy": "wrangler pages deploy ./dist",
1617
"format": "prettier --write .",

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
"preinstall": "npx only-allow pnpm",
77
"scripts": {
88
"dev": "pnpm --parallel dev",
9+
"dev:preview": "pnpm --parallel dev:preview",
910
"format": "pnpm --parallel format",
1011
"lint": "pnpm run typecheck && pnpm --parallel lint && pnpm run knip",
1112
"typecheck": "pnpm --parallel typecheck",

0 commit comments

Comments (0)