Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
Show all changes
72 commits
Select commit Hold shift + click to select a range
9e41309
added external-db-sync
aadesh18 Nov 30, 2025
3879001
stopped using yupArray
aadesh18 Nov 30, 2025
937db90
fixed build errors
aadesh18 Nov 30, 2025
3f2a8ef
Revert "fixed build errors"
aadesh18 Nov 30, 2025
dcda402
Update apps/backend/scripts/run-cron-jobs.ts
aadesh18 Dec 1, 2025
9e954b9
Update apps/backend/src/app/api/latest/internal/external-db-sync/poll…
aadesh18 Dec 1, 2025
f0cc311
Update apps/backend/src/app/api/latest/internal/external-db-sync/poll…
aadesh18 Dec 1, 2025
1d7a0de
pr changes
aadesh18 Dec 2, 2025
8c2d1c3
merge dev
BilalG1 Jan 28, 2026
409d89b
Merge remote-tracking branch 'origin/dev' into external-db-sync
BilalG1 Jan 29, 2026
0d2b3b9
fix pnpm lock
BilalG1 Jan 29, 2026
ef8f74e
fix typecheck
BilalG1 Jan 29, 2026
8cdd107
prisma fix
BilalG1 Jan 29, 2026
871fe12
fix tests
BilalG1 Jan 29, 2026
5d6bde2
fix lint
BilalG1 Jan 29, 2026
c4ed09a
fix
BilalG1 Jan 29, 2026
b102db3
fix tests
BilalG1 Jan 29, 2026
2eeb537
fix tests
BilalG1 Jan 29, 2026
c61a2b9
fix test
BilalG1 Jan 30, 2026
ac261d2
increase session test time
BilalG1 Jan 30, 2026
0c1a02e
fix flaky test and external sot sync
BilalG1 Jan 30, 2026
79e330f
comment resolving
BilalG1 Jan 30, 2026
939b1a9
resolve pr comments
BilalG1 Jan 30, 2026
2974c83
retry
BilalG1 Jan 30, 2026
0799a0b
attempt test fixes
BilalG1 Jan 30, 2026
1f67742
fix env
BilalG1 Jan 30, 2026
bd788b4
fix env
BilalG1 Jan 30, 2026
4b7dd53
attempt test fixes
BilalG1 Jan 30, 2026
044377e
fix tests
BilalG1 Jan 30, 2026
744b871
Merge branch 'dev' into external-db-sync
BilalG1 Jan 30, 2026
74c634b
fix lint
BilalG1 Jan 30, 2026
ce5a1bb
fix tests
BilalG1 Jan 30, 2026
2c5440b
fix tests
BilalG1 Jan 30, 2026
f726f61
Merge remote-tracking branch 'origin/dev' into external-db-sync
BilalG1 Jan 31, 2026
665c084
fix tests
BilalG1 Jan 31, 2026
85bb893
Merge branch 'dev' into external-db-sync
BilalG1 Jan 31, 2026
856aaf3
fix tests
BilalG1 Jan 31, 2026
8f3ad45
m
BilalG1 Jan 31, 2026
fe393e9
merge dev
BilalG1 Feb 2, 2026
b9c0ef4
fix lockfile
BilalG1 Feb 2, 2026
b5781a1
Merge remote-tracking branch 'origin/dev' into external-db-sync
BilalG1 Feb 2, 2026
b841f7b
fix tests
BilalG1 Feb 2, 2026
2e0d8de
m
BilalG1 Feb 2, 2026
e5cbc1a
Merge branch 'dev' into external-db-sync
BilalG1 Feb 2, 2026
ba1df26
fix tests
BilalG1 Feb 2, 2026
093eaf0
e2e: isolate external DB sync cleanup per suite (#1148)
BilalG1 Feb 3, 2026
5192875
CI: run dev tests single-worker (#1153)
BilalG1 Feb 3, 2026
686a1e6
Merge branch 'dev' into external-db-sync
BilalG1 Feb 3, 2026
8910138
fixes
BilalG1 Feb 3, 2026
62171dc
single test run
BilalG1 Feb 3, 2026
1038d1a
Merge branch 'dev' into external-db-sync
BilalG1 Feb 3, 2026
3370e63
test fixes (#1155)
BilalG1 Feb 3, 2026
d04e944
concurrent fix
BilalG1 Feb 3, 2026
61f2b79
Merge branch 'external-db-sync' of https://github.com/stack-auth/stac…
BilalG1 Feb 3, 2026
bbac70e
Merge branch 'dev' into external-db-sync
BilalG1 Feb 3, 2026
8e92205
fix routes, external-db dashboard
BilalG1 Feb 4, 2026
3b9c22e
merge dev
BilalG1 Feb 4, 2026
c0a3f7a
replace trigger
BilalG1 Feb 4, 2026
d34a2c7
fusebox
BilalG1 Feb 4, 2026
c91998e
add tracing
BilalG1 Feb 4, 2026
64d9d93
remove old tests
BilalG1 Feb 4, 2026
7fd7886
fix sot test
BilalG1 Feb 4, 2026
719d1c2
Merge remote-tracking branch 'origin/dev' into external-db-sync
BilalG1 Feb 4, 2026
04970c2
Update vercel.json
N2D4 Feb 4, 2026
4abd410
Various fixes
N2D4 Feb 4, 2026
8247481
test: disable direct external db sync in CI
BilalG1 Feb 5, 2026
362e1fb
merge dev
BilalG1 Feb 5, 2026
98d451d
increase test timeout
BilalG1 Feb 5, 2026
8be639d
Merge branch 'dev' into external-db-sync
BilalG1 Feb 5, 2026
caa3dca
remove fusebox sync engine col
BilalG1 Feb 5, 2026
445e889
Merge branch 'external-db-sync' of https://github.com/stack-auth/stac…
BilalG1 Feb 5, 2026
63df87a
fix test
BilalG1 Feb 5, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions apps/backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"typecheck": "tsc --noEmit",
"with-env": "dotenv -c development --",
"with-env:prod": "dotenv -c --",
"dev": "concurrently -n \"dev,codegen,prisma-studio\" -k \"next dev --turbopack --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02\" \"pnpm run codegen:watch\" \"pnpm run prisma-studio\"",
"dev": "concurrently -n \"dev,codegen,prisma-studio,cron-jobs\" -k \"next dev --turbopack --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02\" \"pnpm run codegen:watch\" \"pnpm run prisma-studio\" \"pnpm run run-cron-jobs\"",
"build": "pnpm run codegen && next build",
"docker-build": "pnpm run codegen && next build --experimental-build-mode compile",
"build-self-host-migration-script": "tsup --config scripts/db-migrations.tsup.config.ts",
Expand Down Expand Up @@ -35,7 +35,8 @@
"generate-openapi-fumadocs": "pnpm run with-env tsx scripts/generate-openapi-fumadocs.ts",
"generate-keys": "pnpm run with-env tsx scripts/generate-keys.ts",
"db-seed-script": "pnpm run db:seed",
"verify-data-integrity": "pnpm run with-env tsx scripts/verify-data-integrity.ts"
"verify-data-integrity": "pnpm run with-env tsx scripts/verify-data-integrity.ts",
"run-cron-jobs": "pnpm run with-env tsx scripts/run-cron-jobs.ts"
},
"prisma": {
"seed": "pnpm run db-seed-script"
Expand Down
Comment thread
BilalG1 marked this conversation as resolved.
Outdated
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-- Global sequence shared by all externally-synced tables; presumably used to
-- order changes across tables for the external-db sync engine.
-- NOTE(review): INCREMENT BY 11 leaves gaps between consecutive values —
-- confirm this is intentional and not a typo for 1.
CREATE SEQUENCE global_seq_id
AS BIGINT
START 1
INCREMENT BY 11
NO MINVALUE
NO MAXVALUE;

-- SPLIT_STATEMENT_SENTINEL
-- Nullable: existing rows get their value later (see backfill_null_sequence_ids).
ALTER TABLE "ContactChannel" ADD COLUMN "sequenceId" BIGINT;

-- SPLIT_STATEMENT_SENTINEL
ALTER TABLE "ProjectUser" ADD COLUMN "sequenceId" BIGINT;

-- SPLIT_STATEMENT_SENTINEL
-- Unique: each assigned sequence value identifies exactly one row change.
CREATE UNIQUE INDEX "ContactChannel_sequenceId_key" ON "ContactChannel"("sequenceId");

-- SPLIT_STATEMENT_SENTINEL
CREATE UNIQUE INDEX "ProjectUser_sequenceId_key" ON "ProjectUser"("sequenceId");
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@

-- Outbox of HTTP requests to be dispatched asynchronously; "qstashOptions"
-- holds the request payload (url/body — written by enqueue_tenant_sync), and
-- "fulfilledAt" is set once the request has been sent.
CREATE TABLE "OutgoingRequest" (
"id" UUID NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"qstashOptions" JSONB NOT NULL,
"fulfilledAt" TIMESTAMP(3),

CONSTRAINT "OutgoingRequest_pkey" PRIMARY KEY ("id")
);


-- Lets consumers find pending requests ("fulfilledAt" IS NULL) quickly.
CREATE INDEX "OutgoingRequest_fulfilledAt_idx" ON "OutgoingRequest"("fulfilledAt");

Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
-- Tombstone table: BEFORE DELETE triggers (see the log_deleted_row migration)
-- copy deleted rows here so deletions can be propagated to external databases.
CREATE TABLE "DeletedRow" (
"id" UUID NOT NULL,
"tenancyId" UUID NOT NULL,
"tableName" TEXT NOT NULL,
"sequenceId" BIGINT,
"primaryKey" JSONB NOT NULL,
"data" JSONB,
"deletedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"fulfilledAt" TIMESTAMP(3),

CONSTRAINT "DeletedRow_pkey" PRIMARY KEY ("id")
);
Comment thread
BilalG1 marked this conversation as resolved.
Outdated


-- sequenceId is assigned later by the sequencer; unique once assigned.
CREATE UNIQUE INDEX "DeletedRow_sequenceId_key" ON "DeletedRow"("sequenceId");

CREATE INDEX "DeletedRow_tableName_idx" ON "DeletedRow"("tableName");

CREATE INDEX "DeletedRow_tenancyId_idx" ON "DeletedRow"("tenancyId");


Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
-- Flags rows whose "sequenceId" must be (re)assigned by the sequencer.
-- Defaults to TRUE so every existing row is picked up for the initial backfill.
ALTER TABLE "ProjectUser" ADD COLUMN "shouldUpdateSequenceId" BOOLEAN NOT NULL DEFAULT TRUE;

-- SPLIT_STATEMENT_SENTINEL
ALTER TABLE "ContactChannel" ADD COLUMN "shouldUpdateSequenceId" BOOLEAN NOT NULL DEFAULT TRUE;

-- SPLIT_STATEMENT_SENTINEL
ALTER TABLE "DeletedRow" ADD COLUMN "shouldUpdateSequenceId" BOOLEAN NOT NULL DEFAULT TRUE;

Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
-- Partial indexes: only rows with the flag set are indexed, so the sequencer's
-- "WHERE shouldUpdateSequenceId = TRUE" scans stay cheap and the index stays
-- small once most rows have been processed (flag = FALSE).
CREATE INDEX "ProjectUser_shouldUpdateSequenceId_idx" ON "ProjectUser"("shouldUpdateSequenceId") WHERE "shouldUpdateSequenceId" = TRUE;

-- SPLIT_STATEMENT_SENTINEL
CREATE INDEX "ContactChannel_shouldUpdateSequenceId_idx" ON "ContactChannel"("shouldUpdateSequenceId") WHERE "shouldUpdateSequenceId" = TRUE;

-- SPLIT_STATEMENT_SENTINEL
CREATE INDEX "DeletedRow_shouldUpdateSequenceId_idx" ON "DeletedRow"("shouldUpdateSequenceId") WHERE "shouldUpdateSequenceId" = TRUE;

Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
-- SINGLE_STATEMENT_SENTINEL
-- When a row whose sequenceId was already assigned gets updated, re-flag it so
-- the sequencer assigns a fresh sequenceId (the change must be synced again).
CREATE FUNCTION reset_sequence_id_on_update()
RETURNS TRIGGER AS $$
BEGIN
NEW."shouldUpdateSequenceId" := TRUE;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- SPLIT_STATEMENT_SENTINEL
-- The WHEN clause skips rows that are already flagged, avoiding redundant
-- trigger invocations.
CREATE TRIGGER mark_should_update_sequence_id_project_user
BEFORE UPDATE ON "ProjectUser"
FOR EACH ROW
WHEN (OLD."shouldUpdateSequenceId" = FALSE)
EXECUTE FUNCTION reset_sequence_id_on_update();

-- SPLIT_STATEMENT_SENTINEL
CREATE TRIGGER mark_should_update_sequence_id_contact_channel
BEFORE UPDATE ON "ContactChannel"
FOR EACH ROW
WHEN (OLD."shouldUpdateSequenceId" = FALSE)
EXECUTE FUNCTION reset_sequence_id_on_update();

-- SPLIT_STATEMENT_SENTINEL
CREATE TRIGGER mark_should_update_sequence_id_deleted_row
BEFORE UPDATE ON "DeletedRow"
FOR EACH ROW
WHEN (OLD."shouldUpdateSequenceId" = FALSE)
EXECUTE FUNCTION reset_sequence_id_on_update();

Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
-- SINGLE_STATEMENT_SENTINEL
-- Row-level BEFORE DELETE trigger function: snapshots the deleted row into
-- "DeletedRow" so the deletion can be replayed to external databases.
CREATE FUNCTION log_deleted_row()
RETURNS TRIGGER AS $function$
DECLARE
row_data jsonb;
pk jsonb := '{}'::jsonb;
col record;
BEGIN
-- Full JSON image of the row being deleted.
row_data := to_jsonb(OLD);

-- Look up the table's primary-key column(s) from the catalog so this single
-- function works for any table it is attached to (composite PKs included).
FOR col IN
SELECT a.attname
FROM pg_index i
JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey)
WHERE i.indrelid = TG_RELID
AND i.indisprimary
LOOP
pk := pk || jsonb_build_object(col.attname, row_data -> col.attname);
END LOOP;

-- shouldUpdateSequenceId = TRUE so the sequencer assigns this tombstone a
-- sequenceId (see backfill_null_sequence_ids in a later migration).
INSERT INTO "DeletedRow" (
"id",
"tenancyId",
"tableName",
"primaryKey",
"data",
"deletedAt",
"shouldUpdateSequenceId"
)
VALUES (
gen_random_uuid(),
OLD."tenancyId",
TG_TABLE_NAME,
pk,
row_data,
NOW(),
TRUE
);

-- Returning OLD lets the DELETE proceed unchanged.
RETURN OLD;
END;
$function$ LANGUAGE plpgsql;

-- SPLIT_STATEMENT_SENTINEL
CREATE TRIGGER log_deleted_row_project_user
BEFORE DELETE ON "ProjectUser"
FOR EACH ROW
EXECUTE FUNCTION log_deleted_row();

-- SPLIT_STATEMENT_SENTINEL
CREATE TRIGGER log_deleted_row_contact_channel
BEFORE DELETE ON "ContactChannel"
FOR EACH ROW
EXECUTE FUNCTION log_deleted_row();

Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
-- SINGLE_STATEMENT_SENTINEL
-- Queues one sync-engine request for the given tenant into "OutgoingRequest",
-- unless an unfulfilled request for the same tenant is already pending
-- (best-effort dedupe).
-- NOTE(review): the NOT EXISTS check and the INSERT are not atomic under
-- concurrent callers, so duplicate queue entries are still possible — confirm
-- the consumer tolerates duplicates.
CREATE FUNCTION enqueue_tenant_sync(p_tenant_id uuid)
RETURNS void AS $$
BEGIN
INSERT INTO "OutgoingRequest" ("id", "createdAt", "qstashOptions", "fulfilledAt")
SELECT
gen_random_uuid(),
NOW(),
json_build_object(
'url', '/api/latest/internal/external-db-sync/sync-engine',
'body', json_build_object('tenantId', p_tenant_id)
),
NULL
WHERE NOT EXISTS (
SELECT 1
FROM "OutgoingRequest"
WHERE "fulfilledAt" IS NULL
AND ("qstashOptions"->'body'->>'tenantId')::uuid = p_tenant_id
);
END;
$$ LANGUAGE plpgsql;
-- SPLIT_STATEMENT_SENTINEL
-- SINGLE_STATEMENT_SENTINEL
-- Assigns sequenceIds from global_seq_id — in batches of up to 1000 rows per
-- table per call — to rows that are new ("sequenceId" IS NULL) or were
-- modified since their last assignment ("shouldUpdateSequenceId" = TRUE),
-- then enqueues one sync request per affected tenant.
-- NOTE(review): the SELECT ... LIMIT 1000 has no ORDER BY, so each batch is
-- arbitrary; confirm repeated invocations are guaranteed to drain the backlog.
CREATE FUNCTION backfill_null_sequence_ids()
Comment thread
BilalG1 marked this conversation as resolved.
Outdated
RETURNS void AS $$
DECLARE
v_tenancy_id uuid;
BEGIN
-- ProjectUser: claim a batch (SKIP LOCKED so concurrent runs don't block
-- each other), stamp fresh sequence values, collect the tenants touched.
FOR v_tenancy_id IN
WITH rows_to_update AS (
SELECT "tenancyId", "projectUserId"
FROM "ProjectUser"
WHERE "shouldUpdateSequenceId" = TRUE
OR "sequenceId" IS NULL
LIMIT 1000
FOR UPDATE SKIP LOCKED
),
updated_rows AS (
UPDATE "ProjectUser" pu
SET "sequenceId" = nextval('global_seq_id'),
"shouldUpdateSequenceId" = FALSE
FROM rows_to_update r
WHERE pu."tenancyId" = r."tenancyId"
AND pu."projectUserId" = r."projectUserId"
RETURNING pu."tenancyId"
)
SELECT DISTINCT "tenancyId" FROM updated_rows
LOOP
PERFORM enqueue_tenant_sync(v_tenancy_id);
END LOOP;

-- ContactChannel: same pattern, keyed by (tenancyId, projectUserId, id).
FOR v_tenancy_id IN
WITH rows_to_update AS (
SELECT "tenancyId", "projectUserId", "id"
FROM "ContactChannel"
WHERE "shouldUpdateSequenceId" = TRUE
OR "sequenceId" IS NULL
LIMIT 1000
FOR UPDATE SKIP LOCKED
),
updated_rows AS (
UPDATE "ContactChannel" cc
SET "sequenceId" = nextval('global_seq_id'),
"shouldUpdateSequenceId" = FALSE
FROM rows_to_update r
WHERE cc."tenancyId" = r."tenancyId"
AND cc."projectUserId" = r."projectUserId"
AND cc."id" = r."id"
RETURNING cc."tenancyId"
)
SELECT DISTINCT "tenancyId" FROM updated_rows
LOOP
PERFORM enqueue_tenant_sync(v_tenancy_id);
END LOOP;

-- DeletedRow tombstones: same pattern, keyed by "id" alone.
FOR v_tenancy_id IN
WITH rows_to_update AS (
SELECT "id", "tenancyId"
FROM "DeletedRow"
WHERE "shouldUpdateSequenceId" = TRUE
OR "sequenceId" IS NULL
LIMIT 1000
FOR UPDATE SKIP LOCKED
),
updated_rows AS (
UPDATE "DeletedRow" dr
SET "sequenceId" = nextval('global_seq_id'),
"shouldUpdateSequenceId" = FALSE
FROM rows_to_update r
WHERE dr."id" = r."id"
RETURNING dr."tenancyId"
)
SELECT DISTINCT "tenancyId" FROM updated_rows
LOOP
PERFORM enqueue_tenant_sync(v_tenancy_id);
END LOOP;

END;
$$ LANGUAGE plpgsql;

37 changes: 36 additions & 1 deletion apps/backend/prisma/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,9 @@ model ProjectUser {
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt

sequenceId BigInt? @unique
shouldUpdateSequenceId Boolean @default(true)
Comment thread
BilalG1 marked this conversation as resolved.
Outdated

displayName String?
serverMetadata Json?
clientReadOnlyMetadata Json?
Expand Down Expand Up @@ -252,6 +255,9 @@ model ContactChannel {
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt

sequenceId BigInt? @unique
shouldUpdateSequenceId Boolean @default(true)
Comment thread
BilalG1 marked this conversation as resolved.
Outdated

type ContactChannelType
isPrimary BooleanTrue?
usedForAuth BooleanTrue?
Expand Down Expand Up @@ -861,7 +867,7 @@ model CacheEntry {

model SubscriptionInvoice {
id String @default(uuid()) @db.Uuid
tenancyId String @db.Uuid
tenancyId String @db.Uuid
stripeSubscriptionId String
stripeInvoiceId String
isSubscriptionCreationInvoice Boolean
Expand All @@ -874,3 +880,32 @@ model SubscriptionInvoice {
@@id([tenancyId, id])
@@unique([tenancyId, stripeInvoiceId])
}

// Outbox row for an HTTP request to be dispatched asynchronously; mirrors the
// "OutgoingRequest" table created in the external-db-sync migrations.
model OutgoingRequest {
id String @id @default(uuid()) @db.Uuid

createdAt DateTime @default(now())

// Request payload (url/body) — written by enqueue_tenant_sync() in SQL.
qstashOptions Json
// Set once the request has been dispatched; NULL = still pending.
fulfilledAt DateTime?

@@index([fulfilledAt])
Comment thread
BilalG1 marked this conversation as resolved.
Outdated
}

// Tombstone for a deleted row; populated by the log_deleted_row DELETE
// triggers so deletions can be propagated to external databases.
model DeletedRow {
id String @id @default(uuid()) @db.Uuid
tenancyId String @db.Uuid
tableName String

// Assigned later by the sequencer (backfill_null_sequence_ids); unique once set.
sequenceId BigInt? @unique
shouldUpdateSequenceId Boolean @default(true)
Comment thread
BilalG1 marked this conversation as resolved.
Outdated

// JSON primary key and full row image captured at deletion time.
primaryKey Json
data Json?

deletedAt DateTime @default(now())
fulfilledAt DateTime?

Comment thread
BilalG1 marked this conversation as resolved.
@@index([tableName])
@@index([tenancyId])
Comment thread
BilalG1 marked this conversation as resolved.
}
Comment thread
BilalG1 marked this conversation as resolved.
40 changes: 40 additions & 0 deletions apps/backend/scripts/run-cron-jobs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env";
import { StackAssertionError } from "@stackframe/stack-shared/dist/utils/errors";
import { runAsynchronously, wait } from "@stackframe/stack-shared/dist/utils/promises";
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Outdated
Comment thread
vercel[bot] marked this conversation as resolved.
Outdated

// Internal cron endpoints to invoke repeatedly, relative to the local backend.
const endpoints = ["sequencer", "poller"].map(
  (job) => `/api/latest/internal/external-db-sync/${job}`,
);

async function main() {
console.log("Starting cron jobs...");
const cronSecret = getEnvVariable('CRON_SECRET');

const baseUrl = `http://localhost:${getEnvVariable('NEXT_PUBLIC_STACK_PORT_PREFIX', '81')}02`;

const run = (endpoint: string) => runAsynchronously(async () => {
console.log(`Running ${endpoint}...`);
const res = await fetch(`${baseUrl}${endpoint}`, {
headers: { 'Authorization': `Bearer ${cronSecret}` },
});
if (!res.ok) throw new StackAssertionError(`Failed to call ${endpoint}: ${res.status} ${res.statusText}\n${await res.text()}`, { res });
console.log(`${endpoint} completed.`);
});

for (const endpoint of endpoints) {
runAsynchronously(async () => {
while (true) {
run(endpoint);
Comment thread
BilalG1 marked this conversation as resolved.
Outdated
Comment thread
BilalG1 marked this conversation as resolved.
Outdated
// Vercel only guarantees minute-granularity for cron jobs, so we randomize the interval
await wait(Math.random() * 120_000);
}
});
}
Comment thread
aadesh18 marked this conversation as resolved.
Comment thread
BilalG1 marked this conversation as resolved.
}

// Script entry point: log any unhandled failure and exit non-zero.
// eslint-disable-next-line no-restricted-syntax
main().catch((error: unknown) => {
  console.error(error);
  process.exit(1);
});
Loading
Loading