Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 3 additions & 4 deletions internal/db/declarative/declarative.go
Original file line number Diff line number Diff line change
Expand Up @@ -235,10 +235,9 @@ func WriteDeclarativeSchemas(output diff.DeclarativeOutput, fsys afero.Fs) error
return err
}
}
// When pg-delta has its own config section, the declarative path is the single
// source of truth there; do not overwrite [db.migrations] schema_paths.
if utils.IsPgDeltaEnabled() && utils.Config.Experimental.PgDelta != nil &&
len(utils.Config.Experimental.PgDelta.DeclarativeSchemaPath) > 0 {
// When pg-delta is enabled, the declarative directory (default or configured)
// is the source of truth; do not overwrite [db.migrations] schema_paths.
if utils.IsPgDeltaEnabled() {
return nil
}
utils.Config.Db.Migrations.SchemaPaths = []string{
Expand Down
60 changes: 60 additions & 0 deletions internal/db/declarative/declarative_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,34 @@ func TestWriteDeclarativeSchemas(t *testing.T) {
assert.Contains(t, string(cfg), `"database"`)
}

// TestWriteDeclarativeSchemasSkipsConfigUpdateWhenPgDeltaEnabled verifies
// that enabling pg-delta leaves config.toml byte-identical while schema
// files are still exported to the default declarative directory.
func TestWriteDeclarativeSchemasSkipsConfigUpdateWhenPgDeltaEnabled(t *testing.T) {
	fsys := afero.NewMemMapFs()
	configToml := "[db]\n"
	require.NoError(t, afero.WriteFile(fsys, utils.ConfigPath, []byte(configToml), 0644))
	// Enable pg-delta for the duration of this test only; restore on cleanup.
	prev := utils.Config.Experimental.PgDelta
	utils.Config.Experimental.PgDelta = &config.PgDeltaConfig{Enabled: true}
	t.Cleanup(func() {
		utils.Config.Experimental.PgDelta = prev
	})

	out := diff.DeclarativeOutput{
		Files: []diff.DeclarativeFile{
			{Path: "schemas/public/tables/users.sql", SQL: "create table users(id bigint);"},
		},
	}

	require.NoError(t, WriteDeclarativeSchemas(out, fsys))

	// The schema file must be written under the declarative directory.
	usersPath := filepath.Join(utils.DeclarativeDir, "schemas", "public", "tables", "users.sql")
	gotSQL, err := afero.ReadFile(fsys, usersPath)
	require.NoError(t, err)
	assert.Equal(t, "create table users(id bigint);", string(gotSQL))

	// config.toml must remain untouched: pg-delta owns schema_paths.
	gotConfig, err := afero.ReadFile(fsys, utils.ConfigPath)
	require.NoError(t, err)
	assert.Equal(t, configToml, string(gotConfig))
}

func TestTryCacheMigrationsCatalogWritesPrefixedCache(t *testing.T) {
fsys := afero.NewMemMapFs()
original := utils.Config.Experimental.PgDelta
Expand Down Expand Up @@ -146,6 +174,38 @@ func TestWriteDeclarativeSchemasUsesConfiguredDir(t *testing.T) {
assert.Contains(t, string(cfg), `db/decl`)
}

// TestWriteDeclarativeSchemasSkipsConfigUpdateForPgDeltaCustomDir verifies
// that a custom pg-delta declarative directory is honored for exports while
// config.toml is left byte-identical.
func TestWriteDeclarativeSchemasSkipsConfigUpdateForPgDeltaCustomDir(t *testing.T) {
	fsys := afero.NewMemMapFs()
	configToml := "[db]\n"
	require.NoError(t, afero.WriteFile(fsys, utils.ConfigPath, []byte(configToml), 0644))
	// Point pg-delta at a non-default declarative directory; restore on cleanup.
	prev := utils.Config.Experimental.PgDelta
	utils.Config.Experimental.PgDelta = &config.PgDeltaConfig{
		Enabled:               true,
		DeclarativeSchemaPath: filepath.Join(utils.SupabaseDirPath, "db", "decl"),
	}
	t.Cleanup(func() {
		utils.Config.Experimental.PgDelta = prev
	})

	out := diff.DeclarativeOutput{
		Files: []diff.DeclarativeFile{
			{Path: "cluster/roles.sql", SQL: "create role app;"},
		},
	}

	require.NoError(t, WriteDeclarativeSchemas(out, fsys))

	// The exported file must land under the configured custom directory.
	gotRoles, err := afero.ReadFile(fsys, filepath.Join(utils.SupabaseDirPath, "db", "decl", "cluster", "roles.sql"))
	require.NoError(t, err)
	assert.Equal(t, "create role app;", string(gotRoles))

	// config.toml must remain untouched.
	gotConfig, err := afero.ReadFile(fsys, utils.ConfigPath)
	require.NoError(t, err)
	assert.Equal(t, configToml, string(gotConfig))
}

func TestWriteDeclarativeSchemasRejectsUnsafePath(t *testing.T) {
// Export paths must stay within supabase/declarative to prevent traversal.
fsys := afero.NewMemMapFs()
Expand Down
9 changes: 8 additions & 1 deletion internal/db/diff/templates/pgdelta.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,14 @@ const target = Deno.env.get("TARGET");

const includedSchemas = Deno.env.get("INCLUDED_SCHEMAS");
if (includedSchemas) {
supabase.filter = { schema: includedSchemas.split(",") };
const schemas = includedSchemas.split(",");
const schemaFilter = {
or: [{ "*/schema": schemas }, { "schema/name": schemas }],
};
// CompositionPattern `and` is valid FilterDSL; Deno's structural typing is strict on `or` branches.
supabase.filter = {
and: [supabase.filter!, schemaFilter],
} as typeof supabase.filter;
}

const formatOptionsRaw = Deno.env.get("FORMAT_OPTIONS");
Expand Down
17 changes: 10 additions & 7 deletions internal/db/diff/templates/pgdelta_declarative_export.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,23 @@ async function resolveInput(ref: string | undefined) {
const source = Deno.env.get("SOURCE");
const target = Deno.env.get("TARGET");
supabase.filter = {
// Also allow dropped extensions from migrations to be capted in the declarative schema export
// Also allow dropped extensions from migrations to be captured in the declarative schema export
// TODO: fix upstream bug into pgdelta supabase integration
or: [
...supabase.filter.or,
{ type: "extension", operation: "drop", scope: "object" },
...supabase.filter!.or!,
{ objectType: "extension", operation: "drop", scope: "object" },
],
};

const includedSchemas = Deno.env.get("INCLUDED_SCHEMAS");
if (includedSchemas) {
const schemaFilter = { schema: includedSchemas.split(",") };
supabase.filter = supabase.filter
? { and: [supabase.filter, schemaFilter] }
: schemaFilter;
const schemas = includedSchemas.split(",");
const schemaFilter = {
or: [{ "*/schema": schemas }, { "schema/name": schemas }],
};
supabase.filter = {
and: [supabase.filter!, schemaFilter],
} as unknown as typeof supabase.filter;
}

const formatOptionsRaw = Deno.env.get("FORMAT_OPTIONS");
Expand Down
29 changes: 21 additions & 8 deletions internal/pgdelta/apply.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,27 @@ import (
//go:embed templates/pgdelta_declarative_apply.ts
var pgDeltaDeclarativeApplyScript string

// StuckStatement models a single statement that pg-delta could not apply,
// as emitted by pgdelta_declarative_apply.ts. The full statement payload is
// retained as a raw JSON object so future fields don't break parsing.
type StuckStatement struct {
	// Statement is the raw JSON object describing the statement as reported
	// by the apply script; kept opaque so upstream schema changes do not
	// break decoding.
	Statement json.RawMessage `json:"statement"`
	// Code is the error code reported by the apply script — presumably the
	// Postgres SQLSTATE; confirm against pgdelta_declarative_apply.ts.
	Code string `json:"code"`
	// Message is the human-readable error message for the failure.
	Message string `json:"message"`
	// IsDependencyError reports whether the apply script classified the
	// failure as a dependency-ordering error.
	IsDependencyError bool `json:"isDependencyError"`
}

// ApplyResult models the JSON payload emitted by pgdelta_declarative_apply.ts.
//
// The fields are surfaced to provide concise CLI feedback after apply runs.
type ApplyResult struct {
Status string `json:"status"`
TotalStatements int `json:"totalStatements"`
TotalRounds int `json:"totalRounds"`
TotalApplied int `json:"totalApplied"`
TotalSkipped int `json:"totalSkipped"`
Errors []string `json:"errors"`
StuckStatements []string `json:"stuckStatements"`
Status string `json:"status"`
TotalStatements int `json:"totalStatements"`
TotalRounds int `json:"totalRounds"`
TotalApplied int `json:"totalApplied"`
TotalSkipped int `json:"totalSkipped"`
Errors []string `json:"errors"`
StuckStatements []StuckStatement `json:"stuckStatements"`
}

// ApplyDeclarative applies files from supabase/declarative to the target
Expand Down Expand Up @@ -71,7 +81,10 @@ func ApplyDeclarative(ctx context.Context, config pgconn.Config, fsys afero.Fs)
fmt.Fprintf(os.Stderr, "Errors: %v\n", result.Errors)
}
if len(result.StuckStatements) > 0 {
fmt.Fprintf(os.Stderr, "Stuck statements: %v\n", result.StuckStatements)
fmt.Fprintln(os.Stderr, "Stuck statements:")
for _, s := range result.StuckStatements {
fmt.Fprintf(os.Stderr, " - %s: %s\n", s.Code, s.Message)
}
}
return errors.Errorf("pg-delta declarative apply failed with status: %s", result.Status)
}
Expand Down
38 changes: 26 additions & 12 deletions pkg/config/auth.go
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,7 @@ type (
SigningKeysPath string `toml:"signing_keys_path" json:"signing_keys_path"`
SigningKeys []JWK `toml:"-" json:"-"`
Passkey *passkey `toml:"passkey" json:"passkey"`
Webauthn *webauthn `toml:"webauthn" json:"webauthn"`

RateLimit rateLimit `toml:"rate_limit" json:"rate_limit"`
Captcha *captcha `toml:"captcha" json:"captcha"`
Expand Down Expand Up @@ -380,7 +381,10 @@ type (
}

passkey struct {
Enabled bool `toml:"enabled" json:"enabled"`
Enabled bool `toml:"enabled" json:"enabled"`
}

webauthn struct {
RpDisplayName string `toml:"rp_display_name" json:"rp_display_name"`
RpId string `toml:"rp_id" json:"rp_id"`
RpOrigins []string `toml:"rp_origins" json:"rp_origins"`
Expand Down Expand Up @@ -418,6 +422,9 @@ func (a *auth) ToUpdateAuthConfigBody() v1API.UpdateAuthConfigBody {
if a.Passkey != nil {
a.Passkey.toAuthConfigBody(&body)
}
if a.Webauthn != nil {
a.Webauthn.toAuthConfigBody(&body)
}
a.Hook.toAuthConfigBody(&body)
a.MFA.toAuthConfigBody(&body)
a.Sessions.toAuthConfigBody(&body)
Expand All @@ -442,6 +449,7 @@ func (a *auth) FromRemoteAuthConfig(remoteConfig v1API.AuthConfigResponse) {
prc := ValOrDefault(remoteConfig.PasswordRequiredCharacters, "")
a.PasswordRequirements = NewPasswordRequirement(v1API.UpdateAuthConfigBodyPasswordRequiredCharacters(prc))
a.Passkey.fromAuthConfig(remoteConfig)
a.Webauthn.fromAuthConfig(remoteConfig)
a.RateLimit.fromAuthConfig(remoteConfig)
if s := a.Email.Smtp; s != nil && s.Enabled {
a.RateLimit.EmailSent = cast.IntToUint(ValOrDefault(remoteConfig.RateLimitEmailSent, 0))
Expand Down Expand Up @@ -502,27 +510,33 @@ func (c *captcha) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) {
}

func (p passkey) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) {
if body.PasskeyEnabled = cast.Ptr(p.Enabled); p.Enabled {
body.WebauthnRpDisplayName = nullable.NewNullableWithValue(p.RpDisplayName)
body.WebauthnRpId = nullable.NewNullableWithValue(p.RpId)
body.WebauthnRpOrigins = nullable.NewNullableWithValue(strings.Join(p.RpOrigins, ","))
}
body.PasskeyEnabled = cast.Ptr(p.Enabled)
}

func (p *passkey) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) {
// When local config is not set, we assume platform defaults should not change
if p == nil {
return
}
// Ignore disabled passkey fields to minimise config diff
if p.Enabled {
p.RpDisplayName = ValOrDefault(remoteConfig.WebauthnRpDisplayName, "")
p.RpId = ValOrDefault(remoteConfig.WebauthnRpId, "")
p.RpOrigins = strToArr(ValOrDefault(remoteConfig.WebauthnRpOrigins, ""))
}
p.Enabled = remoteConfig.PasskeyEnabled
}

// toAuthConfigBody copies the local webauthn relying-party settings into the
// remote auth config update body. Origins are serialized as a single
// comma-separated string, matching the API's wire format.
func (w webauthn) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) {
	origins := strings.Join(w.RpOrigins, ",")
	body.WebauthnRpId = nullable.NewNullableWithValue(w.RpId)
	body.WebauthnRpDisplayName = nullable.NewNullableWithValue(w.RpDisplayName)
	body.WebauthnRpOrigins = nullable.NewNullableWithValue(origins)
}

// fromAuthConfig hydrates the local webauthn settings from the remote auth
// config. A nil receiver means the [auth.webauthn] section is absent locally,
// in which case platform defaults are left untouched.
func (w *webauthn) fromAuthConfig(remoteConfig v1API.AuthConfigResponse) {
	if w == nil {
		// Local config not set: do not change platform defaults.
		return
	}
	w.RpId = ValOrDefault(remoteConfig.WebauthnRpId, "")
	w.RpDisplayName = ValOrDefault(remoteConfig.WebauthnRpDisplayName, "")
	w.RpOrigins = strToArr(ValOrDefault(remoteConfig.WebauthnRpOrigins, ""))
}

func (h hook) toAuthConfigBody(body *v1API.UpdateAuthConfigBody) {
// When local config is not set, we assume platform defaults should not change
if hook := h.BeforeUserCreated; hook != nil {
Expand Down
43 changes: 28 additions & 15 deletions pkg/config/auth_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -215,8 +215,8 @@ func TestCaptchaDiff(t *testing.T) {
func TestPasskeyConfigMapping(t *testing.T) {
t.Run("serializes passkey config to update body", func(t *testing.T) {
c := newWithDefaults()
c.Passkey = &passkey{
Enabled: true,
c.Passkey = &passkey{Enabled: true}
c.Webauthn = &webauthn{
RpDisplayName: "Supabase CLI",
RpId: "localhost",
RpOrigins: []string{
Expand All @@ -235,14 +235,9 @@ func TestPasskeyConfigMapping(t *testing.T) {
assert.Equal(t, "http://127.0.0.1:3000,https://localhost:3000", ValOrDefault(body.WebauthnRpOrigins, ""))
})

t.Run("does not serialize rp fields when passkey is disabled", func(t *testing.T) {
t.Run("does not serialize rp fields when webauthn is undefined", func(t *testing.T) {
c := newWithDefaults()
c.Passkey = &passkey{
Enabled: false,
RpDisplayName: "Supabase CLI",
RpId: "localhost",
RpOrigins: []string{"http://127.0.0.1:3000"},
}
c.Passkey = &passkey{Enabled: false}
// Run test
body := c.ToUpdateAuthConfigBody()
// Check result
Expand All @@ -257,12 +252,27 @@ func TestPasskeyConfigMapping(t *testing.T) {
assert.Error(t, err)
})

t.Run("hydrates passkey config from remote", func(t *testing.T) {
t.Run("serializes webauthn fields independently of passkey", func(t *testing.T) {
c := newWithDefaults()
c.Passkey = &passkey{
Enabled: true,
c.Webauthn = &webauthn{
RpDisplayName: "Supabase CLI",
RpId: "localhost",
RpOrigins: []string{"http://127.0.0.1:3000"},
}
// Run test
body := c.ToUpdateAuthConfigBody()
// Check result
assert.Nil(t, body.PasskeyEnabled)
assert.Equal(t, "Supabase CLI", ValOrDefault(body.WebauthnRpDisplayName, ""))
assert.Equal(t, "localhost", ValOrDefault(body.WebauthnRpId, ""))
assert.Equal(t, "http://127.0.0.1:3000", ValOrDefault(body.WebauthnRpOrigins, ""))
})

t.Run("hydrates passkey and webauthn config from remote", func(t *testing.T) {
c := newWithDefaults()
c.Passkey = &passkey{Enabled: true}
c.Webauthn = &webauthn{}
// Run test
c.FromRemoteAuthConfig(v1API.AuthConfigResponse{
PasskeyEnabled: true,
WebauthnRpDisplayName: nullable.NewNullableWithValue("Supabase CLI"),
Expand All @@ -272,12 +282,14 @@ func TestPasskeyConfigMapping(t *testing.T) {
// Check result
if assert.NotNil(t, c.Passkey) {
assert.True(t, c.Passkey.Enabled)
assert.Equal(t, "Supabase CLI", c.Passkey.RpDisplayName)
assert.Equal(t, "localhost", c.Passkey.RpId)
}
if assert.NotNil(t, c.Webauthn) {
assert.Equal(t, "Supabase CLI", c.Webauthn.RpDisplayName)
assert.Equal(t, "localhost", c.Webauthn.RpId)
assert.Equal(t, []string{
"http://127.0.0.1:3000",
"https://localhost:3000",
}, c.Passkey.RpOrigins)
}, c.Webauthn.RpOrigins)
}
})

Expand All @@ -292,6 +304,7 @@ func TestPasskeyConfigMapping(t *testing.T) {
})
// Check result
assert.Nil(t, c.Passkey)
assert.Nil(t, c.Webauthn)
})
}

Expand Down
Loading