diff --git a/go.mod b/go.mod index 1d790e827..77ec26667 100644 --- a/go.mod +++ b/go.mod @@ -61,7 +61,7 @@ require ( require ( 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect 4d63.com/gochecknoglobals v0.2.1 // indirect - dario.cat/mergo v1.0.0 // indirect + dario.cat/mergo v1.0.1 // indirect github.com/4meepo/tagalign v1.3.4 // indirect github.com/Abirdcfly/dupword v0.0.14 // indirect github.com/Antonboom/errname v0.1.13 // indirect diff --git a/go.sum b/go.sum index 4fdaab3fb..83533483e 100644 --- a/go.sum +++ b/go.sum @@ -36,6 +36,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8= github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0= diff --git a/internal/functions/new/new_test.go b/internal/functions/new/new_test.go index d5e9c2fc8..c6663b531 100644 --- a/internal/functions/new/new_test.go +++ b/internal/functions/new/new_test.go @@ -15,6 +15,8 @@ func TestNewCommand(t *testing.T) { t.Run("creates new function", func(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() + err := afero.WriteFile(fsys, "supabase/config.toml", []byte{}, 0644) + require.NoError(t, err) // Run test assert.NoError(t, Run(context.Background(), "test-func", fsys)) // Validate output diff --git a/internal/utils/config.go b/internal/utils/config.go index 3f89dc75c..ac4a6974c 100644 --- a/internal/utils/config.go +++ 
b/internal/utils/config.go @@ -108,6 +108,12 @@ func LoadConfigFS(fsys afero.Fs) error { } return err } + if branch, err := GetCurrentBranchFS(fsys); err == nil { + fmt.Fprintf(GetDebugLogger(), "Current branch %s detected, attempt config override with [remotes.%[1]s.] values\n", branch) + if err := Config.LoadRemoteConfigOverrides("", branch, NewRootFS(fsys)); err != nil { + return err + } + } UpdateDockerIds() return nil } diff --git a/pkg/config/config.go b/pkg/config/config.go index db441e9bc..c18f6eba7 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -20,6 +20,7 @@ import ( "text/template" "time" + "dario.cat/mergo" "github.com/BurntSushi/toml" "github.com/docker/go-units" "github.com/go-errors/errors" @@ -119,7 +120,8 @@ func (c CustomClaims) NewToken() *jwt.Token { // // Default values for internal configs should be added to `var Config` initializer. type ( - config struct { + // Common config fields between our "base" config and any "remote" branch specific + BaseConfig struct { ProjectId string `toml:"project_id"` Hostname string `toml:"-"` Api api `toml:"api"` @@ -135,6 +137,11 @@ type ( Experimental experimental `toml:"experimental" mapstructure:"-"` } + config struct { + BaseConfig + Remotes map[string]BaseConfig `toml:"remotes"` + } + api struct { Enabled bool `toml:"enabled"` Image string `toml:"-"` @@ -447,7 +454,7 @@ func WithHostname(hostname string) ConfigEditor { } func NewConfig(editors ...ConfigEditor) config { - initial := config{ + base := BaseConfig{ Hostname: "127.0.0.1", Api: api{ Image: postgrestImage, @@ -544,6 +551,7 @@ func NewConfig(editors ...ConfigEditor) config { Image: edgeRuntimeImage, }, } + initial := config{BaseConfig: base} for _, apply := range editors { apply(&initial) } @@ -559,6 +567,127 @@ var ( envPattern = regexp.MustCompile(`^env\((.*)\)$`) ) +// maybeLoadEnv replaces "env(SOMETHING)" with the value of the environment variable SOMETHING. 
+func maybeLoadEnv(s string) (string, error) { + matches := envPattern.FindStringSubmatch(s) + if len(matches) != 2 { + // If the string doesn't match "env(SOMETHING)", return it as is + return s, nil + } + + envName := matches[1] + value, exists := os.LookupEnv(envName) + if !exists { + return "", errors.Errorf("environment variable for: %s but %s is not set", s, envName) + } + + return value, nil +} + +// replaceEnvVariables recursively traverses the config map and replaces +// any "env(SOMETHING)" strings with the actual environment variable values. +func replaceEnvVariables(data interface{}) (interface{}, error) { + switch v := data.(type) { + case map[string]interface{}: + for key, value := range v { + replaced, err := replaceEnvVariables(value) + if err != nil { + return nil, err + } + v[key] = replaced + } + return v, nil + case []interface{}: + for i, item := range v { + replaced, err := replaceEnvVariables(item) + if err != nil { + return nil, err + } + v[i] = replaced + } + return v, nil + case string: + return maybeLoadEnv(v) + default: + return v, nil + } +} + +func (c *config) loadConfigAsMap(path string, fsys fs.FS) (map[string]interface{}, error) { + var configMap map[string]interface{} + builder := NewPathBuilder(path) + if _, err := toml.DecodeFS(fsys, builder.ConfigPath, &configMap); err != nil { + return nil, errors.Errorf("failed to load config.toml: %w", err) + } + return configMap, nil +} + +func (c *config) loadMapInConfig(configMap map[string]interface{}) error { + // We use this buffer and encode/decode trick to convert our arbitrary structure + // back to our original config structure with the new overridden values + var buf bytes.Buffer + // Encode the map back to TOML + if err := toml.NewEncoder(&buf).Encode(configMap); err != nil { + return errors.Errorf("failed to encode map to TOML: %w", err) + } + // Decode the TOML into the config struct + if _, err := toml.Decode(buf.String(), c); err != nil { + return errors.Errorf("failed to 
decode TOML into struct: %w", err) + } + return nil +} + +func (c *config) loadConfigAsMapFromTemplate() (map[string]interface{}, error) { + var buf bytes.Buffer + if err := initConfigTemplate.Option("missingkey=zero").Execute(&buf, c); err != nil { + return nil, errors.Errorf("failed to execute config template: %w", err) + } + + var configMap map[string]interface{} + if _, err := toml.Decode(buf.String(), &configMap); err != nil { + return nil, errors.Errorf("failed to decode config template into map: %w", err) + } + + return configMap, nil +} + +func (c *config) loadConfigAsMapWithEnvOverride(path string, fsys fs.FS) (map[string]interface{}, error) { + // Step 1: Load default config as map from template + defaultMap, err := c.loadConfigAsMapFromTemplate() + if err != nil { + return nil, err + } + + // Step 2: Load user config as map from config.toml + userConfigMap, err := c.loadConfigAsMap(path, fsys) + if err != nil { + return nil, err + } + + // Load secrets from .env file + if err := loadDefaultEnv(); err != nil { + return nil, err + } + + // Step 4: Replace env variables in the user defined config map + replacedMap, err := replaceEnvVariables(userConfigMap) + if err != nil { + return nil, err + } + + // Step 6: Assert that replacedMap is of type map[string]interface{} + finalUserConfigMap, ok := replacedMap.(map[string]interface{}) + if !ok { + return nil, errors.New("failed to assert replacedMap to map[string]interface{}") + } + + // Step 3: Merge user config into default config + if err := mergo.Merge(&defaultMap, finalUserConfigMap, mergo.WithOverride); err != nil { + return nil, errors.Errorf("failed to merge default and user config: %w", err) + } + return defaultMap, nil +} + func (c *config) Eject(w io.Writer) error { // Defaults to current directory name as project id if len(c.ProjectId) == 0 { @@ -578,32 +707,16 @@ func (c *config) Eject(w io.Writer) error { func (c *config) Load(path string, fsys fs.FS) error { builder := NewPathBuilder(path) - // 
Load default values - var buf bytes.Buffer - if err := initConfigTemplate.Option("missingkey=zero").Execute(&buf, c); err != nil { - return errors.Errorf("failed to initialise config template: %w", err) - } - dec := toml.NewDecoder(&buf) - if _, err := dec.Decode(c); err != nil { - return errors.Errorf("failed to decode config template: %w", err) - } - // Load user defined config - if metadata, err := toml.DecodeFS(fsys, builder.ConfigPath, c); err != nil { - cwd, osErr := os.Getwd() - if osErr != nil { - cwd = "current directory" - } - return errors.Errorf("cannot read config in %s: %w", cwd, err) - } else if undecoded := metadata.Undecoded(); len(undecoded) > 0 { - fmt.Fprintf(os.Stderr, "Unknown config fields: %+v\n", undecoded) - } - // Load secrets from .env file - if err := loadDefaultEnv(); err != nil { + + if configMap, err := c.loadConfigAsMapWithEnvOverride(path, fsys); err != nil { return err + } else { + // Step 5: Load the replaced map back into the config struct + if err := c.loadMapInConfig(configMap); err != nil { + return err + } } - if err := viper.Unmarshal(c); err != nil { - return errors.Errorf("failed to parse env to config: %w", err) - } + // Generate JWT tokens if len(c.Auth.AnonKey) == 0 { anonToken := CustomClaims{Role: "anon"}.NewToken() @@ -688,6 +801,43 @@ func (c *config) Load(path string, fsys fs.FS) error { return c.Validate() } +// Will override the current config values with the ones presents in the [remotes.remoteName] +// definitions if there is some. 
+func (c *config) LoadRemoteConfigOverrides(path, remoteName string, fsys fs.FS) error { + // Load the entire config as a map so we can distinguish between unset and "default" + // fields filled with false/empty string/0 values + if configMap, err := c.loadConfigAsMapWithEnvOverride(path, fsys); err != nil { + return err + } else { + // Extract the remotes section + remotes, ok := configMap["remotes"].(map[string]interface{}) + if !ok { + // No remotes defined + return nil + } + // Extract the remote configuration for the current branch into an abstract structure + remoteConfigMap, ok := remotes[remoteName].(map[string]interface{}) + if !ok { + // No configuration for the current remote + return nil + } + + // Remove the remotes from our configMap + delete(configMap, "remotes") + + // We merge our remotes configuration to our original config, overriding the original + // with all the values set in the remote config + if err := mergo.Merge(&configMap, remoteConfigMap, mergo.WithOverride); err != nil { + return errors.Errorf("failed to merge config and %s config: %w", remoteName, err) + } + if err := c.loadMapInConfig(configMap); err != nil { + return err + } + // We validate that the overridden config is still valid + return c.Validate() + } +} + func (c *config) Validate() error { if c.ProjectId == "" { return errors.New("Missing required field in config: project_id") @@ -717,19 +867,6 @@ func (c *config) Validate() error { case 15: if len(c.Experimental.OrioleDBVersion) > 0 { c.Db.Image = "supabase/postgres:orioledb-" + c.Experimental.OrioleDBVersion - var err error - if c.Experimental.S3Host, err = maybeLoadEnv(c.Experimental.S3Host); err != nil { - return err - } - if c.Experimental.S3Region, err = maybeLoadEnv(c.Experimental.S3Region); err != nil { - return err - } - if c.Experimental.S3AccessKey, err = maybeLoadEnv(c.Experimental.S3AccessKey); err != nil { - return err - } - if c.Experimental.S3SecretKey, err = maybeLoadEnv(c.Experimental.S3SecretKey); err != 
nil { - return err - } } default: return errors.Errorf("Failed reading config: Invalid %s: %v.", "db.major_version", c.Db.MajorVersion) @@ -764,7 +901,6 @@ func (c *config) Validate() error { } else if parsed.Host == "" || parsed.Host == c.Hostname { c.Studio.ApiUrl = c.Api.ExternalUrl } - c.Studio.OpenaiApiKey, _ = maybeLoadEnv(c.Studio.OpenaiApiKey) } // Validate smtp config if c.Inbucket.Enabled { @@ -777,19 +913,13 @@ func (c *config) Validate() error { if c.Auth.SiteUrl == "" { return errors.New("Missing required field in config: auth.site_url") } - var err error - if c.Auth.SiteUrl, err = maybeLoadEnv(c.Auth.SiteUrl); err != nil { - return err - } + // Validate email config for name, tmpl := range c.Auth.Email.Template { if len(tmpl.ContentPath) > 0 && !fs.ValidPath(filepath.Clean(tmpl.ContentPath)) { return errors.Errorf("Invalid config for auth.email.%s.content_path: %s", name, tmpl.ContentPath) } } - if c.Auth.Email.Smtp.Pass, err = maybeLoadEnv(c.Auth.Email.Smtp.Pass); err != nil { - return err - } // Validate sms config if c.Auth.Sms.Twilio.Enabled { if len(c.Auth.Sms.Twilio.AccountSid) == 0 { @@ -801,9 +931,7 @@ func (c *config) Validate() error { if len(c.Auth.Sms.Twilio.AuthToken) == 0 { return errors.New("Missing required field in config: auth.sms.twilio.auth_token") } - if c.Auth.Sms.Twilio.AuthToken, err = maybeLoadEnv(c.Auth.Sms.Twilio.AuthToken); err != nil { - return err - } + } if c.Auth.Sms.TwilioVerify.Enabled { if len(c.Auth.Sms.TwilioVerify.AccountSid) == 0 { @@ -815,9 +943,7 @@ func (c *config) Validate() error { if len(c.Auth.Sms.TwilioVerify.AuthToken) == 0 { return errors.New("Missing required field in config: auth.sms.twilio_verify.auth_token") } - if c.Auth.Sms.TwilioVerify.AuthToken, err = maybeLoadEnv(c.Auth.Sms.TwilioVerify.AuthToken); err != nil { - return err - } + } if c.Auth.Sms.Messagebird.Enabled { if len(c.Auth.Sms.Messagebird.Originator) == 0 { @@ -826,9 +952,7 @@ func (c *config) Validate() error { if 
len(c.Auth.Sms.Messagebird.AccessKey) == 0 { return errors.New("Missing required field in config: auth.sms.messagebird.access_key") } - if c.Auth.Sms.Messagebird.AccessKey, err = maybeLoadEnv(c.Auth.Sms.Messagebird.AccessKey); err != nil { - return err - } + } if c.Auth.Sms.Textlocal.Enabled { if len(c.Auth.Sms.Textlocal.Sender) == 0 { @@ -837,9 +961,7 @@ func (c *config) Validate() error { if len(c.Auth.Sms.Textlocal.ApiKey) == 0 { return errors.New("Missing required field in config: auth.sms.textlocal.api_key") } - if c.Auth.Sms.Textlocal.ApiKey, err = maybeLoadEnv(c.Auth.Sms.Textlocal.ApiKey); err != nil { - return err - } + } if c.Auth.Sms.Vonage.Enabled { if len(c.Auth.Sms.Vonage.From) == 0 { @@ -851,12 +973,7 @@ func (c *config) Validate() error { if len(c.Auth.Sms.Vonage.ApiSecret) == 0 { return errors.New("Missing required field in config: auth.sms.vonage.api_secret") } - if c.Auth.Sms.Vonage.ApiKey, err = maybeLoadEnv(c.Auth.Sms.Vonage.ApiKey); err != nil { - return err - } - if c.Auth.Sms.Vonage.ApiSecret, err = maybeLoadEnv(c.Auth.Sms.Vonage.ApiSecret); err != nil { - return err - } + } if err := c.Auth.Hook.MFAVerificationAttempt.HandleHook("mfa_verification_attempt"); err != nil { return err @@ -884,18 +1001,7 @@ func (c *config) Validate() error { if !sliceContains([]string{"apple", "google"}, ext) && provider.Secret == "" { return errors.Errorf("Missing required field in config: auth.external.%s.secret", ext) } - if provider.ClientId, err = maybeLoadEnv(provider.ClientId); err != nil { - return err - } - if provider.Secret, err = maybeLoadEnv(provider.Secret); err != nil { - return err - } - if provider.RedirectUri, err = maybeLoadEnv(provider.RedirectUri); err != nil { - return err - } - if provider.Url, err = maybeLoadEnv(provider.Url); err != nil { - return err - } + c.Auth.External[ext] = provider } } @@ -938,20 +1044,6 @@ func (c *config) Validate() error { return nil } -func maybeLoadEnv(s string) (string, error) { - matches := 
envPattern.FindStringSubmatch(s) - if len(matches) == 0 { - return s, nil - } - - envName := matches[1] - if value := os.Getenv(envName); value != "" { - return value, nil - } - - return "", errors.Errorf(`Error evaluating "%s": environment variable %s is unset.`, s, envName) -} - func truncateText(text string, maxLen int) string { if len(text) > maxLen { return text[:maxLen] @@ -1007,10 +1099,6 @@ func (h *hookConfig) HandleHook(hookType string) error { if err := validateHookURI(h.URI, hookType); err != nil { return err } - var err error - if h.Secrets, err = maybeLoadEnv(h.Secrets); err != nil { - return errors.Errorf("missing required field in config: auth.hook.%s.secrets", hookType) - } return nil } @@ -1081,17 +1169,9 @@ func (c *tpaCognito) validate() error { if c.UserPoolID == "" { return errors.New("Invalid config: auth.third_party.cognito is enabled but without a user_pool_id.") } - var err error - if c.UserPoolID, err = maybeLoadEnv(c.UserPoolID); err != nil { - return err - } - if c.UserPoolRegion == "" { return errors.New("Invalid config: auth.third_party.cognito is enabled but without a user_pool_region.") } - if c.UserPoolRegion, err = maybeLoadEnv(c.UserPoolRegion); err != nil { - return err - } return nil } diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 29a605059..ce88dd90a 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -15,6 +15,23 @@ import ( //go:embed testdata/config.toml var testInitConfigEmbed []byte +//go:embed testdata/config-remotes-overrides.toml +var testInitRemotesConfigEmbed []byte + +//go:embed testdata/config-remotes-env-overrides.toml +var testInitRemotesConfigWithEnvOverrideEmbed []byte + +func setEnvForTestTemplate(t *testing.T) { + // Run test + t.Setenv("TWILIO_AUTH_TOKEN", "token") + t.Setenv("AZURE_CLIENT_ID", "hello") + t.Setenv("AZURE_SECRET", "this is cool") + t.Setenv("AUTH_SEND_SMS_SECRETS", "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw==") + 
t.Setenv("SENDGRID_API_KEY", "sendgrid") + t.Setenv("S3_HOST", "some-host") + t.Setenv("OPENAI_API_KEY", "open-api-key") +} + func TestConfigParsing(t *testing.T) { t.Run("classic config file", func(t *testing.T) { config := NewConfig() @@ -23,6 +40,7 @@ func TestConfigParsing(t *testing.T) { require.NoError(t, config.Eject(&buf)) file := fs.MapFile{Data: buf.Bytes()} fsys := fs.MapFS{"config.toml": &file} + setEnvForTestTemplate(t) // Check error assert.NoError(t, config.Load("config.toml", fsys)) }) @@ -34,14 +52,10 @@ func TestConfigParsing(t *testing.T) { "supabase/config.toml": &fs.MapFile{Data: testInitConfigEmbed}, "supabase/templates/invite.html": &fs.MapFile{}, } - // Run test - t.Setenv("TWILIO_AUTH_TOKEN", "token") - t.Setenv("AZURE_CLIENT_ID", "hello") - t.Setenv("AZURE_SECRET", "this is cool") - t.Setenv("AUTH_SEND_SMS_SECRETS", "v1,whsec_aWxpa2VzdXBhYmFzZXZlcnltdWNoYW5kaWhvcGV5b3Vkb3Rvbw==") - t.Setenv("SENDGRID_API_KEY", "sendgrid") + setEnvForTestTemplate(t) assert.NoError(t, config.Load("", fsys)) // Check error + assert.Equal(t, "test", config.ProjectId) assert.Equal(t, "hello", config.Auth.External["azure"].ClientId) assert.Equal(t, "this is cool", config.Auth.External["azure"].Secret) }) @@ -55,6 +69,31 @@ func TestConfigParsing(t *testing.T) { // Run test assert.Error(t, config.Load("", fsys)) }) + + t.Run("config file with remotes branch config", func(t *testing.T) { + config := NewConfig() + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: testInitRemotesConfigEmbed}, + "supabase/templates/invite.html": &fs.MapFile{}, + } + // Run test + assert.NoError(t, config.Load("", fsys)) + // Check the default value in the config + assert.Equal(t, "http://127.0.0.1:3000", config.Auth.SiteUrl) + assert.Equal(t, true, config.Auth.EnableSignup) + assert.Equal(t, true, config.Auth.External["azure"].Enabled) + assert.Equal(t, "AZURE_CLIENT_ID", config.Auth.External["azure"].ClientId) + assert.Equal(t, 
[]string{"image/png", "image/jpeg"}, config.Storage.Buckets["images"].AllowedMimeTypes) + // Check the values for the remote feature-auth-branch override + assert.Equal(t, "http://feature-auth-branch.com/", config.Remotes["feature-auth-branch"].Auth.SiteUrl) + assert.Equal(t, false, config.Remotes["feature-auth-branch"].Auth.EnableSignup) + assert.Equal(t, false, config.Remotes["feature-auth-branch"].Auth.External["azure"].Enabled) + assert.Equal(t, "nope", config.Remotes["feature-auth-branch"].Auth.External["azure"].ClientId) + + // Check the values for the remote feature-storage-branch override + assert.Equal(t, []string{"image/png", "image/jpeg", "image/svg+xml"}, config.Remotes["feature-storage-branch"].Storage.Buckets["images"].AllowedMimeTypes) + }) } func TestFileSizeLimitConfigParsing(t *testing.T) { @@ -212,3 +251,233 @@ func TestValidateHookURI(t *testing.T) { }) } } + +func TestLoadRemoteConfigOverrides(t *testing.T) { + t.Run("no remote config load non-existent branch", func(t *testing.T) { + const configToml = ` + project_id = "config" + [api] + port = 54321 + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + + // Attempt to load a non-existent remote branch + err := config.LoadRemoteConfigOverrides("config.toml", "non-existent-branch", fsys) + assert.NoError(t, err) + + // Ensure the config remains unchanged + assert.Equal(t, "config", config.ProjectId) + assert.Equal(t, uint16(54321), config.Api.Port) + }) + + t.Run("override project id", func(t *testing.T) { + const configToml = ` + project_id = "original-project" + + [remotes.feature-branch] + project_id = "feature-project" + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + // Load the remote configuration overrides + assert.NoError(t, 
config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys)) + // Assert that the project_id has been overridden + assert.Equal(t, "feature-project", config.ProjectId) + }) + + t.Run("override nested field", func(t *testing.T) { + const configToml = ` + project_id = "config" + [api] + port = 54321 + + [remotes.feature-branch.api] + port = 9000 + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + assert.NoError(t, config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys)) + + // Assert that the API port has been overridden + assert.Equal(t, uint16(9000), config.Api.Port) + }) + + t.Run("partial override", func(t *testing.T) { + const configToml = ` + project_id = "config" + [api] + port = 54321 + + [remotes.feature-branch.api] + port = 9000 + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + assert.NoError(t, config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys)) + + // Assert that only the API port is overridden + assert.Equal(t, "config", config.ProjectId) + assert.Equal(t, uint16(9000), config.Api.Port) + }) + + t.Run("multiple nested overrides", func(t *testing.T) { + const configToml = ` + project_id = "original-project" + [api] + port = 54321 + [auth] + site_url = "http://original.com" + + [remotes.feature-branch] + project_id = "feature-project" + [remotes.feature-branch.api] + port = 9000 + [remotes.feature-branch.auth] + site_url = "http://feature.com" + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + assert.NoError(t, config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys)) + + // Assert that all specified fields are overridden + assert.Equal(t, 
"feature-project", config.ProjectId) + assert.Equal(t, uint16(9000), config.Api.Port) + assert.Equal(t, "http://feature.com", config.Auth.SiteUrl) + }) + + t.Run("override with empty remote config", func(t *testing.T) { + const configToml = ` + project_id = "config" + [api] + port = 54321 + + [remotes.feature-branch] + # Empty remote config + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + assert.NoError(t, config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys)) + + // Assert that the config remains unchanged + assert.Equal(t, "config", config.ProjectId) + assert.Equal(t, uint16(54321), config.Api.Port) + }) + + t.Run("override with invalid config", func(t *testing.T) { + const configToml = ` + project_id = "config" + [db] + major_version = 15 + + [remotes.feature-branch.db] + major_version = 12 + ` + + config := NewConfig() + fsys := fs.MapFS{ + "config.toml": &fs.MapFile{Data: []byte(configToml)}, + } + assert.NoError(t, config.Load("config.toml", fsys)) + + // Attempt to load the invalid remote config + err := config.LoadRemoteConfigOverrides("config.toml", "feature-branch", fsys) + require.Error(t, err) + assert.Contains(t, err.Error(), "Postgres version 12.x is unsupported") + }) + + t.Run("can load feature-auth-branch", func(t *testing.T) { + config := NewConfig() + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: testInitRemotesConfigEmbed}, + "supabase/templates/invite.html": &fs.MapFile{}, + } + // Run test + // First load the config + assert.NoError(t, config.Load("", fsys)) + // Load our branch values + assert.NoError(t, config.LoadRemoteConfigOverrides("", "feature-auth-branch", fsys)) + // Check that feature-auth-branch config replaced default config + assert.Equal(t, "http://feature-auth-branch.com/", config.Auth.SiteUrl) + assert.Equal(t, false, config.Auth.EnableSignup) + 
assert.Equal(t, false, config.Auth.External["azure"].Enabled) + assert.Equal(t, "nope", config.Auth.External["azure"].ClientId) + // Verify that other config values remain unchanged + assert.Equal(t, "test", config.ProjectId) + assert.Equal(t, uint16(54321), config.Api.Port) + assert.Equal(t, []string{"image/png", "image/jpeg"}, config.Storage.Buckets["images"].AllowedMimeTypes) + }) + + t.Run("can load feature-storage-branch", func(t *testing.T) { + config := NewConfig() + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: testInitRemotesConfigEmbed}, + "supabase/templates/invite.html": &fs.MapFile{}, + } + // Run test + assert.NoError(t, config.Load("", fsys)) + assert.NoError(t, config.LoadRemoteConfigOverrides("", "feature-storage-branch", fsys)) + // Check that feature-storage-branch config replaced default config + assert.Equal(t, []string{"image/png", "image/jpeg", "image/svg+xml"}, config.Storage.Buckets["images"].AllowedMimeTypes) + // Verify that other config values remain unchanged + assert.Equal(t, "test", config.ProjectId) + assert.Equal(t, uint16(54321), config.Api.Port) + assert.Equal(t, "http://127.0.0.1:3000", config.Auth.SiteUrl) + assert.Equal(t, true, config.Auth.EnableSignup) + assert.Equal(t, true, config.Auth.External["azure"].Enabled) + assert.Equal(t, "AZURE_CLIENT_ID", config.Auth.External["azure"].ClientId) + }) + + t.Run("can load feature-auth-branch with env override value", func(t *testing.T) { + config := NewConfig() + // Setup in-memory fs + fsys := fs.MapFS{ + "supabase/config.toml": &fs.MapFile{Data: testInitRemotesConfigWithEnvOverrideEmbed}, + "supabase/templates/invite.html": &fs.MapFile{}, + } + t.Setenv("AZURE_CLIENT_ID", "this-is-a-env-value") + // Run test + // First load the config + assert.NoError(t, config.Load("", fsys)) + assert.Equal(t, "AZURE_CLIENT_ID_ORIGINAL_VALUE", config.Auth.External["azure"].ClientId) + assert.Equal(t, "https://login.microsoftonline.com/tenant", 
config.Auth.External["azure"].Url) + // Load our branch values + assert.NoError(t, config.LoadRemoteConfigOverrides("", "feature-auth-branch", fsys)) + // Check that feature-auth-branch config replaced default config + assert.Equal(t, "http://feature-auth-branch.com/", config.Auth.SiteUrl) + assert.Equal(t, false, config.Auth.EnableSignup) + assert.Equal(t, true, config.Auth.External["azure"].Enabled) + assert.Equal(t, "http://overriden-url.com/tenant", config.Auth.External["azure"].Url) + assert.Equal(t, "this-is-a-env-value", config.Auth.External["azure"].ClientId) + // Verify that other config values remain unchanged + assert.Equal(t, "test", config.ProjectId) + assert.Equal(t, uint16(54321), config.Api.Port) + assert.Equal(t, []string{"image/png", "image/jpeg"}, config.Storage.Buckets["images"].AllowedMimeTypes) + }) +} diff --git a/pkg/config/templates/config.toml b/pkg/config/templates/config.toml index 37646aa63..bcb6a5bfd 100644 --- a/pkg/config/templates/config.toml +++ b/pkg/config/templates/config.toml @@ -53,7 +53,7 @@ port = 54323 # External URL of the API server that frontend connects to. api_url = "http://127.0.0.1" # OpenAI API Key to use for Supabase AI in the Supabase Studio. -openai_api_key = "env(OPENAI_API_KEY)" +# openai_api_key = "env(OPENAI_API_KEY)" # Email testing server. Emails sent with the local dev setup are not actually sent - rather, they # are monitored, and you can view the emails that would have been sent from the web interface. @@ -160,7 +160,7 @@ enabled = false account_sid = "" message_service_sid = "" # DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: -auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" +# auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" [auth.mfa] # Control how many MFA factors can be enrolled at once per user. @@ -186,7 +186,7 @@ verify_enabled = true enabled = false client_id = "" # DO NOT commit your OAuth provider secret to git. 
Use environment variable substitution instead: -secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" +# secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" # Overrides the default auth redirectUrl. redirect_uri = "" # Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, @@ -230,10 +230,10 @@ backend = "postgres" # Configures Postgres storage engine to use OrioleDB (S3) orioledb_version = "{{ .Experimental.OrioleDBVersion }}" # Configures S3 bucket URL, eg. .s3-.amazonaws.com -s3_host = "env(S3_HOST)" +# s3_host = "env(S3_HOST)" # Configures S3 bucket region, eg. us-east-1 -s3_region = "env(S3_REGION)" +# s3_region = "env(S3_REGION)" # Configures AWS_ACCESS_KEY_ID for S3 bucket -s3_access_key = "env(S3_ACCESS_KEY)" +# s3_access_key = "env(S3_ACCESS_KEY)" # Configures AWS_SECRET_ACCESS_KEY for S3 bucket -s3_secret_key = "env(S3_SECRET_KEY)" +# s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/pkg/config/testdata/config-remotes-env-overrides.toml b/pkg/config/testdata/config-remotes-env-overrides.toml new file mode 100644 index 000000000..da84e1bfe --- /dev/null +++ b/pkg/config/testdata/config-remotes-env-overrides.toml @@ -0,0 +1,231 @@ +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "test" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. public and storage are always included. +schemas = ["public", "graphql_public"] +# Extra schemas to add to the search_path of every request. public is always included. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. 
+max_rows = 1000 + +[api.tls] +enabled = true + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = true +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv6) +ip_version = "IPv4" +# The maximum length in bytes of HTTP request headers. (default: 4096) +max_header_length = 8192 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" +# OpenAI API Key to use for Supabase AI in the Supabase Studio. +openai_api_key = "OPENAI_API_KEY" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). 
+file_size_limit = "50MiB" + +[storage.image_transformation] +enabled = false + +# Uncomment to configure local storage buckets +[storage.buckets.images] +public = false +file_size_limit = "50MiB" +allowed_mime_types = ["image/png", "image/jpeg"] +objects_path = "./images" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://127.0.0.1:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. +additional_redirect_urls = ["https://127.0.0.1:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). +jwt_expiry = 3600 +# If disabled, the refresh token will never expire. +enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true +# Allow/disallow testing manual linking of accounts +enable_manual_linking = true + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false +# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. 
+max_frequency = "1s" + +# Use a production-ready SMTP server +[auth.email.smtp] +host = "smtp.sendgrid.net" +port = 587 +user = "apikey" +pass = "SENDGRID_API_KEY" +admin_email = "admin@email.com" +sender_name = "Admin" + +# Uncomment to customize email template +[auth.email.template.invite] +subject = "You have been invited" +content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = true +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ `{{ .Code }}` }} ." +# Controls the minimum amount of time that must pass before sending another sms otp. +max_frequency = "5s" + +# Use pre-defined map of phone number to OTP for testing. +[auth.sms.test_otp] +4152127777 = "123456" + +# Configure logged in session timeouts. +[auth.sessions] +# Force log out after the specified duration. +timebox = "24h" +# Force log out if the user has been inactive longer than the specified duration. +inactivity_timeout = "8h" + +[auth.hook.custom_access_token] +enabled = true +uri = "pg-functions://postgres/auth/custom-access-token-hook" + +[auth.hook.send_sms] +enabled = true +uri = "http://host.docker.internal/functions/v1/send_sms" +secrets = "AUTH_SEND_SMS_SECRETS" + + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = true +account_sid = "account_sid" +message_service_sid = "message_service_sid" +# DO NOT commit your Twilio auth token to git. 
Use environment variable substitution instead: +auth_token = "TWILIO_AUTH_TOKEN" + +[auth.mfa] +max_enrolled_factors = 10 + +# Configure MFA via App Authenticator (TOTP) +[auth.mfa.totp] +enroll_enabled = true +verify_enabled = true + +# Configure Multi-factor-authentication via Phone Messaging +[auth.mfa.phone] +enroll_enabled = true +verify_enabled = true +otp_length = 6 +template = "Your code is {{ `{{ .Code }}` }} ." +max_frequency = "10s" + +# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.azure] +enabled = true +client_id = "AZURE_CLIENT_ID_ORIGINAL_VALUE" +secret = "AZURE_SECRET" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "https://login.microsoftonline.com/tenant" +# If enabled, the nonce check will be skipped. Required for local sign in with Google auth. +skip_nonce_check = true + + +[remotes.feature-auth-branch.auth.external.azure] +url = "http://overriden-url.com/tenant" +client_id = "env(AZURE_CLIENT_ID)" + +[edge_runtime] +enabled = true +# Configure one of the supported request policies: `oneshot`, `per_worker`. +# Use `oneshot` for hot reload, or `per_worker` for load testing. +policy = "per_worker" +inspector_port = 8083 + +[analytics] +enabled = true +port = 54327 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "15.1.0.150" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "orioledb.s3-accelerate.amazonaws.com" +# Configures S3 bucket region, eg. 
us-east-1 +s3_region = "ap-southeast-1" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "" + +[remotes.feature-auth-branch.auth] +site_url = "http://feature-auth-branch.com/" +enable_signup = false + +[remotes.feature-storage-branch.storage.buckets.images] +allowed_mime_types = ["image/png", "image/jpeg", "image/svg+xml"] \ No newline at end of file diff --git a/pkg/config/testdata/config-remotes-overrides.toml b/pkg/config/testdata/config-remotes-overrides.toml new file mode 100644 index 000000000..317e165f7 --- /dev/null +++ b/pkg/config/testdata/config-remotes-overrides.toml @@ -0,0 +1,230 @@ +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "test" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. public and storage are always included. +schemas = ["public", "graphql_public"] +# Extra schemas to add to the search_path of every request. public is always included. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[api.tls] +enabled = true + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = true +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. 
+# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv6) +ip_version = "IPv4" +# The maximum length in bytes of HTTP request headers. (default: 4096) +max_header_length = 8192 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" +# OpenAI API Key to use for Supabase AI in the Supabase Studio. +openai_api_key = "OPENAI_API_KEY" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +[storage.image_transformation] +enabled = false + +# Uncomment to configure local storage buckets +[storage.buckets.images] +public = false +file_size_limit = "50MiB" +allowed_mime_types = ["image/png", "image/jpeg"] +objects_path = "./images" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://127.0.0.1:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. +additional_redirect_urls = ["https://127.0.0.1:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). 
+jwt_expiry = 3600 +# If disabled, the refresh token will never expire. +enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true +# Allow/disallow testing manual linking of accounts +enable_manual_linking = true + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false +# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. +max_frequency = "1s" + +# Use a production-ready SMTP server +[auth.email.smtp] +host = "smtp.sendgrid.net" +port = 587 +user = "apikey" +pass = "SENDGRID_API_KEY" +admin_email = "admin@email.com" +sender_name = "Admin" + +# Uncomment to customize email template +[auth.email.template.invite] +subject = "You have been invited" +content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = true +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ `{{ .Code }}` }} ." +# Controls the minimum amount of time that must pass before sending another sms otp. +max_frequency = "5s" + +# Use pre-defined map of phone number to OTP for testing. +[auth.sms.test_otp] +4152127777 = "123456" + +# Configure logged in session timeouts. +[auth.sessions] +# Force log out after the specified duration. 
+timebox = "24h" +# Force log out if the user has been inactive longer than the specified duration. +inactivity_timeout = "8h" + +[auth.hook.custom_access_token] +enabled = true +uri = "pg-functions://postgres/auth/custom-access-token-hook" + +[auth.hook.send_sms] +enabled = true +uri = "http://host.docker.internal/functions/v1/send_sms" +secrets = "AUTH_SEND_SMS_SECRETS" + + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = true +account_sid = "account_sid" +message_service_sid = "message_service_sid" +# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: +auth_token = "TWILIO_AUTH_TOKEN" + +[auth.mfa] +max_enrolled_factors = 10 + +# Configure MFA via App Authenticator (TOTP) +[auth.mfa.totp] +enroll_enabled = true +verify_enabled = true + +# Configure Multi-factor-authentication via Phone Messaging +[auth.mfa.phone] +enroll_enabled = true +verify_enabled = true +otp_length = 6 +template = "Your code is {{ `{{ .Code }}` }} ." +max_frequency = "10s" + +# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.azure] +enabled = true +client_id = "AZURE_CLIENT_ID" +secret = "AZURE_SECRET" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "https://login.microsoftonline.com/tenant" +# If enabled, the nonce check will be skipped. Required for local sign in with Google auth. +skip_nonce_check = true + +[edge_runtime] +enabled = true +# Configure one of the supported request policies: `oneshot`, `per_worker`. +# Use `oneshot` for hot reload, or `per_worker` for load testing. 
+policy = "per_worker" +inspector_port = 8083 + +[analytics] +enabled = true +port = 54327 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "15.1.0.150" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "orioledb.s3-accelerate.amazonaws.com" +# Configures S3 bucket region, eg. us-east-1 +s3_region = "ap-southeast-1" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "" + +[remotes.feature-auth-branch.auth] +site_url = "http://feature-auth-branch.com/" +enable_signup = false + +[remotes.feature-auth-branch.auth.external.azure] +enabled = false +client_id = "nope" + +[remotes.feature-storage-branch.storage.buckets.images] +allowed_mime_types = ["image/png", "image/jpeg", "image/svg+xml"] \ No newline at end of file