diff --git a/.github/workflows/api-sync.yml b/.github/workflows/api-sync.yml new file mode 100644 index 000000000..8193d927a --- /dev/null +++ b/.github/workflows/api-sync.yml @@ -0,0 +1,54 @@ +name: API Sync + +on: + repository_dispatch: + types: [api-sync] + workflow_dispatch: # allow manual triggering + +# Add explicit permissions +permissions: + contents: write + pull-requests: write + +jobs: + sync: + name: Sync API Types + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod + cache: true + + - name: Run codegen + run: go generate + + - name: Check for changes + id: check + run: | + if git diff --ignore-space-at-eol --exit-code --quiet pkg; then + echo "No changes detected" + echo "has_changes=false" >> $GITHUB_OUTPUT + else + echo "Changes detected" + echo "has_changes=true" >> $GITHUB_OUTPUT + fi + + - name: Create Pull Request + if: steps.check.outputs.has_changes == 'true' + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.GH_PAT }} + commit-message: "chore: sync API types from infrastructure" + title: "chore: sync API types from infrastructure" + body: | + This PR was automatically created to sync API types from the infrastructure repository. + + Changes were detected in the generated API code after syncing with the latest spec from infrastructure. + branch: sync/api-types + base: develop + labels: | + automated pr + api-sync diff --git a/cmd/backups.go b/cmd/backups.go new file mode 100644 index 000000000..10f74a22d --- /dev/null +++ b/cmd/backups.go @@ -0,0 +1,46 @@ +package cmd + +import ( + "github.com/spf13/cobra" + "github.com/supabase/cli/internal/backups/list" + "github.com/supabase/cli/internal/backups/restore" + "github.com/supabase/cli/internal/utils/flags" +) + +var ( + backupsCmd = &cobra.Command{ + GroupID: groupManagementAPI, + Use: "backups", + Short: "Manage Supabase physical backups", + } + + backupListCmd = &cobra.Command{ + Use: "list", + Short: "Lists available physical backups", + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + return list.Run(cmd.Context()) + }, + } + + timestamp int64 + + backupRestoreCmd = &cobra.Command{ + Use: "restore", + Short: "Restore to a specific timestamp using PITR", + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + return restore.Run(cmd.Context(), timestamp) + }, + } +) + +func init() { + backupFlags := backupsCmd.PersistentFlags() + backupFlags.StringVar(&flags.ProjectRef, "project-ref", "", "Project ref of the Supabase project.") + backupsCmd.AddCommand(backupListCmd) + restoreFlags := backupRestoreCmd.Flags() + restoreFlags.Int64VarP(&timestamp, "timestamp", "t", 0, "The recovery time target in seconds since epoch.") + backupsCmd.AddCommand(backupRestoreCmd) + rootCmd.AddCommand(backupsCmd) +} diff --git a/cmd/db.go b/cmd/db.go index 948f77beb..02e3e97b6 100644 --- a/cmd/db.go +++ b/cmd/db.go @@ -23,6 +23,7 @@ import ( "github.com/supabase/cli/internal/db/test" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/migration" ) var ( @@ -120,7 +121,13 @@ var ( } }, RunE: func(cmd *cobra.Command, args []string) error { - return dump.Run(cmd.Context(), file, flags.DbConfig, schema, excludeTable, dataOnly, roleOnly, keepComments, useCopy, dryRun, afero.NewOsFs()) + opts := []migration.DumpOptionFunc{ + migration.WithSchema(schema...), + migration.WithoutTable(excludeTable...), + 
migration.WithComments(keepComments), + migration.WithColumnInsert(!useCopy), + } + return dump.Run(cmd.Context(), file, flags.DbConfig, dataOnly, roleOnly, dryRun, afero.NewOsFs(), opts...) }, PostRun: func(cmd *cobra.Command, args []string) { if len(file) > 0 { @@ -195,7 +202,7 @@ var ( if noSeed { utils.Config.Db.Seed.Enabled = false } - return reset.Run(cmd.Context(), migrationVersion, flags.DbConfig, afero.NewOsFs()) + return reset.Run(cmd.Context(), migrationVersion, nLastVersion, flags.DbConfig, afero.NewOsFs()) }, } @@ -319,6 +326,8 @@ func init() { resetFlags.BoolVar(&noSeed, "no-seed", false, "Skip running the seed script after reset.") dbResetCmd.MarkFlagsMutuallyExclusive("db-url", "linked", "local") resetFlags.StringVar(&migrationVersion, "version", "", "Reset up to the specified version.") + resetFlags.UintVar(&nLastVersion, "last", 0, "Reset up to the last n migration versions.") + dbResetCmd.MarkFlagsMutuallyExclusive("version", "last") dbCmd.AddCommand(dbResetCmd) // Build lint command lintFlags := dbLintCmd.Flags() diff --git a/cmd/migration.go b/cmd/migration.go index 30b7716ac..fc92e40d2 100644 --- a/cmd/migration.go +++ b/cmd/migration.go @@ -8,6 +8,7 @@ import ( "github.com/spf13/afero" "github.com/spf13/cobra" "github.com/spf13/viper" + "github.com/supabase/cli/internal/migration/down" "github.com/supabase/cli/internal/migration/fetch" "github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/migration/new" @@ -90,6 +91,17 @@ var ( }, } + nLastVersion uint + + migrationDownCmd = &cobra.Command{ + Use: "down", + Short: "Resets applied migrations up to the last n versions", + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + return down.Run(cmd.Context(), nLastVersion, flags.DbConfig, afero.NewOsFs()) + }, + } + migrationFetchCmd = &cobra.Command{ Use: "fetch", Short: "Fetch migration files from history table", @@ -141,6 +153,13 @@ func init() { upFlags.Bool("local", true, "Applies pending migrations to the local database.") migrationUpCmd.MarkFlagsMutuallyExclusive("db-url", "linked", "local") migrationCmd.AddCommand(migrationUpCmd) + downFlags := migrationDownCmd.Flags() + downFlags.UintVar(&nLastVersion, "last", 1, "Reset up to the last n migration versions.") + downFlags.String("db-url", "", "Resets applied migrations on the database specified by the connection string (must be percent-encoded).") + downFlags.Bool("linked", false, "Resets applied migrations on the linked project.") + downFlags.Bool("local", true, "Resets applied migrations on the local database.") + migrationDownCmd.MarkFlagsMutuallyExclusive("db-url", "linked", "local") + migrationCmd.AddCommand(migrationDownCmd) // Build up command fetchFlags := migrationFetchCmd.Flags() fetchFlags.String("db-url", "", "Fetches migrations from the database specified by the connection string (must be percent-encoded).") diff --git a/go.mod b/go.mod index 7edb9539d..236ba65ea 100644 --- a/go.mod +++ b/go.mod @@ -11,13 +11,13 @@ require ( github.com/charmbracelet/glamour v0.9.1 github.com/charmbracelet/lipgloss v1.1.0 github.com/containerd/errdefs v1.0.0 - github.com/containers/common v0.63.0 + github.com/containers/common v0.63.1 github.com/docker/cli v28.2.2+incompatible github.com/docker/docker v28.2.2+incompatible github.com/docker/go-connections v0.5.0 github.com/getsentry/sentry-go v0.33.0 github.com/go-errors/errors v1.5.1 - github.com/go-git/go-git/v5 v5.16.0 + github.com/go-git/go-git/v5 v5.16.2 github.com/go-xmlfmt/xmlfmt v1.1.3 
github.com/golangci/golangci-lint/v2 v2.1.6 github.com/google/go-github/v62 v62.0.0 @@ -31,7 +31,7 @@ require ( github.com/joho/godotenv v1.5.1 github.com/muesli/reflow v0.3.0 github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 - github.com/slack-go/slack v0.17.0 + github.com/slack-go/slack v0.17.1 github.com/spf13/afero v1.14.0 github.com/spf13/cobra v1.9.1 github.com/spf13/pflag v1.0.6 @@ -43,10 +43,10 @@ require ( github.com/withfig/autocomplete-tools/packages/cobra v1.2.0 github.com/zalando/go-keyring v0.2.6 go.opentelemetry.io/otel v1.36.0 - golang.org/x/mod v0.24.0 + golang.org/x/mod v0.25.0 golang.org/x/oauth2 v0.30.0 golang.org/x/term v0.32.0 - google.golang.org/grpc v1.72.2 + google.golang.org/grpc v1.73.0 gopkg.in/yaml.v3 v3.0.1 gotest.tools/gotestsum v1.12.2 ) @@ -318,8 +318,8 @@ require ( go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.34.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.34.0 // indirect go.opentelemetry.io/otel/metric v1.36.0 // indirect - go.opentelemetry.io/otel/sdk v1.34.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.34.0 // indirect + go.opentelemetry.io/otel/sdk v1.35.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/atomic v1.9.0 // indirect @@ -333,8 +333,8 @@ require ( golang.org/x/sys v0.33.0 // indirect golang.org/x/text v0.24.0 // indirect golang.org/x/tools v0.32.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/go.sum b/go.sum index 1388a0c3e..522289a4b 100644 --- a/go.sum +++ b/go.sum @@ -199,8 +199,8 @@ github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151X github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= -github.com/containers/common v0.63.0 h1:ox6vgUYX5TSvt4W+bE36sYBVz/aXMAfRGVAgvknSjBg= -github.com/containers/common v0.63.0/go.mod h1:+3GCotSqNdIqM3sPs152VvW7m5+Mg8Kk+PExT3G9hZw= +github.com/containers/common v0.63.1 h1:6g02gbW34PaRVH4Heb2Pk11x0SdbQ+8AfeKKeQGqYBE= +github.com/containers/common v0.63.1/go.mod h1:+3GCotSqNdIqM3sPs152VvW7m5+Mg8Kk+PExT3G9hZw= github.com/containers/storage v1.58.0 h1:Q7SyyCCjqgT3wYNgRNIL8o/wUS92heIj2/cc8Sewvcc= github.com/containers/storage v1.58.0/go.mod h1:w7Jl6oG+OpeLGLzlLyOZPkmUso40kjpzgrHUk5tyBlo= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= @@ -312,8 +312,8 @@ github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UN github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod 
h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.16.0 h1:k3kuOEpkc0DeY7xlL6NaaNg39xdgQbtH5mwCafHO9AQ= -github.com/go-git/go-git/v5 v5.16.0/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= +github.com/go-git/go-git/v5 v5.16.2 h1:fT6ZIOjE5iEnkzKyxTHK1W4HGAsPhqEqiSAssSO77hM= +github.com/go-git/go-git/v5 v5.16.2/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -895,8 +895,8 @@ github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+W github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= -github.com/slack-go/slack v0.17.0 h1:Vqd4GGIcwwgEu80GBs3cXoPPho5bkDGSFnuZbSG0NhA= -github.com/slack-go/slack v0.17.0/go.mod h1:X+UqOufi3LYQHDnMG1vxf0J8asC6+WllXrVrhl8/Prk= +github.com/slack-go/slack v0.17.1 h1:x0Mnc6biHBea5vfxLR+x4JFl/Rm3eIo0iS3xDZenX+o= +github.com/slack-go/slack v0.17.1/go.mod h1:X+UqOufi3LYQHDnMG1vxf0J8asC6+WllXrVrhl8/Prk= github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM= github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= @@ -1048,10 +1048,10 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.34.0 h1:BEj3S go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.34.0/go.mod h1:9cKLGBDzI/F3NoHLQGm4ZrYdIHsvGt6ej6hUowxY0J4= go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= -go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= -go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= -go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= -go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= +go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= +go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= +go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= +go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU/3i4= @@ -1144,8 +1144,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.24.0 
h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1444,10 +1444,10 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb h1:p31xT4yrYrSM/G4Sn2+TNUkVhFCbG9y8itM2S6Th950= -google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:jbe3Bkdp+Dh2IrslsFCklNhweNTBgSYanP1UXhJDhKg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= +google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463 h1:hE3bRWtU6uceqlh4fhrSnUyjKHMKB9KrTLLG+bc0ddM= +google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463/go.mod h1:U90ffi8eUL9MwPcrJylN5+Mk2v3vuPDptd5yyNUiRR8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1461,8 +1461,8 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.72.2 h1:TdbGzwb82ty4OusHWepvFWGLgIbNo1/SUynEN0ssqv8= -google.golang.org/grpc v1.72.2/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= +google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= +google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/internal/backups/list/list.go b/internal/backups/list/list.go new file mode 100644 index 000000000..bcff285ff --- /dev/null +++ 
b/internal/backups/list/list.go @@ -0,0 +1,69 @@ +package list + +import ( + "context" + "fmt" + "os" + + "github.com/go-errors/errors" + "github.com/supabase/cli/internal/migration/list" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/api" + "github.com/supabase/cli/pkg/cast" +) + +func Run(ctx context.Context) error { + resp, err := utils.GetSupabase().V1ListAllBackupsWithResponse(ctx, flags.ProjectRef) + if err != nil { + return errors.Errorf("failed to list physical backups: %w", err) + } else if resp.JSON200 == nil { + return errors.Errorf("unexpected list backup status %d: %s", resp.StatusCode(), string(resp.Body)) + } + switch utils.OutputFormat.Value { + case utils.OutputPretty: + if len(resp.JSON200.Backups) > 0 { + return listLogicalBackups(*resp.JSON200) + } + table := `REGION|WALG|PITR|EARLIEST TIMESTAMP|LATEST TIMESTAMP +|-|-|-|-|-| +` + table += fmt.Sprintf( + "|`%s`|`%t`|`%t`|`%d`|`%d`|\n", + utils.FormatRegion(resp.JSON200.Region), + resp.JSON200.WalgEnabled, + resp.JSON200.PitrEnabled, + cast.Val(resp.JSON200.PhysicalBackupData.EarliestPhysicalBackupDateUnix, 0), + cast.Val(resp.JSON200.PhysicalBackupData.LatestPhysicalBackupDateUnix, 0), + ) + return list.RenderTable(table) + case utils.OutputEnv: + return errors.Errorf("--output env flag is not supported") + } + return utils.EncodeOutput(utils.OutputFormat.Value, os.Stdout, *resp.JSON200) +} + +const ( + BACKUP_LOGICAL = "LOGICAL" + BACKUP_PHYSICAL = "PHYSICAL" +) + +func listLogicalBackups(resp api.V1BackupsResponse) error { + table := `REGION|BACKUP TYPE|STATUS|CREATED AT (UTC) +|-|-|-|-| +` + for _, backup := range resp.Backups { + backupType := BACKUP_LOGICAL + if backup.IsPhysicalBackup { + backupType = BACKUP_PHYSICAL + } + table += fmt.Sprintf( + "|`%s`|`%s`|`%s`|`%s`|\n", + utils.FormatRegion(resp.Region), + backupType, + backup.Status, + utils.FormatTimestamp(backup.InsertedAt), + ) + } + return list.RenderTable(table) +} diff --git a/internal/backups/restore/restore.go b/internal/backups/restore/restore.go new file mode 100644 index 000000000..8118f1bcd --- /dev/null +++ b/internal/backups/restore/restore.go @@ -0,0 +1,24 @@ +package restore + +import ( + "context" + "fmt" + "net/http" + + "github.com/go-errors/errors" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/internal/utils/flags" + "github.com/supabase/cli/pkg/api" +) + +func Run(ctx context.Context, timestamp int64) error { + body := api.V1RestorePitrBody{RecoveryTimeTargetUnix: timestamp} + resp, err := utils.GetSupabase().V1RestorePitrBackupWithResponse(ctx, flags.ProjectRef, body) + if err != nil { + return errors.Errorf("failed to restore backup: %w", err) + } else if resp.StatusCode() != http.StatusCreated { + return errors.Errorf("unexpected restore backup status %d: %s", resp.StatusCode(), string(resp.Body)) + } + fmt.Println("Started PITR restore:", flags.ProjectRef) + return nil +} diff --git a/internal/db/dump/dump.go b/internal/db/dump/dump.go index d9cd2af7c..7b40ddccc 100644 --- a/internal/db/dump/dump.go +++ b/internal/db/dump/dump.go @@ -14,33 +14,23 @@ import ( "github.com/jackc/pgconn" "github.com/spf13/afero" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/migration" ) -var ( - //go:embed templates/dump_schema.sh - dumpSchemaScript string - //go:embed templates/dump_data.sh - dumpDataScript string - //go:embed templates/dump_role.sh - dumpRoleScript string -) - -func Run(ctx context.Context, path string, config 
pgconn.Config, schema, excludeTable []string, dataOnly, roleOnly, keepComments, useCopy, dryRun bool, fsys afero.Fs) error { +func Run(ctx context.Context, path string, config pgconn.Config, dataOnly, roleOnly, dryRun bool, fsys afero.Fs, opts ...migration.DumpOptionFunc) error { // Initialize output stream - var outStream afero.File - if len(path) > 0 { + outStream := (io.Writer)(os.Stdout) + exec := DockerExec + if dryRun { + fmt.Fprintln(os.Stderr, "DRY RUN: *only* printing the pg_dump script to console.") + exec = noExec + } else if len(path) > 0 { f, err := fsys.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) if err != nil { return errors.Errorf("failed to open dump file: %w", err) } defer f.Close() outStream = f - } else { - outStream = os.Stdout - } - // Load the requested script - if dryRun { - fmt.Fprintln(os.Stderr, "DRY RUN: *only* printing the pg_dump script to console.") } db := "remote" if utils.IsLocalDatabase(config) { @@ -48,132 +38,42 @@ func Run(ctx context.Context, path string, config pgconn.Config, schema, exclude } if dataOnly { fmt.Fprintf(os.Stderr, "Dumping data from %s database...\n", db) - return dumpData(ctx, config, schema, excludeTable, useCopy, dryRun, outStream) + return migration.DumpData(ctx, config, outStream, exec, opts...) } else if roleOnly { fmt.Fprintf(os.Stderr, "Dumping roles from %s database...\n", db) - return dumpRole(ctx, config, keepComments, dryRun, outStream) + return migration.DumpRole(ctx, config, outStream, exec, opts...) } fmt.Fprintf(os.Stderr, "Dumping schemas from %s database...\n", db) - return DumpSchema(ctx, config, schema, keepComments, dryRun, outStream) + return migration.DumpSchema(ctx, config, outStream, exec, opts...) } -func DumpSchema(ctx context.Context, config pgconn.Config, schema []string, keepComments, dryRun bool, stdout io.Writer) error { - var env []string - if len(schema) > 0 { - // Must append flag because empty string results in error - env = append(env, "EXTRA_FLAGS=--schema="+strings.Join(schema, "|")) - } else { - env = append(env, "EXCLUDED_SCHEMAS="+strings.Join(utils.InternalSchemas, "|")) - } - if !keepComments { - env = append(env, "EXTRA_SED=/^--/d") - } - return dump(ctx, config, dumpSchemaScript, env, dryRun, stdout) -} - -func dumpData(ctx context.Context, config pgconn.Config, schema, excludeTable []string, useCopy, dryRun bool, stdout io.Writer) error { - // We want to dump user data in auth, storage, etc. 
for migrating to new project - excludedSchemas := []string{ - "information_schema", - "pg_*", // Wildcard pattern follows pg_dump - // Owned by extensions - // "cron", - "graphql", - "graphql_public", - // "net", - // "pgmq", - "pgsodium", - "pgsodium_masks", - "pgtle", - "repack", - "tiger", - "tiger_data", - "timescaledb_*", - "_timescaledb_*", - "topology", - "vault", - // Managed by Supabase - // "auth", - "extensions", - "pgbouncer", - "realtime", - // "storage", - // "supabase_functions", - "supabase_migrations", - // TODO: Remove in a few version in favor of _supabase internal db - "_analytics", - "_realtime", - "_supavisor", - } - var env []string - if len(schema) > 0 { - env = append(env, "INCLUDED_SCHEMAS="+strings.Join(schema, "|")) - } else { - env = append(env, "INCLUDED_SCHEMAS=*", "EXCLUDED_SCHEMAS="+strings.Join(excludedSchemas, "|")) - } - var extraFlags []string - if !useCopy { - extraFlags = append(extraFlags, "--column-inserts", "--rows-per-insert 100000") - } - for _, table := range excludeTable { - escaped := quoteUpperCase(table) - // Use separate flags to avoid error: too many dotted names - extraFlags = append(extraFlags, "--exclude-table "+escaped) - } - if len(extraFlags) > 0 { - env = append(env, "EXTRA_FLAGS="+strings.Join(extraFlags, " ")) - } - return dump(ctx, config, dumpDataScript, env, dryRun, stdout) -} - -func quoteUpperCase(table string) string { - escaped := strings.ReplaceAll(table, ".", `"."`) - return fmt.Sprintf(`"%s"`, escaped) -} - -func dumpRole(ctx context.Context, config pgconn.Config, keepComments, dryRun bool, stdout io.Writer) error { - env := []string{} - if !keepComments { - env = append(env, "EXTRA_SED=/^--/d") +func noExec(ctx context.Context, script string, env []string, w io.Writer) error { + envMap := make(map[string]string, len(env)) + for _, e := range env { + index := strings.IndexByte(e, '=') + if index < 0 { + continue + } + envMap[e[:index]] = e[index+1:] } - return dump(ctx, config, dumpRoleScript, env, dryRun, stdout) + expanded := os.Expand(script, func(key string) string { + // Bash variable expansion is unsupported: + // https://github.com/golang/go/issues/47187 + parts := strings.Split(key, ":") + value := envMap[parts[0]] + // Escape double quotes in env vars + return strings.ReplaceAll(value, `"`, `\"`) + }) + fmt.Fprintln(w, expanded) + return nil } -func dump(ctx context.Context, config pgconn.Config, script string, env []string, dryRun bool, stdout io.Writer) error { - allEnvs := append(env, - "PGHOST="+config.Host, - fmt.Sprintf("PGPORT=%d", config.Port), - "PGUSER="+config.User, - "PGPASSWORD="+config.Password, - "PGDATABASE="+config.Database, - "RESERVED_ROLES="+strings.Join(utils.ReservedRoles, "|"), - "ALLOWED_CONFIGS="+strings.Join(utils.AllowedConfigs, "|"), - ) - if dryRun { - envMap := make(map[string]string, len(allEnvs)) - for _, e := range allEnvs { - index := strings.IndexByte(e, '=') - if index < 0 { - continue - } - envMap[e[:index]] = e[index+1:] - } - expanded := os.Expand(script, func(key string) string { - // Bash variable expansion is unsupported: - // https://github.com/golang/go/issues/47187 - parts := strings.Split(key, ":") - value := envMap[parts[0]] - // Escape double quotes in env vars - return strings.ReplaceAll(value, `"`, `\"`) - }) - fmt.Println(expanded) - return nil - } +func DockerExec(ctx context.Context, script string, env []string, w io.Writer) error { return utils.DockerRunOnceWithConfig( ctx, container.Config{ Image: utils.Config.Db.Image, - Env: allEnvs, + Env: env, Cmd: 
[]string{"bash", "-c", script, "--"}, }, container.HostConfig{ @@ -181,7 +81,7 @@ func dump(ctx context.Context, config pgconn.Config, script string, env []string }, network.NetworkingConfig{}, "", - stdout, + w, os.Stderr, ) } diff --git a/internal/db/dump/dump_test.go b/internal/db/dump/dump_test.go index e8e4b2d1a..bfdccb128 100644 --- a/internal/db/dump/dump_test.go +++ b/internal/db/dump/dump_test.go @@ -12,6 +12,7 @@ import ( "github.com/stretchr/testify/require" "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/migration" ) var dbConfig = pgconn.Config{ @@ -22,7 +23,7 @@ var dbConfig = pgconn.Config{ Database: "postgres", } -func TestPullCommand(t *testing.T) { +func TestDumpCommand(t *testing.T) { imageUrl := utils.GetRegistryImageUrl(utils.Config.Db.Image) const containerId = "test-container" @@ -35,7 +36,7 @@ func TestPullCommand(t *testing.T) { apitest.MockDockerStart(utils.Docker, imageUrl, containerId) require.NoError(t, apitest.MockDockerLogs(utils.Docker, containerId, "hello world")) // Run test - err := Run(context.Background(), "schema.sql", dbConfig, nil, nil, false, false, false, false, false, fsys) + err := Run(context.Background(), "schema.sql", dbConfig, false, false, false, fsys) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -54,7 +55,7 @@ func TestPullCommand(t *testing.T) { apitest.MockDockerStart(utils.Docker, imageUrl, containerId) require.NoError(t, apitest.MockDockerLogs(utils.Docker, containerId, "hello world\n")) // Run test - err := Run(context.Background(), "", dbConfig, []string{"public"}, nil, false, false, false, false, false, fsys) + err := Run(context.Background(), "", dbConfig, false, false, false, fsys, migration.WithSchema("public")) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -70,7 +71,7 @@ func TestPullCommand(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/images"). 
Reply(http.StatusServiceUnavailable) // Run test - err := Run(context.Background(), "", dbConfig, nil, nil, false, false, false, false, false, fsys) + err := Run(context.Background(), "", dbConfig, false, false, false, fsys) // Check error assert.ErrorContains(t, err, "request returned 503 Service Unavailable for API route and version") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -85,7 +86,7 @@ func TestPullCommand(t *testing.T) { apitest.MockDockerStart(utils.Docker, imageUrl, containerId) require.NoError(t, apitest.MockDockerLogs(utils.Docker, containerId, "hello world\n")) // Run test - err := Run(context.Background(), "schema.sql", dbConfig, nil, nil, false, false, false, false, false, fsys) + err := Run(context.Background(), "schema.sql", dbConfig, false, false, false, fsys) // Check error assert.ErrorContains(t, err, "operation not permitted") assert.Empty(t, apitest.ListUnmatchedRequests()) diff --git a/internal/db/pull/pull.go b/internal/db/pull/pull.go index 3f99f972f..9c302e61b 100644 --- a/internal/db/pull/pull.go +++ b/internal/db/pull/pull.go @@ -6,6 +6,7 @@ import ( "fmt" "math" "os" + "path/filepath" "strconv" "github.com/go-errors/errors" @@ -89,7 +90,7 @@ func run(p utils.Program, ctx context.Context, schema []string, path string, con func dumpRemoteSchema(p utils.Program, ctx context.Context, path string, config pgconn.Config, fsys afero.Fs) error { // Special case if this is the first migration p.Send(utils.StatusMsg("Dumping schema from remote database...")) - if err := utils.MkdirIfNotExistFS(fsys, utils.MigrationsDir); err != nil { + if err := utils.MkdirIfNotExistFS(fsys, filepath.Dir(path)); err != nil { return err } f, err := fsys.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) @@ -97,7 +98,7 @@ func dumpRemoteSchema(p utils.Program, ctx context.Context, path string, config return errors.Errorf("failed to open dump file: %w", err) } defer f.Close() - return dump.DumpSchema(ctx, config, nil, false, false, f) + return migration.DumpSchema(ctx, config, f, dump.DockerExec) } func diffRemoteSchema(p utils.Program, ctx context.Context, schema []string, path string, config pgconn.Config, fsys afero.Fs) error { diff --git a/internal/db/push/push.go b/internal/db/push/push.go index 4a1fdf6df..6960702d1 100644 --- a/internal/db/push/push.go +++ b/internal/db/push/push.go @@ -26,8 +26,10 @@ func Run(ctx context.Context, dryRun, ignoreVersionMismatch bool, includeRoles, return err } defer conn.Close(context.Background()) - pending, err := up.GetPendingMigrations(ctx, ignoreVersionMismatch, conn, fsys) - if err != nil { + var pending []string + if !utils.Config.Db.Migrations.Enabled { + fmt.Fprintln(os.Stderr, "Skipping migrations because it is disabled in config.toml for project:", flags.ProjectRef) + } else if pending, err = up.GetPendingMigrations(ctx, ignoreVersionMismatch, conn, fsys); err != nil { return err } var seeds []migration.SeedFile diff --git a/internal/db/reset/reset.go b/internal/db/reset/reset.go index 3455ffbc0..fd5b7e711 100644 --- a/internal/db/reset/reset.go +++ b/internal/db/reset/reset.go @@ -23,14 +23,15 @@ import ( "github.com/supabase/cli/internal/db/start" "github.com/supabase/cli/internal/gen/keys" "github.com/supabase/cli/internal/migration/apply" + "github.com/supabase/cli/internal/migration/down" + "github.com/supabase/cli/internal/migration/list" "github.com/supabase/cli/internal/migration/repair" "github.com/supabase/cli/internal/seed/buckets" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/pkg/migration" - 
"github.com/supabase/cli/pkg/vault" ) -func Run(ctx context.Context, version string, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { +func Run(ctx context.Context, version string, last uint, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { if len(version) > 0 { if _, err := strconv.Atoi(version); err != nil { return errors.New(repair.ErrInvalidVersion) @@ -38,14 +39,19 @@ func Run(ctx context.Context, version string, config pgconn.Config, fsys afero.F if _, err := repair.GetMigrationFile(version, fsys); err != nil { return err } - } - if !utils.IsLocalDatabase(config) { - msg := "Do you want to reset the remote database?" - if shouldReset, err := utils.NewConsole().PromptYesNo(ctx, msg, false); err != nil { + } else if last > 0 { + localMigrations, err := list.LoadLocalVersions(fsys) + if err != nil { return err - } else if !shouldReset { - return errors.New(context.Canceled) } + if total := uint(len(localMigrations)); last < total { + version = localMigrations[total-last-1] + } else { + // Negative skips all migrations + version = "-" + } + } + if !utils.IsLocalDatabase(config) { return resetRemote(ctx, version, config, fsys, options...) } // Config file is loaded before parsing --linked or --local flags @@ -233,19 +239,19 @@ func listServicesToRestart() []string { } func resetRemote(ctx context.Context, version string, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { + msg := "Do you want to reset the remote database?" + if shouldReset, err := utils.NewConsole().PromptYesNo(ctx, msg, false); err != nil { + return err + } else if !shouldReset { + return errors.New(context.Canceled) + } fmt.Fprintln(os.Stderr, "Resetting remote database"+toLogMessage(version)) conn, err := utils.ConnectByConfigStream(ctx, config, io.Discard, options...) if err != nil { return err } defer conn.Close(context.Background()) - if err := migration.DropUserSchemas(ctx, conn); err != nil { - return err - } - if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { - return err - } - return apply.MigrateAndSeed(ctx, version, conn, fsys) + return down.ResetAll(ctx, version, conn, fsys) } func LikeEscapeSchema(schemas []string) (result []string) { diff --git a/internal/db/reset/reset_test.go b/internal/db/reset/reset_test.go index bb87bd912..23454d070 100644 --- a/internal/db/reset/reset_test.go +++ b/internal/db/reset/reset_test.go @@ -5,7 +5,6 @@ import ( "errors" "io" "net/http" - "path/filepath" "testing" "time" @@ -20,9 +19,7 @@ import ( "github.com/supabase/cli/internal/db/start" "github.com/supabase/cli/internal/testing/apitest" "github.com/supabase/cli/internal/testing/fstest" - "github.com/supabase/cli/internal/testing/helper" "github.com/supabase/cli/internal/utils" - "github.com/supabase/cli/pkg/migration" "github.com/supabase/cli/pkg/pgtest" "github.com/supabase/cli/pkg/storage" ) @@ -96,7 +93,7 @@ func TestResetCommand(t *testing.T) { Reply(http.StatusOK). 
JSON([]storage.BucketResponse{}) // Run test - err := Run(context.Background(), "", dbConfig, fsys, conn.Intercept) + err := Run(context.Background(), "", 0, dbConfig, fsys, conn.Intercept) // Check error assert.NoError(t, err) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -106,7 +103,7 @@ func TestResetCommand(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() // Run test - err := Run(context.Background(), "", pgconn.Config{Host: "db.supabase.co"}, fsys) + err := Run(context.Background(), "", 0, pgconn.Config{Host: "db.supabase.co"}, fsys) // Check error assert.ErrorIs(t, err, context.Canceled) }) @@ -116,7 +113,7 @@ func TestResetCommand(t *testing.T) { // Setup in-memory fs fsys := afero.NewMemMapFs() // Run test - err := Run(context.Background(), "", pgconn.Config{Host: "db.supabase.co"}, fsys) + err := Run(context.Background(), "", 0, pgconn.Config{Host: "db.supabase.co"}, fsys) // Check error assert.ErrorContains(t, err, "invalid port (outside range)") }) @@ -131,7 +128,7 @@ func TestResetCommand(t *testing.T) { Get("/v" + utils.Docker.ClientVersion() + "/containers"). Reply(http.StatusNotFound) // Run test - err := Run(context.Background(), "", dbConfig, fsys) + err := Run(context.Background(), "", 0, dbConfig, fsys) // Check error assert.ErrorIs(t, err, utils.ErrNotRunning) assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -153,7 +150,7 @@ func TestResetCommand(t *testing.T) { Delete("/v" + utils.Docker.ClientVersion() + "/containers/" + utils.DbId). ReplyError(errors.New("network error")) // Run test - err := Run(context.Background(), "", dbConfig, fsys) + err := Run(context.Background(), "", 0, dbConfig, fsys) // Check error assert.ErrorContains(t, err, "network error") assert.Empty(t, apitest.ListUnmatchedRequests()) @@ -406,90 +403,3 @@ func TestRestartDatabase(t *testing.T) { assert.Empty(t, apitest.ListUnmatchedRequests()) }) } - -var escapedSchemas = append(migration.ManagedSchemas, "extensions", "public") - -func TestResetRemote(t *testing.T) { - dbConfig := pgconn.Config{ - Host: "db.supabase.co", - Port: 5432, - User: "admin", - Password: "password", - Database: "postgres", - } - - t.Run("resets remote database", func(t *testing.T) { - // Setup in-memory fs - fsys := afero.NewMemMapFs() - path := filepath.Join(utils.MigrationsDir, "0_schema.sql") - require.NoError(t, afero.WriteFile(fsys, path, nil, 0644)) - // Setup mock postgres - conn := pgtest.NewConn() - defer conn.Close(t) - conn.Query(migration.ListSchemas, escapedSchemas). - Reply("SELECT 1", []interface{}{"private"}). - Query("DROP SCHEMA IF EXISTS private CASCADE"). - Reply("DROP SCHEMA"). - Query(migration.DropObjects). - Reply("INSERT 0") - helper.MockMigrationHistory(conn). - Query(migration.INSERT_MIGRATION_VERSION, "0", "schema", nil). - Reply("INSERT 0 1") - // Run test - err := resetRemote(context.Background(), "", dbConfig, fsys, conn.Intercept) - // Check error - assert.NoError(t, err) - }) - - t.Run("resets remote database with seed config disabled", func(t *testing.T) { - // Setup in-memory fs - fsys := afero.NewMemMapFs() - path := filepath.Join(utils.MigrationsDir, "0_schema.sql") - require.NoError(t, afero.WriteFile(fsys, path, nil, 0644)) - seedPath := filepath.Join(utils.SupabaseDirPath, "seed.sql") - // Will raise an error when seeding - require.NoError(t, afero.WriteFile(fsys, seedPath, []byte("INSERT INTO test_table;"), 0644)) - // Setup mock postgres - conn := pgtest.NewConn() - defer conn.Close(t) - conn.Query(migration.ListSchemas, escapedSchemas). 
- Reply("SELECT 1", []interface{}{"private"}). - Query("DROP SCHEMA IF EXISTS private CASCADE"). - Reply("DROP SCHEMA"). - Query(migration.DropObjects). - Reply("INSERT 0") - helper.MockMigrationHistory(conn). - Query(migration.INSERT_MIGRATION_VERSION, "0", "schema", nil). - Reply("INSERT 0 1") - utils.Config.Db.Seed.Enabled = false - // Run test - err := resetRemote(context.Background(), "", dbConfig, fsys, conn.Intercept) - // No error should be raised since we're skipping the seed - assert.NoError(t, err) - }) - - t.Run("throws error on connect failure", func(t *testing.T) { - // Setup in-memory fs - fsys := afero.NewMemMapFs() - // Run test - err := resetRemote(context.Background(), "", pgconn.Config{}, fsys) - // Check error - assert.ErrorContains(t, err, "invalid port (outside range)") - }) - - t.Run("throws error on drop schema failure", func(t *testing.T) { - // Setup in-memory fs - fsys := afero.NewMemMapFs() - // Setup mock postgres - conn := pgtest.NewConn() - defer conn.Close(t) - conn.Query(migration.ListSchemas, escapedSchemas). - Reply("SELECT 0"). - Query(migration.DropObjects). - ReplyError(pgerrcode.InsufficientPrivilege, "permission denied for relation supabase_migrations") - // Run test - err := resetRemote(context.Background(), "", dbConfig, fsys, conn.Intercept) - // Check error - assert.ErrorContains(t, err, "ERROR: permission denied for relation supabase_migrations (SQLSTATE 42501)") - }) -} diff --git a/internal/functions/deploy/deploy.go b/internal/functions/deploy/deploy.go index a3dba3e18..470a54304 100644 --- a/internal/functions/deploy/deploy.go +++ b/internal/functions/deploy/deploy.go @@ -51,7 +51,11 @@ func Run(ctx context.Context, slugs []string, useDocker bool, noVerifyJWT *bool, } opt := function.WithMaxJobs(maxJobs) if useDocker { - opt = function.WithBundler(NewDockerBundler(fsys)) + if utils.IsDockerRunning(ctx) { + opt = function.WithBundler(NewDockerBundler(fsys)) + } else { + fmt.Fprintln(os.Stderr, utils.Yellow("WARNING:"), "Docker is not running") + } } api := function.NewEdgeRuntimeAPI(flags.ProjectRef, *utils.GetSupabase(), opt) if err := api.Deploy(ctx, functionConfig, afero.NewIOFS(fsys)); errors.Is(err, function.ErrNoDeploy) { diff --git a/internal/functions/deploy/deploy_test.go b/internal/functions/deploy/deploy_test.go index 6b59dd3cf..9f0fb79db 100644 --- a/internal/functions/deploy/deploy_test.go +++ b/internal/functions/deploy/deploy_test.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "net/http" + "net/url" "os" "path/filepath" "testing" @@ -26,6 +27,11 @@ func TestDeployCommand(t *testing.T) { const containerId = "test-container" imageUrl := utils.GetRegistryImageUrl(utils.Config.EdgeRuntime.Image) + parsed, err := url.Parse(utils.Docker.DaemonHost()) + require.NoError(t, err) + parsed.Scheme = "http:" + dockerHost := parsed.String() + t.Run("deploys multiple functions", func(t *testing.T) { functions := []string{slug, slug + "-2"} // Setup in-memory fs @@ -39,6 +45,9 @@ func TestDeployCommand(t *testing.T) { require.NoError(t, err) // Setup mock api defer gock.OffAll() + gock.New(dockerHost). + Head("/_ping"). + Reply(http.StatusOK) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + flags.ProjectRef + "/functions"). Reply(http.StatusOK). @@ -99,6 +108,9 @@ import_map = "./import_map.json" require.NoError(t, err) // Setup mock api defer gock.OffAll() + gock.New(dockerHost). + Head("/_ping"). + Reply(http.StatusOK) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + flags.ProjectRef + "/functions"). Reply(http.StatusOK). 
@@ -151,6 +163,9 @@ import_map = "./import_map.json" require.NoError(t, err) // Setup mock api defer gock.OffAll() + gock.New(dockerHost). + Head("/_ping"). + Reply(http.StatusOK) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + flags.ProjectRef + "/functions"). Reply(http.StatusOK). @@ -214,6 +229,9 @@ verify_jwt = false require.NoError(t, err) // Setup mock api defer gock.OffAll() + gock.New(dockerHost). + Head("/_ping"). + Reply(http.StatusOK) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + flags.ProjectRef + "/functions"). Reply(http.StatusOK). @@ -257,6 +275,9 @@ verify_jwt = false require.NoError(t, err) // Setup mock api defer gock.OffAll() + gock.New(dockerHost). + Head("/_ping"). + Reply(http.StatusOK) gock.New(utils.DefaultApiHost). Get("/v1/projects/" + flags.ProjectRef + "/functions"). Reply(http.StatusOK). diff --git a/internal/functions/download/download.go b/internal/functions/download/download.go index ca99c1bcf..d37d4a54e 100644 --- a/internal/functions/download/download.go +++ b/internal/functions/download/download.go @@ -10,11 +10,14 @@ import ( "os/exec" "path" "path/filepath" + "strings" + "github.com/andybalholm/brotli" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/network" "github.com/go-errors/errors" "github.com/spf13/afero" + "github.com/spf13/viper" "github.com/supabase/cli/internal/utils" "github.com/supabase/cli/internal/utils/flags" "github.com/supabase/cli/pkg/api" @@ -121,11 +124,13 @@ func Run(ctx context.Context, slug string, projectRef string, useLegacyBundle bo if err != nil { return err } - defer func() { - if err := fsys.Remove(eszipPath); err != nil { - fmt.Fprintln(os.Stderr, err) - } - }() + if !viper.GetBool("DEBUG") { + defer func() { + if err := fsys.Remove(eszipPath); err != nil { + fmt.Fprintln(os.Stderr, err) + } + }() + } // Extract eszip to functions directory err = extractOne(ctx, slug, eszipPath) if err != nil { @@ -148,12 +153,16 @@ func downloadOne(ctx context.Context, slug, projectRef string, fsys afero.Fs) (s } return "", errors.Errorf("Error status %d: %s", resp.StatusCode, string(body)) } + r := io.Reader(resp.Body) + if strings.EqualFold(resp.Header.Get("Content-Encoding"), "br") { + r = brotli.NewReader(resp.Body) + } // Create temp file to store downloaded eszip eszipPath := filepath.Join(utils.TempDir, fmt.Sprintf("output_%s.eszip", slug)) if err := utils.MkdirIfNotExistFS(fsys, utils.TempDir); err != nil { return "", err } - if err := afero.WriteReader(fsys, eszipPath, resp.Body); err != nil { + if err := afero.WriteReader(fsys, eszipPath, r); err != nil { return "", errors.Errorf("failed to download file: %w", err) } return eszipPath, nil diff --git a/internal/migration/apply/apply.go b/internal/migration/apply/apply.go index 224b342f7..f9c517ace 100644 --- a/internal/migration/apply/apply.go +++ b/internal/migration/apply/apply.go @@ -11,14 +11,21 @@ import ( ) func MigrateAndSeed(ctx context.Context, version string, conn *pgx.Conn, fsys afero.Fs) error { - migrations, err := list.LoadPartialMigrations(version, fsys) - if err != nil { + if err := applyMigrationFiles(ctx, version, conn, fsys); err != nil { return err } - if err := migration.ApplyMigrations(ctx, migrations, conn, afero.NewIOFS(fsys)); err != nil { + return applySeedFiles(ctx, conn, fsys) +} + +func applyMigrationFiles(ctx context.Context, version string, conn *pgx.Conn, fsys afero.Fs) error { + if !utils.Config.Db.Migrations.Enabled { + return nil + } + migrations, err := list.LoadPartialMigrations(version, fsys) + 
if err != nil { return err } - return applySeedFiles(ctx, conn, fsys) + return migration.ApplyMigrations(ctx, migrations, conn, afero.NewIOFS(fsys)) } func applySeedFiles(ctx context.Context, conn *pgx.Conn, fsys afero.Fs) error { diff --git a/internal/migration/down/down.go b/internal/migration/down/down.go new file mode 100644 index 000000000..35a9fa7e8 --- /dev/null +++ b/internal/migration/down/down.go @@ -0,0 +1,64 @@ +package down + +import ( + "context" + "fmt" + "os" + + "github.com/go-errors/errors" + "github.com/jackc/pgconn" + "github.com/jackc/pgx/v4" + "github.com/spf13/afero" + "github.com/supabase/cli/internal/migration/apply" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/vault" +) + +func Run(ctx context.Context, last uint, config pgconn.Config, fsys afero.Fs, options ...func(*pgx.ConnConfig)) error { + if last == 0 { + return errors.Errorf("--last must be greater than 0") + } + conn, err := utils.ConnectByConfig(ctx, config, options...) + if err != nil { + return err + } + defer conn.Close(context.Background()) + remoteMigrations, err := migration.ListRemoteMigrations(ctx, conn) + if err != nil { + return err + } + total := uint(len(remoteMigrations)) + if total <= last { + utils.CmdSuggestion = fmt.Sprintf("Try %s if you want to revert all migrations.", utils.Aqua("supabase db reset")) + return errors.Errorf("--last must be smaller than total applied migrations: %d", total) + } + msg := confirmResetAll(remoteMigrations[total-last:]) + if shouldReset, err := utils.NewConsole().PromptYesNo(ctx, msg, false); err != nil { + return err + } else if !shouldReset { + return errors.New(context.Canceled) + } + version := remoteMigrations[total-last-1] + fmt.Fprintln(os.Stderr, "Resetting database to version:", version) + return ResetAll(ctx, version, conn, fsys) +} + +func ResetAll(ctx context.Context, version string, conn *pgx.Conn, fsys afero.Fs) error { + if err := migration.DropUserSchemas(ctx, conn); err != nil { + return err + } + if err := vault.UpsertVaultSecrets(ctx, utils.Config.Db.Vault, conn); err != nil { + return err + } + return apply.MigrateAndSeed(ctx, version, conn, fsys) +} + +func confirmResetAll(pending []string) string { + msg := fmt.Sprintln("Do you want to revert the following migrations?") + for _, v := range pending { + msg += fmt.Sprintf(" • %s\n", utils.Bold(v)) + } + msg += fmt.Sprintf("%s you will lose all data in this database.", utils.Yellow("WARNING:")) + return msg +} diff --git a/internal/migration/down/down_test.go b/internal/migration/down/down_test.go new file mode 100644 index 000000000..e8d08b806 --- /dev/null +++ b/internal/migration/down/down_test.go @@ -0,0 +1,147 @@ +package down + +import ( + "context" + "path/filepath" + "testing" + + "github.com/jackc/pgconn" + "github.com/jackc/pgerrcode" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/supabase/cli/internal/testing/helper" + "github.com/supabase/cli/internal/utils" + "github.com/supabase/cli/pkg/migration" + "github.com/supabase/cli/pkg/pgtest" +) + +var dbConfig = pgconn.Config{ + Host: "127.0.0.1", + Port: 5432, + User: "admin", + Password: "password", + Database: "postgres", +} + +func TestMigrationsDown(t *testing.T) { + t.Run("resets last n migrations", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + files := []string{ + filepath.Join(utils.MigrationsDir, "20221201000000_test.sql"), + 
filepath.Join(utils.MigrationsDir, "20221201000001_test.sql"), + filepath.Join(utils.MigrationsDir, "20221201000002_test.sql"), + } + for _, path := range files { + require.NoError(t, afero.WriteFile(fsys, path, []byte(""), 0644)) + } + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.LIST_MIGRATION_VERSION). + Reply("SELECT 2", []interface{}{"20221201000000"}, []interface{}{"20221201000001"}) + // Run test + err := Run(context.Background(), 1, dbConfig, fsys, conn.Intercept) + // Check error + assert.ErrorIs(t, err, context.Canceled) + }) + + t.Run("throws error on out of range", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.LIST_MIGRATION_VERSION). + Reply("SELECT 2", []interface{}{"20221201000000"}, []interface{}{"20221201000001"}) + // Run test + err := Run(context.Background(), 2, dbConfig, fsys, conn.Intercept) + // Check error + assert.ErrorContains(t, err, "--last must be smaller than total applied migrations: 2") + }) + + t.Run("throws error on insufficient privilege", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.LIST_MIGRATION_VERSION). + ReplyError(pgerrcode.InsufficientPrivilege, "permission denied for relation supabase_migrations") + // Run test + err := Run(context.Background(), 1, dbConfig, fsys, conn.Intercept) + // Check error + assert.ErrorContains(t, err, "ERROR: permission denied for relation supabase_migrations (SQLSTATE 42501)") + }) +} + +var escapedSchemas = append(migration.ManagedSchemas, "extensions", "public") + +func TestResetRemote(t *testing.T) { + t.Run("resets remote database", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + path := filepath.Join(utils.MigrationsDir, "0_schema.sql") + require.NoError(t, afero.WriteFile(fsys, path, nil, 0644)) + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.ListSchemas, escapedSchemas). + Reply("SELECT 1", []interface{}{"private"}). + Query("DROP SCHEMA IF EXISTS private CASCADE"). + Reply("DROP SCHEMA"). + Query(migration.DropObjects). + Reply("INSERT 0") + helper.MockMigrationHistory(conn). + Query(migration.INSERT_MIGRATION_VERSION, "0", "schema", nil). + Reply("INSERT 0 1") + // Run test + err := ResetAll(context.Background(), "", conn.MockClient(t), fsys) + // Check error + assert.NoError(t, err) + }) + + t.Run("resets remote database with seed config disabled", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + path := filepath.Join(utils.MigrationsDir, "0_schema.sql") + require.NoError(t, afero.WriteFile(fsys, path, nil, 0644)) + seedPath := filepath.Join(utils.SupabaseDirPath, "seed.sql") + // Will raise an error when seeding + require.NoError(t, afero.WriteFile(fsys, seedPath, []byte("INSERT INTO test_table;"), 0644)) + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.ListSchemas, escapedSchemas). + Reply("SELECT 1", []interface{}{"private"}). + Query("DROP SCHEMA IF EXISTS private CASCADE"). + Reply("DROP SCHEMA"). + Query(migration.DropObjects). + Reply("INSERT 0") + helper.MockMigrationHistory(conn). + Query(migration.INSERT_MIGRATION_VERSION, "0", "schema", nil). 
+ Reply("INSERT 0 1") + utils.Config.Db.Seed.Enabled = false + // Run test + err := ResetAll(context.Background(), "", conn.MockClient(t), fsys) + // No error should be raised since we're skipping the seed + assert.NoError(t, err) + }) + + t.Run("throws error on drop schema failure", func(t *testing.T) { + // Setup in-memory fs + fsys := afero.NewMemMapFs() + // Setup mock postgres + conn := pgtest.NewConn() + defer conn.Close(t) + conn.Query(migration.ListSchemas, escapedSchemas). + Reply("SELECT 0"). + Query(migration.DropObjects). + ReplyError(pgerrcode.InsufficientPrivilege, "permission denied for relation supabase_migrations") + // Run test + err := ResetAll(context.Background(), "", conn.MockClient(t), fsys) + // Check error + assert.ErrorContains(t, err, "ERROR: permission denied for relation supabase_migrations (SQLSTATE 42501)") + }) +} diff --git a/internal/migration/squash/squash.go b/internal/migration/squash/squash.go index 22f3278de..1ba5c55a9 100644 --- a/internal/migration/squash/squash.go +++ b/internal/migration/squash/squash.go @@ -96,7 +96,7 @@ func squashMigrations(ctx context.Context, migrations []string, fsys afero.Fs, o return err } // Assuming entities in managed schemas are not altered, we can simply diff the dumps before and after migrations. - schemas := []string{"auth", "storage"} + opt := migration.WithSchema("auth", "storage") config := pgconn.Config{ Host: utils.Config.Hostname, Port: utils.Config.Db.ShadowPort, @@ -105,14 +105,14 @@ func squashMigrations(ctx context.Context, migrations []string, fsys afero.Fs, o Database: "postgres", } var before, after bytes.Buffer - if err := dump.DumpSchema(ctx, config, schemas, false, false, &before); err != nil { + if err := migration.DumpSchema(ctx, config, &before, dump.DockerExec, opt); err != nil { return err } // 2. Migrate to target version if err := migration.ApplyMigrations(ctx, migrations, conn, afero.NewIOFS(fsys)); err != nil { return err } - if err := dump.DumpSchema(ctx, config, schemas, false, false, &after); err != nil { + if err := migration.DumpSchema(ctx, config, &after, dump.DockerExec, opt); err != nil { return err } // 3. Dump migrated schema @@ -122,7 +122,7 @@ func squashMigrations(ctx context.Context, migrations []string, fsys afero.Fs, o return errors.Errorf("failed to open migration file: %w", err) } defer f.Close() - if err := dump.DumpSchema(ctx, config, nil, false, false, f); err != nil { + if err := migration.DumpSchema(ctx, config, f, dump.DockerExec); err != nil { return err } // 4. 
Append managed schema diffs diff --git a/internal/projects/list/list.go b/internal/projects/list/list.go index babded41d..da788de68 100644 --- a/internal/projects/list/list.go +++ b/internal/projects/list/list.go @@ -53,7 +53,7 @@ func Run(ctx context.Context, fsys afero.Fs) error { project.OrganizationId, project.Id, strings.ReplaceAll(project.Name, "|", "\\|"), - formatRegion(project.Region), + utils.FormatRegion(project.Region), utils.FormatTimestamp(project.CreatedAt), ) } @@ -77,10 +77,3 @@ func formatBullet(value bool) string { } return " " } - -func formatRegion(region string) string { - if readable, ok := utils.RegionMap[region]; ok { - return readable - } - return region -} diff --git a/internal/start/start.go b/internal/start/start.go index da619fe13..122080c58 100644 --- a/internal/start/start.go +++ b/internal/start/start.go @@ -176,7 +176,7 @@ func run(p utils.Program, ctx context.Context, fsys afero.Fs, excludedContainers "LOGFLARE_MIN_CLUSTER_SIZE=1", "LOGFLARE_SINGLE_TENANT=true", "LOGFLARE_SUPABASE_MODE=true", - "LOGFLARE_API_KEY=" + utils.Config.Analytics.ApiKey, + "LOGFLARE_PRIVATE_ACCESS_TOKEN=" + utils.Config.Analytics.ApiKey, "LOGFLARE_LOG_LEVEL=warn", "LOGFLARE_NODE_HOST=127.0.0.1", "LOGFLARE_FEATURE_FLAG_OVERRIDE='multibackend=true'", @@ -986,7 +986,7 @@ EOF "AUTH_JWT_SECRET=" + utils.Config.Auth.JwtSecret.Value, "SUPABASE_ANON_KEY=" + utils.Config.Auth.AnonKey.Value, "SUPABASE_SERVICE_KEY=" + utils.Config.Auth.ServiceRoleKey.Value, - "LOGFLARE_API_KEY=" + utils.Config.Analytics.ApiKey, + "LOGFLARE_PRIVATE_ACCESS_TOKEN=" + utils.Config.Analytics.ApiKey, "OPENAI_API_KEY=" + utils.Config.Studio.OpenaiApiKey.Value, fmt.Sprintf("LOGFLARE_URL=http://%v:4000", utils.LogflareId), fmt.Sprintf("NEXT_PUBLIC_ENABLE_LOGS=%v", utils.Config.Analytics.Enabled), diff --git a/internal/start/templates/vector.yaml b/internal/start/templates/vector.yaml index 4c29864c0..1b9e5f57c 100644 --- a/internal/start/templates/vector.yaml +++ b/internal/start/templates/vector.yaml @@ -165,7 +165,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=gotrue.logs.prod&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=gotrue.logs.prod" logflare_realtime: type: "http" inputs: @@ -175,7 +177,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=realtime.logs.prod&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=realtime.logs.prod" logflare_rest: type: "http" inputs: @@ -185,7 +189,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=postgREST.logs.prod&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=postgREST.logs.prod" logflare_db: type: "http" inputs: @@ -195,7 +201,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=postgres.logs&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=postgres.logs" logflare_functions: type: "http" inputs: @@ -205,7 +213,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=deno-relay-logs&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ 
.ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=deno-relay-logs" logflare_storage: type: "http" inputs: @@ -215,7 +225,9 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=storage.logs.prod.2&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=storage.logs.prod.2" logflare_kong: type: "http" inputs: @@ -226,4 +238,6 @@ sinks: method: "post" request: retry_max_duration_secs: 10 - uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=cloudflare.logs.prod&api_key={{ .ApiKey }}" + headers: + x-api-key: "{{ .ApiKey }}" + uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=cloudflare.logs.prod" diff --git a/internal/utils/docker.go b/internal/utils/docker.go index 1ee1da7ef..374182a3e 100644 --- a/internal/utils/docker.go +++ b/internal/utils/docker.go @@ -460,6 +460,11 @@ func DockerExecOnceWithStream(ctx context.Context, containerId, workdir string, return err } +func IsDockerRunning(ctx context.Context) bool { + _, err := Docker.Ping(ctx) + return !client.IsErrConnectionFailed(err) +} + var portErrorPattern = regexp.MustCompile("Bind for (.*) failed: port is already allocated") func parsePortBindError(err error) string { diff --git a/internal/utils/misc.go b/internal/utils/misc.go index 4d58b81c8..46a9d3d12 100644 --- a/internal/utils/misc.go +++ b/internal/utils/misc.go @@ -16,6 +16,7 @@ import ( "github.com/go-git/go-git/v5" "github.com/spf13/afero" "github.com/spf13/viper" + "github.com/supabase/cli/pkg/migration" ) // Assigned using `-ldflags` https://stackoverflow.com/q/11354518 @@ -53,69 +54,8 @@ var ( ImageNamePattern = regexp.MustCompile(`\/(.*):`) // These schemas are ignored from db diff and db dump - PgSchemas = []string{ - "information_schema", - "pg_*", // Wildcard pattern follows pg_dump - } - // Initialised by postgres image and owned by postgres role - InternalSchemas = append([]string{ - "_analytics", - "_realtime", - "_supavisor", - "auth", - "extensions", - "pgbouncer", - "realtime", - "storage", - "supabase_functions", - "supabase_migrations", - // Owned by extensions - "cron", - "dbdev", - "graphql", - "graphql_public", - "net", - "pgmq", - "pgsodium", - "pgsodium_masks", - "pgtle", - "repack", - "tiger", - "tiger_data", - "timescaledb_*", - "_timescaledb_*", - "topology", - "vault", - }, PgSchemas...) - ReservedRoles = []string{ - "anon", - "authenticated", - "authenticator", - "dashboard_user", - "pgbouncer", - "postgres", - "service_role", - "supabase_admin", - "supabase_auth_admin", - "supabase_functions_admin", - "supabase_read_only_user", - "supabase_realtime_admin", - "supabase_replication_admin", - "supabase_storage_admin", - // Managed by extensions - "pgsodium_keyholder", - "pgsodium_keyiduser", - "pgsodium_keymaker", - "pgtle_admin", - } - AllowedConfigs = []string{ - // Ref: https://github.com/supabase/postgres/blob/develop/ansible/files/postgresql_config/supautils.conf.j2#L10 - "pgaudit.*", - "pgrst.*", - "session_replication_role", - "statement_timeout", - "track_io_timing", - } + PgSchemas = migration.InternalSchemas[:2] + InternalSchemas = migration.InternalSchemas SupabaseDirPath = "supabase" ConfigPath = filepath.Join(SupabaseDirPath, "config.toml") @@ -150,7 +90,7 @@ var ( func GetCurrentTimestamp() string { // Magic number: https://stackoverflow.com/q/45160822. 
- return time.Now().UTC().Format("20060102150405") + return time.Now().UTC().Format(layoutVersion) } func GetCurrentBranchFS(fsys afero.Fs) (string, error) { diff --git a/internal/utils/render.go b/internal/utils/render.go index 2d99f368a..247b9c7f6 100644 --- a/internal/utils/render.go +++ b/internal/utils/render.go @@ -26,3 +26,10 @@ func parse(layout, value string) string { } return t.UTC().Format(layoutHuman) } + +func FormatRegion(region string) string { + if readable, ok := RegionMap[region]; ok { + return readable + } + return region +} diff --git a/pkg/api/client.gen.go b/pkg/api/client.gen.go index 50cd731ea..b5659e914 100644 --- a/pkg/api/client.gen.go +++ b/pkg/api/client.gen.go @@ -101,6 +101,9 @@ type ClientInterface interface { V1UpdateABranchConfig(ctx context.Context, branchId openapi_types.UUID, body V1UpdateABranchConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1DiffABranch request + V1DiffABranch(ctx context.Context, branchId openapi_types.UUID, params *V1DiffABranchParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1MergeABranchWithBody request with any body V1MergeABranchWithBody(ctx context.Context, branchId openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -143,12 +146,6 @@ type ClientInterface interface { // V1ListOrganizationMembers request V1ListOrganizationMembers(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1GetOrganizationProjectClaim request - V1GetOrganizationProjectClaim(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // V1ClaimProjectForOrganization request - V1ClaimProjectForOrganization(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1ListAllProjects request V1ListAllProjects(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -186,6 +183,12 @@ type ClientInterface interface { CreateApiKey(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // CheckLegacyApiKeys request + CheckLegacyApiKeys(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateLegacyApiKeys request + UpdateLegacyApiKeys(ctx context.Context, ref string, params *UpdateLegacyApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // DeleteApiKey request DeleteApiKey(ctx context.Context, ref string, id openapi_types.UUID, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -219,15 +222,6 @@ type ClientInterface interface { V1CreateABranch(ctx context.Context, ref string, body V1CreateABranchJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1DeleteProjectClaimToken request - V1DeleteProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // V1GetProjectClaimToken request - V1GetProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // V1CreateProjectClaimToken request - V1CreateProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) - // V1GetAuthServiceConfig request V1GetAuthServiceConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -351,6 +345,11 @@ type 
ClientInterface interface { V1ApplyAMigration(ctx context.Context, ref string, params *V1ApplyAMigrationParams, body V1ApplyAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1UpsertAMigrationWithBody request with any body + V1UpsertAMigrationWithBody(ctx context.Context, ref string, params *V1UpsertAMigrationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + V1UpsertAMigration(ctx context.Context, ref string, params *V1UpsertAMigrationParams, body V1UpsertAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1RunAQueryWithBody request with any body V1RunAQueryWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -564,6 +563,18 @@ func (c *Client) V1UpdateABranchConfig(ctx context.Context, branchId openapi_typ return c.Client.Do(req) } +func (c *Client) V1DiffABranch(ctx context.Context, branchId openapi_types.UUID, params *V1DiffABranchParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1DiffABranchRequest(c.Server, branchId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1MergeABranchWithBody(ctx context.Context, branchId openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1MergeABranchRequestWithBody(c.Server, branchId, contentType, body) if err != nil { @@ -756,30 +767,6 @@ func (c *Client) V1ListOrganizationMembers(ctx context.Context, slug string, req return c.Client.Do(req) } -func (c *Client) V1GetOrganizationProjectClaim(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1GetOrganizationProjectClaimRequest(c.Server, slug, token) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) V1ClaimProjectForOrganization(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1ClaimProjectForOrganizationRequest(c.Server, slug, token) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - func (c *Client) V1ListAllProjects(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1ListAllProjectsRequest(c.Server) if err != nil { @@ -936,6 +923,30 @@ func (c *Client) CreateApiKey(ctx context.Context, ref string, params *CreateApi return c.Client.Do(req) } +func (c *Client) CheckLegacyApiKeys(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCheckLegacyApiKeysRequest(c.Server, ref) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateLegacyApiKeys(ctx context.Context, ref string, params *UpdateLegacyApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateLegacyApiKeysRequest(c.Server, ref, params) + if err != nil { + return nil, err 
+ } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) DeleteApiKey(ctx context.Context, ref string, id openapi_types.UUID, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewDeleteApiKeyRequest(c.Server, ref, id, params) if err != nil { @@ -1080,42 +1091,6 @@ func (c *Client) V1CreateABranch(ctx context.Context, ref string, body V1CreateA return c.Client.Do(req) } -func (c *Client) V1DeleteProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1DeleteProjectClaimTokenRequest(c.Server, ref) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) V1GetProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1GetProjectClaimTokenRequest(c.Server, ref) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) V1CreateProjectClaimToken(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewV1CreateProjectClaimTokenRequest(c.Server, ref) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - func (c *Client) V1GetAuthServiceConfig(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1GetAuthServiceConfigRequest(c.Server, ref) if err != nil { @@ -1656,6 +1631,30 @@ func (c *Client) V1ApplyAMigration(ctx context.Context, ref string, params *V1Ap return c.Client.Do(req) } +func (c *Client) V1UpsertAMigrationWithBody(ctx context.Context, ref string, params *V1UpsertAMigrationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1UpsertAMigrationRequestWithBody(c.Server, ref, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1UpsertAMigration(ctx context.Context, ref string, params *V1UpsertAMigrationParams, body V1UpsertAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1UpsertAMigrationRequest(c.Server, ref, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1RunAQueryWithBody(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1RunAQueryRequestWithBody(c.Server, ref, contentType, body) if err != nil { @@ -2491,6 +2490,62 @@ func NewV1UpdateABranchConfigRequestWithBody(server string, branchId openapi_typ return req, nil } +// NewV1DiffABranchRequest generates requests for V1DiffABranch +func NewV1DiffABranchRequest(server string, branchId openapi_types.UUID, params *V1DiffABranchParams) (*http.Request, error) { + var err error + + var 
pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "branch_id", runtime.ParamLocationPath, branchId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/branches/%s/diff", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IncludedSchemas != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "included_schemas", runtime.ParamLocationQuery, *params.IncludedSchemas); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1MergeABranchRequest calls the generic V1MergeABranch builder with application/json body func NewV1MergeABranchRequest(server string, branchId openapi_types.UUID, body V1MergeABranchJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader @@ -2996,88 +3051,6 @@ func NewV1ListOrganizationMembersRequest(server string, slug string) (*http.Requ return req, nil } -// NewV1GetOrganizationProjectClaimRequest generates requests for V1GetOrganizationProjectClaim -func NewV1GetOrganizationProjectClaimRequest(server string, slug string, token string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "slug", runtime.ParamLocationPath, slug) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "token", runtime.ParamLocationPath, token) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/v1/organizations/%s/project-claim/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewV1ClaimProjectForOrganizationRequest generates requests for V1ClaimProjectForOrganization -func NewV1ClaimProjectForOrganizationRequest(server string, slug string, token string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "slug", runtime.ParamLocationPath, slug) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "token", runtime.ParamLocationPath, token) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/v1/organizations/%s/project-claim/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - // NewV1ListAllProjectsRequest generates requests for V1ListAllProjects func NewV1ListAllProjectsRequest(server string) (*http.Request, error) { var err error @@ -3584,8 +3557,8 @@ func NewCreateApiKeyRequestWithBody(server string, ref string, params *CreateApi return req, nil } -// NewDeleteApiKeyRequest generates requests for DeleteApiKey -func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, params *DeleteApiKeyParams) (*http.Request, error) { +// NewCheckLegacyApiKeysRequest generates requests for CheckLegacyApiKeys +func NewCheckLegacyApiKeysRequest(server string, ref string) (*http.Request, error) { var err error var pathParam0 string @@ -3595,9 +3568,36 @@ func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, pa return nil, err } - var pathParam1 string + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/legacy", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateLegacyApiKeysRequest generates requests for UpdateLegacyApiKeys +func NewUpdateLegacyApiKeysRequest(server string, ref string, params *UpdateLegacyApiKeysParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) if err != nil { return nil, err } @@ -3607,7 +3607,7 @@ func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, pa return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/%s", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/legacy", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -3620,26 +3620,22 @@ func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, pa if params != nil { queryValues := queryURL.Query() - if params.Reveal != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, *params.Reveal); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enabled", runtime.ParamLocationQuery, params.Enabled); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) } } - } queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + req, err := http.NewRequest("PUT", queryURL.String(), nil) if err != nil { return nil, err } @@ -3647,8 +3643,8 @@ func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, pa return req, nil } -// NewGetApiKeyRequest generates requests for GetApiKey -func NewGetApiKeyRequest(server string, ref string, id openapi_types.UUID, params *GetApiKeyParams) (*http.Request, error) { +// NewDeleteApiKeyRequest generates requests for DeleteApiKey +func NewDeleteApiKeyRequest(server string, ref string, id openapi_types.UUID, params *DeleteApiKeyParams) (*http.Request, error) { var err error var pathParam0 string @@ -3699,10 +3695,42 @@ func NewGetApiKeyRequest(server string, ref string, id openapi_types.UUID, param } + if params.WasCompromised != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "was_compromised", runtime.ParamLocationQuery, *params.WasCompromised); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Reason != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reason", runtime.ParamLocationQuery, *params.Reason); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("DELETE", queryURL.String(), nil) if err != nil { return nil, err } @@ -3710,19 +3738,8 @@ func NewGetApiKeyRequest(server string, ref string, id openapi_types.UUID, param return req, nil } -// NewUpdateApiKeyRequest calls the generic UpdateApiKey builder with application/json body -func NewUpdateApiKeyRequest(server string, ref string, id openapi_types.UUID, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewUpdateApiKeyRequestWithBody(server, ref, id, params, "application/json", bodyReader) -} - -// NewUpdateApiKeyRequestWithBody generates requests for UpdateApiKey with any type of body -func NewUpdateApiKeyRequestWithBody(server string, ref string, id openapi_types.UUID, params *UpdateApiKeyParams, contentType string, 
body io.Reader) (*http.Request, error) { +// NewGetApiKeyRequest generates requests for GetApiKey +func NewGetApiKeyRequest(server string, ref string, id openapi_types.UUID, params *GetApiKeyParams) (*http.Request, error) { var err error var pathParam0 string @@ -3776,42 +3793,6 @@ func NewUpdateApiKeyRequestWithBody(server string, ref string, id openapi_types. queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("PATCH", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -// NewV1ListProjectAddonsRequest generates requests for V1ListProjectAddons -func NewV1ListProjectAddonsRequest(server string, ref string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err @@ -3820,19 +3801,19 @@ func NewV1ListProjectAddonsRequest(server string, ref string) (*http.Request, er return req, nil } -// NewV1ApplyProjectAddonRequest calls the generic V1ApplyProjectAddon builder with application/json body -func NewV1ApplyProjectAddonRequest(server string, ref string, body V1ApplyProjectAddonJSONRequestBody) (*http.Request, error) { +// NewUpdateApiKeyRequest calls the generic UpdateApiKey builder with application/json body +func NewUpdateApiKeyRequest(server string, ref string, id openapi_types.UUID, params *UpdateApiKeyParams, body UpdateApiKeyJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewV1ApplyProjectAddonRequestWithBody(server, ref, "application/json", bodyReader) + return NewUpdateApiKeyRequestWithBody(server, ref, id, params, "application/json", bodyReader) } -// NewV1ApplyProjectAddonRequestWithBody generates requests for V1ApplyProjectAddon with any type of body -func NewV1ApplyProjectAddonRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { +// NewUpdateApiKeyRequestWithBody generates requests for UpdateApiKey with any type of body +func NewUpdateApiKeyRequestWithBody(server string, ref string, id openapi_types.UUID, params *UpdateApiKeyParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -3842,12 +3823,19 @@ func NewV1ApplyProjectAddonRequestWithBody(server string, ref string, contentTyp return nil, err } + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/api-keys/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -3857,59 +3845,40 @@ func NewV1ApplyProjectAddonRequestWithBody(server string, ref string, contentTyp return nil, err } - req, err := http.NewRequest("PATCH", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -// NewV1RemoveProjectAddonRequest generates requests for V1RemoveProjectAddon -func NewV1RemoveProjectAddonRequest(server string, ref string, addonVariant interface{}) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) - if err != nil { - return nil, err - } + if params != nil { + queryValues := queryURL.Query() - var pathParam1 string + if params.Reveal != nil { - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "addon_variant", runtime.ParamLocationPath, addonVariant) - if err != nil { - return nil, err - } + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reveal", runtime.ParamLocationQuery, *params.Reveal); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + } - operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath + queryURL.RawQuery = queryValues.Encode() } - queryURL, err := serverURL.Parse(operationPath) + req, err := http.NewRequest("PATCH", queryURL.String(), body) if err != nil { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } + req.Header.Add("Content-Type", contentType) return req, nil } -// NewV1DisablePreviewBranchingRequest generates requests for V1DisablePreviewBranching -func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Request, error) { +// NewV1ListProjectAddonsRequest generates requests for V1ListProjectAddons +func NewV1ListProjectAddonsRequest(server string, ref string) (*http.Request, error) { var err error var pathParam0 string @@ -3924,7 +3893,7 @@ func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -3934,7 +3903,7 @@ func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Reque return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } @@ -3942,8 +3911,19 @@ func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Reque return req, nil } -// NewV1ListAllBranchesRequest generates requests for V1ListAllBranches -func NewV1ListAllBranchesRequest(server string, ref string) (*http.Request, error) { +// NewV1ApplyProjectAddonRequest calls the generic V1ApplyProjectAddon builder with application/json body +func NewV1ApplyProjectAddonRequest(server string, ref string, body V1ApplyProjectAddonJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1ApplyProjectAddonRequestWithBody(server, ref, "application/json", bodyReader) +} + +// NewV1ApplyProjectAddonRequestWithBody generates requests for V1ApplyProjectAddon with any type of body +func NewV1ApplyProjectAddonRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -3958,7 +3938,7 @@ func NewV1ListAllBranchesRequest(server string, ref string) (*http.Request, erro return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -3968,27 +3948,18 @@ func NewV1ListAllBranchesRequest(server string, ref string) (*http.Request, erro return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("PATCH", queryURL.String(), body) if err != nil { return nil, err } - return req, nil -} + req.Header.Add("Content-Type", contentType) -// NewV1CreateABranchRequest calls the generic V1CreateABranch builder with application/json body -func NewV1CreateABranchRequest(server string, ref string, body V1CreateABranchJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewV1CreateABranchRequestWithBody(server, ref, "application/json", bodyReader) + return req, nil } -// NewV1CreateABranchRequestWithBody generates requests for V1CreateABranch with any type of body -func NewV1CreateABranchRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { +// NewV1RemoveProjectAddonRequest generates requests for V1RemoveProjectAddon +func NewV1RemoveProjectAddonRequest(server string, ref string, addonVariant interface{}) (*http.Request, error) { var err error var pathParam0 string @@ -3998,12 +3969,19 @@ func NewV1CreateABranchRequestWithBody(server string, ref string, contentType st return nil, err } + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "addon_variant", runtime.ParamLocationPath, addonVariant) + if err != nil { + return nil, err + } + serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/billing/addons/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -4013,18 +3991,16 @@ func NewV1CreateABranchRequestWithBody(server string, ref string, contentType st return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) + req, err := http.NewRequest("DELETE", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewV1DeleteProjectClaimTokenRequest generates requests for V1DeleteProjectClaimToken -func NewV1DeleteProjectClaimTokenRequest(server string, ref string) (*http.Request, error) { +// NewV1DisablePreviewBranchingRequest generates requests for V1DisablePreviewBranching +func NewV1DisablePreviewBranchingRequest(server string, ref string) (*http.Request, error) { var err error var pathParam0 string @@ -4039,7 +4015,7 @@ func NewV1DeleteProjectClaimTokenRequest(server string, ref string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/claim-token", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -4057,8 +4033,8 @@ func NewV1DeleteProjectClaimTokenRequest(server string, ref string) (*http.Reque return req, nil } -// NewV1GetProjectClaimTokenRequest generates requests for V1GetProjectClaimToken -func NewV1GetProjectClaimTokenRequest(server string, ref string) (*http.Request, error) { +// NewV1ListAllBranchesRequest generates requests for V1ListAllBranches +func NewV1ListAllBranchesRequest(server string, ref string) (*http.Request, error) { var err error var pathParam0 string @@ -4073,7 +4049,7 @@ func NewV1GetProjectClaimTokenRequest(server string, ref string) (*http.Request, return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/claim-token", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -4091,8 +4067,19 @@ func NewV1GetProjectClaimTokenRequest(server string, ref string) (*http.Request, return req, nil } -// NewV1CreateProjectClaimTokenRequest generates requests for V1CreateProjectClaimToken -func NewV1CreateProjectClaimTokenRequest(server string, ref string) (*http.Request, error) { +// NewV1CreateABranchRequest calls the generic V1CreateABranch builder with application/json body +func NewV1CreateABranchRequest(server string, ref string, body V1CreateABranchJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1CreateABranchRequestWithBody(server, ref, "application/json", bodyReader) +} + +// NewV1CreateABranchRequestWithBody generates requests for V1CreateABranch with any type of body +func NewV1CreateABranchRequestWithBody(server string, ref string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -4107,7 +4094,7 @@ func NewV1CreateProjectClaimTokenRequest(server string, ref string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/v1/projects/%s/claim-token", pathParam0) + operationPath := fmt.Sprintf("/v1/projects/%s/branches", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -4117,11 +4104,13 @@ func NewV1CreateProjectClaimTokenRequest(server string, ref string) (*http.Reque return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), nil) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } + req.Header.Add("Content-Type", contentType) + return req, nil } @@ -5474,6 +5463,68 @@ func NewV1ApplyAMigrationRequestWithBody(server string, ref string, params *V1Ap return req, nil } +// NewV1UpsertAMigrationRequest calls the generic V1UpsertAMigration builder with application/json body +func NewV1UpsertAMigrationRequest(server string, ref string, params *V1UpsertAMigrationParams, body V1UpsertAMigrationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1UpsertAMigrationRequestWithBody(server, ref, params, "application/json", bodyReader) +} + +// NewV1UpsertAMigrationRequestWithBody generates requests for V1UpsertAMigration with any type of body +func NewV1UpsertAMigrationRequestWithBody(server string, ref string, params *V1UpsertAMigrationParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ref", runtime.ParamLocationPath, ref) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/v1/projects/%s/database/migrations", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + if params.IdempotencyKey != nil { + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "Idempotency-Key", runtime.ParamLocationHeader, *params.IdempotencyKey) + if err != nil { + return nil, err + } + + req.Header.Set("Idempotency-Key", headerParam0) + } + + } + + return req, nil +} + // NewV1RunAQueryRequest calls the generic V1RunAQuery builder with application/json body func NewV1RunAQueryRequest(server string, ref string, body V1RunAQueryJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader @@ -7661,6 +7712,9 @@ type ClientWithResponsesInterface interface { V1UpdateABranchConfigWithResponse(ctx context.Context, branchId openapi_types.UUID, body V1UpdateABranchConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpdateABranchConfigResponse, error) + // V1DiffABranchWithResponse request + V1DiffABranchWithResponse(ctx context.Context, branchId openapi_types.UUID, params *V1DiffABranchParams, reqEditors ...RequestEditorFn) (*V1DiffABranchResponse, error) + // V1MergeABranchWithBodyWithResponse request with any body V1MergeABranchWithBodyWithResponse(ctx context.Context, branchId openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1MergeABranchResponse, error) @@ -7703,12 +7757,6 @@ type ClientWithResponsesInterface interface { // V1ListOrganizationMembersWithResponse request V1ListOrganizationMembersWithResponse(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*V1ListOrganizationMembersResponse, error) - // V1GetOrganizationProjectClaimWithResponse 
request - V1GetOrganizationProjectClaimWithResponse(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*V1GetOrganizationProjectClaimResponse, error) - - // V1ClaimProjectForOrganizationWithResponse request - V1ClaimProjectForOrganizationWithResponse(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*V1ClaimProjectForOrganizationResponse, error) - // V1ListAllProjectsWithResponse request V1ListAllProjectsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*V1ListAllProjectsResponse, error) @@ -7746,6 +7794,12 @@ type ClientWithResponsesInterface interface { CreateApiKeyWithResponse(ctx context.Context, ref string, params *CreateApiKeyParams, body CreateApiKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateApiKeyResponse, error) + // CheckLegacyApiKeysWithResponse request + CheckLegacyApiKeysWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*CheckLegacyApiKeysResponse, error) + + // UpdateLegacyApiKeysWithResponse request + UpdateLegacyApiKeysWithResponse(ctx context.Context, ref string, params *UpdateLegacyApiKeysParams, reqEditors ...RequestEditorFn) (*UpdateLegacyApiKeysResponse, error) + // DeleteApiKeyWithResponse request DeleteApiKeyWithResponse(ctx context.Context, ref string, id openapi_types.UUID, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*DeleteApiKeyResponse, error) @@ -7779,15 +7833,6 @@ type ClientWithResponsesInterface interface { V1CreateABranchWithResponse(ctx context.Context, ref string, body V1CreateABranchJSONRequestBody, reqEditors ...RequestEditorFn) (*V1CreateABranchResponse, error) - // V1DeleteProjectClaimTokenWithResponse request - V1DeleteProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeleteProjectClaimTokenResponse, error) - - // V1GetProjectClaimTokenWithResponse request - V1GetProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetProjectClaimTokenResponse, error) - - // V1CreateProjectClaimTokenWithResponse request - V1CreateProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1CreateProjectClaimTokenResponse, error) - // V1GetAuthServiceConfigWithResponse request V1GetAuthServiceConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetAuthServiceConfigResponse, error) @@ -7911,6 +7956,11 @@ type ClientWithResponsesInterface interface { V1ApplyAMigrationWithResponse(ctx context.Context, ref string, params *V1ApplyAMigrationParams, body V1ApplyAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*V1ApplyAMigrationResponse, error) + // V1UpsertAMigrationWithBodyWithResponse request with any body + V1UpsertAMigrationWithBodyWithResponse(ctx context.Context, ref string, params *V1UpsertAMigrationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1UpsertAMigrationResponse, error) + + V1UpsertAMigrationWithResponse(ctx context.Context, ref string, params *V1UpsertAMigrationParams, body V1UpsertAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpsertAMigrationResponse, error) + // V1RunAQueryWithBodyWithResponse request with any body V1RunAQueryWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RunAQueryResponse, error) @@ -8142,6 +8192,27 @@ func (r V1UpdateABranchConfigResponse) StatusCode() int { return 0 } +type V1DiffABranchResponse struct { + Body 
[]byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1DiffABranchResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1DiffABranchResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1MergeABranchResponse struct { Body []byte HTTPResponse *http.Response @@ -8360,49 +8431,6 @@ func (r V1ListOrganizationMembersResponse) StatusCode() int { return 0 } -type V1GetOrganizationProjectClaimResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *OrganizationProjectClaimResponse -} - -// Status returns HTTPResponse.Status -func (r V1GetOrganizationProjectClaimResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r V1GetOrganizationProjectClaimResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type V1ClaimProjectForOrganizationResponse struct { - Body []byte - HTTPResponse *http.Response -} - -// Status returns HTTPResponse.Status -func (r V1ClaimProjectForOrganizationResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r V1ClaimProjectForOrganizationResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - type V1ListAllProjectsResponse struct { Body []byte HTTPResponse *http.Response @@ -8645,14 +8673,14 @@ func (r CreateApiKeyResponse) StatusCode() int { return 0 } -type DeleteApiKeyResponse struct { +type CheckLegacyApiKeysResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *ApiKeyResponse + JSON200 *LegacyApiKeysResponse } // Status returns HTTPResponse.Status -func (r DeleteApiKeyResponse) Status() string { +func (r CheckLegacyApiKeysResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8660,21 +8688,21 @@ func (r DeleteApiKeyResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteApiKeyResponse) StatusCode() int { +func (r CheckLegacyApiKeysResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetApiKeyResponse struct { +type UpdateLegacyApiKeysResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *ApiKeyResponse + JSON200 *LegacyApiKeysResponse } // Status returns HTTPResponse.Status -func (r GetApiKeyResponse) Status() string { +func (r UpdateLegacyApiKeysResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8682,43 +8710,21 @@ func (r GetApiKeyResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetApiKeyResponse) StatusCode() int { +func (r UpdateLegacyApiKeysResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type UpdateApiKeyResponse struct { +type DeleteApiKeyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r UpdateApiKeyResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r UpdateApiKeyResponse) StatusCode() 
int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type V1ListProjectAddonsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *ListProjectAddonsResponse -} - -// Status returns HTTPResponse.Status -func (r V1ListProjectAddonsResponse) Status() string { +func (r DeleteApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8726,20 +8732,21 @@ func (r V1ListProjectAddonsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1ListProjectAddonsResponse) StatusCode() int { +func (r DeleteApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1ApplyProjectAddonResponse struct { +type GetApiKeyResponse struct { Body []byte HTTPResponse *http.Response + JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1ApplyProjectAddonResponse) Status() string { +func (r GetApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8747,20 +8754,21 @@ func (r V1ApplyProjectAddonResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1ApplyProjectAddonResponse) StatusCode() int { +func (r GetApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1RemoveProjectAddonResponse struct { +type UpdateApiKeyResponse struct { Body []byte HTTPResponse *http.Response + JSON200 *ApiKeyResponse } // Status returns HTTPResponse.Status -func (r V1RemoveProjectAddonResponse) Status() string { +func (r UpdateApiKeyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8768,20 +8776,21 @@ func (r V1RemoveProjectAddonResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1RemoveProjectAddonResponse) StatusCode() int { +func (r UpdateApiKeyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1DisablePreviewBranchingResponse struct { +type V1ListProjectAddonsResponse struct { Body []byte HTTPResponse *http.Response + JSON200 *ListProjectAddonsResponse } // Status returns HTTPResponse.Status -func (r V1DisablePreviewBranchingResponse) Status() string { +func (r V1ListProjectAddonsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8789,21 +8798,20 @@ func (r V1DisablePreviewBranchingResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1DisablePreviewBranchingResponse) StatusCode() int { +func (r V1ListProjectAddonsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1ListAllBranchesResponse struct { +type V1ApplyProjectAddonResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *[]BranchResponse } // Status returns HTTPResponse.Status -func (r V1ListAllBranchesResponse) Status() string { +func (r V1ApplyProjectAddonResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8811,21 +8819,20 @@ func (r V1ListAllBranchesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1ListAllBranchesResponse) StatusCode() int { +func (r V1ApplyProjectAddonResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1CreateABranchResponse struct { +type V1RemoveProjectAddonResponse struct { Body []byte HTTPResponse *http.Response - JSON201 
*BranchResponse } // Status returns HTTPResponse.Status -func (r V1CreateABranchResponse) Status() string { +func (r V1RemoveProjectAddonResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8833,20 +8840,20 @@ func (r V1CreateABranchResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1CreateABranchResponse) StatusCode() int { +func (r V1RemoveProjectAddonResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1DeleteProjectClaimTokenResponse struct { +type V1DisablePreviewBranchingResponse struct { Body []byte HTTPResponse *http.Response } // Status returns HTTPResponse.Status -func (r V1DeleteProjectClaimTokenResponse) Status() string { +func (r V1DisablePreviewBranchingResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8854,21 +8861,21 @@ func (r V1DeleteProjectClaimTokenResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1DeleteProjectClaimTokenResponse) StatusCode() int { +func (r V1DisablePreviewBranchingResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1GetProjectClaimTokenResponse struct { +type V1ListAllBranchesResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *ProjectClaimTokenResponse + JSON200 *[]BranchResponse } // Status returns HTTPResponse.Status -func (r V1GetProjectClaimTokenResponse) Status() string { +func (r V1ListAllBranchesResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8876,21 +8883,21 @@ func (r V1GetProjectClaimTokenResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1GetProjectClaimTokenResponse) StatusCode() int { +func (r V1ListAllBranchesResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type V1CreateProjectClaimTokenResponse struct { +type V1CreateABranchResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *CreateProjectClaimTokenResponse + JSON201 *BranchResponse } // Status returns HTTPResponse.Status -func (r V1CreateProjectClaimTokenResponse) Status() string { +func (r V1CreateABranchResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -8898,7 +8905,7 @@ func (r V1CreateProjectClaimTokenResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r V1CreateProjectClaimTokenResponse) StatusCode() int { +func (r V1CreateABranchResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -9627,6 +9634,27 @@ func (r V1ApplyAMigrationResponse) StatusCode() int { return 0 } +type V1UpsertAMigrationResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r V1UpsertAMigrationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1UpsertAMigrationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1RunAQueryResponse struct { Body []byte HTTPResponse *http.Response @@ -10617,6 +10645,15 @@ func (c *ClientWithResponses) V1UpdateABranchConfigWithResponse(ctx context.Cont return ParseV1UpdateABranchConfigResponse(rsp) } +// V1DiffABranchWithResponse request returning *V1DiffABranchResponse +func (c *ClientWithResponses) 
V1DiffABranchWithResponse(ctx context.Context, branchId openapi_types.UUID, params *V1DiffABranchParams, reqEditors ...RequestEditorFn) (*V1DiffABranchResponse, error) { + rsp, err := c.V1DiffABranch(ctx, branchId, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1DiffABranchResponse(rsp) +} + // V1MergeABranchWithBodyWithResponse request with arbitrary body returning *V1MergeABranchResponse func (c *ClientWithResponses) V1MergeABranchWithBodyWithResponse(ctx context.Context, branchId openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1MergeABranchResponse, error) { rsp, err := c.V1MergeABranchWithBody(ctx, branchId, contentType, body, reqEditors...) @@ -10755,24 +10792,6 @@ func (c *ClientWithResponses) V1ListOrganizationMembersWithResponse(ctx context. return ParseV1ListOrganizationMembersResponse(rsp) } -// V1GetOrganizationProjectClaimWithResponse request returning *V1GetOrganizationProjectClaimResponse -func (c *ClientWithResponses) V1GetOrganizationProjectClaimWithResponse(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*V1GetOrganizationProjectClaimResponse, error) { - rsp, err := c.V1GetOrganizationProjectClaim(ctx, slug, token, reqEditors...) - if err != nil { - return nil, err - } - return ParseV1GetOrganizationProjectClaimResponse(rsp) -} - -// V1ClaimProjectForOrganizationWithResponse request returning *V1ClaimProjectForOrganizationResponse -func (c *ClientWithResponses) V1ClaimProjectForOrganizationWithResponse(ctx context.Context, slug string, token string, reqEditors ...RequestEditorFn) (*V1ClaimProjectForOrganizationResponse, error) { - rsp, err := c.V1ClaimProjectForOrganization(ctx, slug, token, reqEditors...) - if err != nil { - return nil, err - } - return ParseV1ClaimProjectForOrganizationResponse(rsp) -} - // V1ListAllProjectsWithResponse request returning *V1ListAllProjectsResponse func (c *ClientWithResponses) V1ListAllProjectsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*V1ListAllProjectsResponse, error) { rsp, err := c.V1ListAllProjects(ctx, reqEditors...) @@ -10888,6 +10907,24 @@ func (c *ClientWithResponses) CreateApiKeyWithResponse(ctx context.Context, ref return ParseCreateApiKeyResponse(rsp) } +// CheckLegacyApiKeysWithResponse request returning *CheckLegacyApiKeysResponse +func (c *ClientWithResponses) CheckLegacyApiKeysWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*CheckLegacyApiKeysResponse, error) { + rsp, err := c.CheckLegacyApiKeys(ctx, ref, reqEditors...) + if err != nil { + return nil, err + } + return ParseCheckLegacyApiKeysResponse(rsp) +} + +// UpdateLegacyApiKeysWithResponse request returning *UpdateLegacyApiKeysResponse +func (c *ClientWithResponses) UpdateLegacyApiKeysWithResponse(ctx context.Context, ref string, params *UpdateLegacyApiKeysParams, reqEditors ...RequestEditorFn) (*UpdateLegacyApiKeysResponse, error) { + rsp, err := c.UpdateLegacyApiKeys(ctx, ref, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateLegacyApiKeysResponse(rsp) +} + // DeleteApiKeyWithResponse request returning *DeleteApiKeyResponse func (c *ClientWithResponses) DeleteApiKeyWithResponse(ctx context.Context, ref string, id openapi_types.UUID, params *DeleteApiKeyParams, reqEditors ...RequestEditorFn) (*DeleteApiKeyResponse, error) { rsp, err := c.DeleteApiKey(ctx, ref, id, params, reqEditors...) 
@@ -10993,33 +11030,6 @@ func (c *ClientWithResponses) V1CreateABranchWithResponse(ctx context.Context, r return ParseV1CreateABranchResponse(rsp) } -// V1DeleteProjectClaimTokenWithResponse request returning *V1DeleteProjectClaimTokenResponse -func (c *ClientWithResponses) V1DeleteProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1DeleteProjectClaimTokenResponse, error) { - rsp, err := c.V1DeleteProjectClaimToken(ctx, ref, reqEditors...) - if err != nil { - return nil, err - } - return ParseV1DeleteProjectClaimTokenResponse(rsp) -} - -// V1GetProjectClaimTokenWithResponse request returning *V1GetProjectClaimTokenResponse -func (c *ClientWithResponses) V1GetProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetProjectClaimTokenResponse, error) { - rsp, err := c.V1GetProjectClaimToken(ctx, ref, reqEditors...) - if err != nil { - return nil, err - } - return ParseV1GetProjectClaimTokenResponse(rsp) -} - -// V1CreateProjectClaimTokenWithResponse request returning *V1CreateProjectClaimTokenResponse -func (c *ClientWithResponses) V1CreateProjectClaimTokenWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1CreateProjectClaimTokenResponse, error) { - rsp, err := c.V1CreateProjectClaimToken(ctx, ref, reqEditors...) - if err != nil { - return nil, err - } - return ParseV1CreateProjectClaimTokenResponse(rsp) -} - // V1GetAuthServiceConfigWithResponse request returning *V1GetAuthServiceConfigResponse func (c *ClientWithResponses) V1GetAuthServiceConfigWithResponse(ctx context.Context, ref string, reqEditors ...RequestEditorFn) (*V1GetAuthServiceConfigResponse, error) { rsp, err := c.V1GetAuthServiceConfig(ctx, ref, reqEditors...) @@ -11413,6 +11423,23 @@ func (c *ClientWithResponses) V1ApplyAMigrationWithResponse(ctx context.Context, return ParseV1ApplyAMigrationResponse(rsp) } +// V1UpsertAMigrationWithBodyWithResponse request with arbitrary body returning *V1UpsertAMigrationResponse +func (c *ClientWithResponses) V1UpsertAMigrationWithBodyWithResponse(ctx context.Context, ref string, params *V1UpsertAMigrationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1UpsertAMigrationResponse, error) { + rsp, err := c.V1UpsertAMigrationWithBody(ctx, ref, params, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1UpsertAMigrationResponse(rsp) +} + +func (c *ClientWithResponses) V1UpsertAMigrationWithResponse(ctx context.Context, ref string, params *V1UpsertAMigrationParams, body V1UpsertAMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*V1UpsertAMigrationResponse, error) { + rsp, err := c.V1UpsertAMigration(ctx, ref, params, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1UpsertAMigrationResponse(rsp) +} + // V1RunAQueryWithBodyWithResponse request with arbitrary body returning *V1RunAQueryResponse func (c *ClientWithResponses) V1RunAQueryWithBodyWithResponse(ctx context.Context, ref string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1RunAQueryResponse, error) { rsp, err := c.V1RunAQueryWithBody(ctx, ref, contentType, body, reqEditors...) 
@@ -12015,6 +12042,22 @@ func ParseV1UpdateABranchConfigResponse(rsp *http.Response) (*V1UpdateABranchCon return response, nil } +// ParseV1DiffABranchResponse parses an HTTP response from a V1DiffABranchWithResponse call +func ParseV1DiffABranchResponse(rsp *http.Response) (*V1DiffABranchResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1DiffABranchResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1MergeABranchResponse parses an HTTP response from a V1MergeABranchWithResponse call func ParseV1MergeABranchResponse(rsp *http.Response) (*V1MergeABranchResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -12255,48 +12298,6 @@ func ParseV1ListOrganizationMembersResponse(rsp *http.Response) (*V1ListOrganiza return response, nil } -// ParseV1GetOrganizationProjectClaimResponse parses an HTTP response from a V1GetOrganizationProjectClaimWithResponse call -func ParseV1GetOrganizationProjectClaimResponse(rsp *http.Response) (*V1GetOrganizationProjectClaimResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &V1GetOrganizationProjectClaimResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest OrganizationProjectClaimResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - } - - return response, nil -} - -// ParseV1ClaimProjectForOrganizationResponse parses an HTTP response from a V1ClaimProjectForOrganizationWithResponse call -func ParseV1ClaimProjectForOrganizationResponse(rsp *http.Response) (*V1ClaimProjectForOrganizationResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &V1ClaimProjectForOrganizationResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - return response, nil -} - // ParseV1ListAllProjectsResponse parses an HTTP response from a V1ListAllProjectsWithResponse call func ParseV1ListAllProjectsResponse(rsp *http.Response) (*V1ListAllProjectsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -12583,6 +12584,58 @@ func ParseCreateApiKeyResponse(rsp *http.Response) (*CreateApiKeyResponse, error return response, nil } +// ParseCheckLegacyApiKeysResponse parses an HTTP response from a CheckLegacyApiKeysWithResponse call +func ParseCheckLegacyApiKeysResponse(rsp *http.Response) (*CheckLegacyApiKeysResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CheckLegacyApiKeysResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LegacyApiKeysResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseUpdateLegacyApiKeysResponse parses an HTTP response from a UpdateLegacyApiKeysWithResponse call +func ParseUpdateLegacyApiKeysResponse(rsp *http.Response) (*UpdateLegacyApiKeysResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := 
&UpdateLegacyApiKeysResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LegacyApiKeysResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseDeleteApiKeyResponse parses an HTTP response from a DeleteApiKeyWithResponse call func ParseDeleteApiKeyResponse(rsp *http.Response) (*DeleteApiKeyResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -12787,74 +12840,6 @@ func ParseV1CreateABranchResponse(rsp *http.Response) (*V1CreateABranchResponse, return response, nil } -// ParseV1DeleteProjectClaimTokenResponse parses an HTTP response from a V1DeleteProjectClaimTokenWithResponse call -func ParseV1DeleteProjectClaimTokenResponse(rsp *http.Response) (*V1DeleteProjectClaimTokenResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &V1DeleteProjectClaimTokenResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - return response, nil -} - -// ParseV1GetProjectClaimTokenResponse parses an HTTP response from a V1GetProjectClaimTokenWithResponse call -func ParseV1GetProjectClaimTokenResponse(rsp *http.Response) (*V1GetProjectClaimTokenResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &V1GetProjectClaimTokenResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest ProjectClaimTokenResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - } - - return response, nil -} - -// ParseV1CreateProjectClaimTokenResponse parses an HTTP response from a V1CreateProjectClaimTokenWithResponse call -func ParseV1CreateProjectClaimTokenResponse(rsp *http.Response) (*V1CreateProjectClaimTokenResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &V1CreateProjectClaimTokenResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest CreateProjectClaimTokenResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - } - - return response, nil -} - // ParseV1GetAuthServiceConfigResponse parses an HTTP response from a V1GetAuthServiceConfigWithResponse call func ParseV1GetAuthServiceConfigResponse(rsp *http.Response) (*V1GetAuthServiceConfigResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -13673,6 +13658,22 @@ func ParseV1ApplyAMigrationResponse(rsp *http.Response) (*V1ApplyAMigrationRespo return response, nil } +// ParseV1UpsertAMigrationResponse parses an HTTP response from a V1UpsertAMigrationWithResponse call +func ParseV1UpsertAMigrationResponse(rsp *http.Response) (*V1UpsertAMigrationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1UpsertAMigrationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseV1RunAQueryResponse parses an HTTP response from a V1RunAQueryWithResponse call func 
ParseV1RunAQueryResponse(rsp *http.Response) (*V1RunAQueryResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) diff --git a/pkg/api/types.gen.go b/pkg/api/types.gen.go index 5d6a77197..50148b79f 100644 --- a/pkg/api/types.gen.go +++ b/pkg/api/types.gen.go @@ -462,22 +462,6 @@ const ( Bearer OAuthTokenResponseTokenType = "Bearer" ) -// Defines values for OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan. -const ( - OrganizationProjectClaimResponsePreviewSourceSubscriptionPlanEnterprise OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan = "enterprise" - OrganizationProjectClaimResponsePreviewSourceSubscriptionPlanFree OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan = "free" - OrganizationProjectClaimResponsePreviewSourceSubscriptionPlanPro OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan = "pro" - OrganizationProjectClaimResponsePreviewSourceSubscriptionPlanTeam OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan = "team" -) - -// Defines values for OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan. -const ( - OrganizationProjectClaimResponsePreviewTargetSubscriptionPlanEnterprise OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan = "enterprise" - OrganizationProjectClaimResponsePreviewTargetSubscriptionPlanFree OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan = "free" - OrganizationProjectClaimResponsePreviewTargetSubscriptionPlanPro OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan = "pro" - OrganizationProjectClaimResponsePreviewTargetSubscriptionPlanTeam OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan = "team" -) - // Defines values for PostgresConfigResponseSessionReplicationRole. const ( PostgresConfigResponseSessionReplicationRoleLocal PostgresConfigResponseSessionReplicationRole = "local" @@ -705,6 +689,7 @@ const ( V1CreateProjectBodyDesiredInstanceSizeN48xlargeOptimizedMemory V1CreateProjectBodyDesiredInstanceSize = "48xlarge_optimized_memory" V1CreateProjectBodyDesiredInstanceSizeN4xlarge V1CreateProjectBodyDesiredInstanceSize = "4xlarge" V1CreateProjectBodyDesiredInstanceSizeN8xlarge V1CreateProjectBodyDesiredInstanceSize = "8xlarge" + V1CreateProjectBodyDesiredInstanceSizeNano V1CreateProjectBodyDesiredInstanceSize = "nano" V1CreateProjectBodyDesiredInstanceSizePico V1CreateProjectBodyDesiredInstanceSize = "pico" V1CreateProjectBodyDesiredInstanceSizeSmall V1CreateProjectBodyDesiredInstanceSize = "small" V1CreateProjectBodyDesiredInstanceSizeXlarge V1CreateProjectBodyDesiredInstanceSize = "xlarge" @@ -750,7 +735,9 @@ const ( // Defines values for V1OrganizationSlugResponseOptInTags. const ( - AISQLGENERATOROPTIN V1OrganizationSlugResponseOptInTags = "AI_SQL_GENERATOR_OPT_IN" + AIDATAGENERATOROPTIN V1OrganizationSlugResponseOptInTags = "AI_DATA_GENERATOR_OPT_IN" + AILOGGENERATOROPTIN V1OrganizationSlugResponseOptInTags = "AI_LOG_GENERATOR_OPT_IN" + AISQLGENERATOROPTIN V1OrganizationSlugResponseOptInTags = "AI_SQL_GENERATOR_OPT_IN" ) // Defines values for V1OrganizationSlugResponsePlan. 
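
The regenerated client wrappers above (V1UpsertAMigrationWithResponse, CheckLegacyApiKeysWithResponse, and their Parse helpers) follow the usual oapi-codegen shape: each returns a response struct carrying the raw Body, the *http.Response, and a typed JSON200/JSON201 field when the payload decodes. The sketch below is illustrative only and is not part of this patch; NewClientWithResponses and WithRequestEditorFn are assumed from the standard oapi-codegen client scaffolding, and the server URL, project ref, and token are placeholders.

package main

import (
	"context"
	"fmt"
	"log"
	"net/http"

	"github.com/supabase/cli/pkg/api"
)

func main() {
	ctx := context.Background()
	// Assumed request editor: attach a bearer token to every request.
	withToken := func(ctx context.Context, req *http.Request) error {
		req.Header.Set("Authorization", "Bearer <access-token>")
		return nil
	}
	client, err := api.NewClientWithResponses("https://api.supabase.com", api.WithRequestEditorFn(withToken))
	if err != nil {
		log.Fatal(err)
	}

	// Track a migration idempotently against a placeholder project ref.
	name, key := "create_users", "unique-key-123"
	resp, err := client.V1UpsertAMigrationWithResponse(ctx, "abcdefgh",
		&api.V1UpsertAMigrationParams{IdempotencyKey: &key},
		api.V1UpsertAMigrationJSONRequestBody{Name: &name, Query: "create table users (id bigint)"},
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("upsert status:", resp.StatusCode())

	// Check whether legacy API keys are still enabled for the project.
	keys, err := client.CheckLegacyApiKeysWithResponse(ctx, "abcdefgh")
	if err != nil {
		log.Fatal(err)
	}
	if keys.JSON200 != nil {
		fmt.Println("legacy keys enabled:", keys.JSON200.Enabled)
	}
}
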
@@ -1096,6 +1083,9 @@ type AuthConfigResponse struct { ExternalZoomClientId nullable.Nullable[string] `json:"external_zoom_client_id"` ExternalZoomEnabled nullable.Nullable[bool] `json:"external_zoom_enabled"` ExternalZoomSecret nullable.Nullable[string] `json:"external_zoom_secret"` + HookBeforeUserCreatedEnabled nullable.Nullable[bool] `json:"hook_before_user_created_enabled"` + HookBeforeUserCreatedSecrets nullable.Nullable[string] `json:"hook_before_user_created_secrets"` + HookBeforeUserCreatedUri nullable.Nullable[string] `json:"hook_before_user_created_uri"` HookCustomAccessTokenEnabled nullable.Nullable[bool] `json:"hook_custom_access_token_enabled"` HookCustomAccessTokenSecrets nullable.Nullable[string] `json:"hook_custom_access_token_secrets"` HookCustomAccessTokenUri nullable.Nullable[string] `json:"hook_custom_access_token_uri"` @@ -1346,15 +1336,6 @@ type CreateOrganizationV1 struct { Name string `json:"name"` } -// CreateProjectClaimTokenResponse defines model for CreateProjectClaimTokenResponse. -type CreateProjectClaimTokenResponse struct { - CreatedAt string `json:"created_at"` - CreatedBy openapi_types.UUID `json:"created_by"` - ExpiresAt string `json:"expires_at"` - Token string `json:"token"` - TokenAlias string `json:"token_alias"` -} - // CreateProviderBody defines model for CreateProviderBody. type CreateProviderBody struct { AttributeMapping *struct { @@ -1600,6 +1581,11 @@ type GetProviderResponse struct { UpdatedAt *string `json:"updated_at,omitempty"` } +// LegacyApiKeysResponse defines model for LegacyApiKeysResponse. +type LegacyApiKeysResponse struct { + Enabled bool `json:"enabled"` +} + // ListProjectAddonsResponse defines model for ListProjectAddonsResponse. type ListProjectAddonsResponse struct { AvailableAddons []struct { @@ -1811,46 +1797,6 @@ type OAuthTokenResponse struct { // OAuthTokenResponseTokenType defines model for OAuthTokenResponse.TokenType. type OAuthTokenResponseTokenType string -// OrganizationProjectClaimResponse defines model for OrganizationProjectClaimResponse. -type OrganizationProjectClaimResponse struct { - CreatedAt string `json:"created_at"` - CreatedBy openapi_types.UUID `json:"created_by"` - ExpiresAt string `json:"expires_at"` - Preview struct { - Errors []struct { - Key string `json:"key"` - Message string `json:"message"` - } `json:"errors"` - Info []struct { - Key string `json:"key"` - Message string `json:"message"` - } `json:"info"` - MembersExceedingFreeProjectLimit []struct { - Limit float32 `json:"limit"` - Name string `json:"name"` - } `json:"members_exceeding_free_project_limit"` - SourceSubscriptionPlan OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan `json:"source_subscription_plan"` - TargetOrganizationEligible nullable.Nullable[bool] `json:"target_organization_eligible"` - TargetOrganizationHasFreeProjectSlots nullable.Nullable[bool] `json:"target_organization_has_free_project_slots"` - TargetSubscriptionPlan nullable.Nullable[OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan] `json:"target_subscription_plan"` - Valid bool `json:"valid"` - Warnings []struct { - Key string `json:"key"` - Message string `json:"message"` - } `json:"warnings"` - } `json:"preview"` - Project struct { - Name string `json:"name"` - Ref string `json:"ref"` - } `json:"project"` -} - -// OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan defines model for OrganizationProjectClaimResponse.Preview.SourceSubscriptionPlan. 
-type OrganizationProjectClaimResponsePreviewSourceSubscriptionPlan string - -// OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan defines model for OrganizationProjectClaimResponse.Preview.TargetSubscriptionPlan. -type OrganizationProjectClaimResponsePreviewTargetSubscriptionPlan string - // OrganizationResponseV1 defines model for OrganizationResponseV1. type OrganizationResponseV1 struct { Id string `json:"id"` @@ -1903,14 +1849,6 @@ type PostgrestConfigWithJWTSecretResponse struct { MaxRows int `json:"max_rows"` } -// ProjectClaimTokenResponse defines model for ProjectClaimTokenResponse. -type ProjectClaimTokenResponse struct { - CreatedAt string `json:"created_at"` - CreatedBy openapi_types.UUID `json:"created_by"` - ExpiresAt string `json:"expires_at"` - TokenAlias string `json:"token_alias"` -} - // ProjectUpgradeEligibilityResponse defines model for ProjectUpgradeEligibilityResponse. type ProjectUpgradeEligibilityResponse struct { CurrentAppVersion string `json:"current_app_version"` @@ -2236,6 +2174,9 @@ type UpdateAuthConfigBody struct { ExternalZoomClientId nullable.Nullable[string] `json:"external_zoom_client_id,omitempty"` ExternalZoomEnabled nullable.Nullable[bool] `json:"external_zoom_enabled,omitempty"` ExternalZoomSecret nullable.Nullable[string] `json:"external_zoom_secret,omitempty"` + HookBeforeUserCreatedEnabled nullable.Nullable[bool] `json:"hook_before_user_created_enabled,omitempty"` + HookBeforeUserCreatedSecrets nullable.Nullable[string] `json:"hook_before_user_created_secrets,omitempty"` + HookBeforeUserCreatedUri nullable.Nullable[string] `json:"hook_before_user_created_uri,omitempty"` HookCustomAccessTokenEnabled nullable.Nullable[bool] `json:"hook_custom_access_token_enabled,omitempty"` HookCustomAccessTokenSecrets nullable.Nullable[string] `json:"hook_custom_access_token_secrets,omitempty"` HookCustomAccessTokenUri nullable.Nullable[string] `json:"hook_custom_access_token_uri,omitempty"` @@ -2824,6 +2765,12 @@ type V1UpdatePostgrestConfigBody struct { MaxRows *int `json:"max_rows,omitempty"` } +// V1UpsertMigrationBody defines model for V1UpsertMigrationBody. +type V1UpsertMigrationBody struct { + Name *string `json:"name,omitempty"` + Query string `json:"query"` +} + // VanitySubdomainBody defines model for VanitySubdomainBody. type VanitySubdomainBody struct { VanitySubdomain string `json:"vanity_subdomain"` @@ -2838,6 +2785,11 @@ type VanitySubdomainConfigResponse struct { // VanitySubdomainConfigResponseStatus defines model for VanitySubdomainConfigResponse.Status. type VanitySubdomainConfigResponseStatus string +// V1DiffABranchParams defines parameters for V1DiffABranch. +type V1DiffABranchParams struct { + IncludedSchemas *string `form:"included_schemas,omitempty" json:"included_schemas,omitempty"` +} + // V1AuthorizeUserParams defines parameters for V1AuthorizeUser. type V1AuthorizeUserParams struct { ClientId openapi_types.UUID `form:"client_id" json:"client_id"` @@ -2883,10 +2835,20 @@ type CreateApiKeyParams struct { Reveal *bool `form:"reveal,omitempty" json:"reveal,omitempty"` } +// UpdateLegacyApiKeysParams defines parameters for UpdateLegacyApiKeys. +type UpdateLegacyApiKeysParams struct { + // Enabled Boolean string, true or false + Enabled bool `form:"enabled" json:"enabled"` +} + // DeleteApiKeyParams defines parameters for DeleteApiKey. 
type DeleteApiKeyParams struct { // Reveal Boolean string, true or false Reveal *bool `form:"reveal,omitempty" json:"reveal,omitempty"` + + // WasCompromised Boolean string, true or false + WasCompromised *bool `form:"was_compromised,omitempty" json:"was_compromised,omitempty"` + Reason *string `form:"reason,omitempty" json:"reason,omitempty"` } // GetApiKeyParams defines parameters for GetApiKey. @@ -2907,6 +2869,12 @@ type V1ApplyAMigrationParams struct { IdempotencyKey *string `json:"Idempotency-Key,omitempty"` } +// V1UpsertAMigrationParams defines parameters for V1UpsertAMigration. +type V1UpsertAMigrationParams struct { + // IdempotencyKey A unique key to ensure the same migration is tracked only once. + IdempotencyKey *string `json:"Idempotency-Key,omitempty"` +} + // V1CreateAFunctionParams defines parameters for V1CreateAFunction. type V1CreateAFunctionParams struct { Slug *string `form:"slug,omitempty" json:"slug,omitempty"` @@ -3053,6 +3021,9 @@ type V1RestorePitrBackupJSONRequestBody = V1RestorePitrBody // V1ApplyAMigrationJSONRequestBody defines body for V1ApplyAMigration for application/json ContentType. type V1ApplyAMigrationJSONRequestBody = V1CreateMigrationBody +// V1UpsertAMigrationJSONRequestBody defines body for V1UpsertAMigration for application/json ContentType. +type V1UpsertAMigrationJSONRequestBody = V1UpsertMigrationBody + // V1RunAQueryJSONRequestBody defines body for V1RunAQuery for application/json ContentType. type V1RunAQueryJSONRequestBody = V1RunQueryBody diff --git a/pkg/config/config.go b/pkg/config/config.go index 04142d783..c6edda1b7 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -317,10 +317,12 @@ func (c *baseConfig) Clone() baseConfig { return copy } -type ConfigEditor func(*config) +type Config *config + +type ConfigEditor func(Config) func WithHostname(hostname string) ConfigEditor { - return func(c *config) { + return func(c Config) { c.Hostname = hostname } } @@ -344,6 +346,9 @@ func NewConfig(editors ...ConfigEditor) config { EncryptionKey: "12345678901234567890123456789032", SecretKeyBase: "EAx3IQ/wRG1v47ZD4NE4/9RzBI8Jmil3x0yhcW4V2NHBP6c2iPIzwjofi2Ep4HIG", }, + Migrations: migrations{ + Enabled: true, + }, Seed: seed{ Enabled: true, SqlPaths: []string{"seed.sql"}, diff --git a/pkg/config/db.go b/pkg/config/db.go index bdbc4da5d..0f216a23f 100644 --- a/pkg/config/db.go +++ b/pkg/config/db.go @@ -82,6 +82,7 @@ type ( } migrations struct { + Enabled bool `toml:"enabled"` SchemaPaths Glob `toml:"schema_paths"` } diff --git a/pkg/config/templates/Dockerfile b/pkg/config/templates/Dockerfile index f1458637e..0cab55e94 100644 --- a/pkg/config/templates/Dockerfile +++ b/pkg/config/templates/Dockerfile @@ -1,19 +1,19 @@ # Exposed for updates by .github/dependabot.yml -FROM supabase/postgres:17.4.1.038 AS pg +FROM supabase/postgres:17.4.1.043 AS pg # Append to ServiceImages when adding new dependencies below FROM library/kong:2.8.1 AS kong FROM axllent/mailpit:v1.22.3 AS mailpit FROM postgrest/postgrest:v12.2.12 AS postgrest FROM supabase/postgres-meta:v0.89.3 AS pgmeta -FROM supabase/studio:2025.06.02-sha-8f2993d AS studio +FROM supabase/studio:2025.06.16-sha-c4316c3 AS studio FROM darthsim/imgproxy:v3.8.0 AS imgproxy FROM supabase/edge-runtime:v1.67.4 AS edgeruntime FROM timberio/vector:0.28.1-alpine AS vector -FROM supabase/supavisor:2.5.1 AS supavisor -FROM supabase/gotrue:v2.174.0 AS gotrue -FROM supabase/realtime:v2.36.7 AS realtime -FROM supabase/storage-api:v1.23.0 AS storage -FROM supabase/logflare:1.12.0 AS logflare +FROM 
supabase/supavisor:2.5.2 AS supavisor +FROM supabase/gotrue:v2.176.1 AS gotrue +FROM supabase/realtime:v2.36.18 AS realtime +FROM supabase/storage-api:v1.24.6 AS storage +FROM supabase/logflare:1.14.2 AS logflare # Append to JobImages when adding new dependencies below FROM supabase/pgadmin-schema-diff:cli-0.0.5 AS differ FROM supabase/migra:3.0.1663481299 AS migra diff --git a/pkg/config/templates/config.toml b/pkg/config/templates/config.toml index c8aa68ad1..d72ae5045 100644 --- a/pkg/config/templates/config.toml +++ b/pkg/config/templates/config.toml @@ -28,7 +28,7 @@ port = 54322 shadow_port = 54320 # The database major version to use. This has to be the same as your remote database's. Run `SHOW # server_version;` on the remote database to check. -major_version = 15 +major_version = 17 [db.pooler] enabled = false @@ -46,6 +46,8 @@ max_client_conn = 100 # secret_key = "env(SECRET_VALUE)" [db.migrations] +# If disabled, migrations will be skipped during a db push or reset. +enabled = true # Specifies an ordered list of schema files that describe your database. # Supports glob patterns relative to supabase directory: "./schemas/*.sql" schema_paths = [] diff --git a/pkg/config/testdata/config.toml b/pkg/config/testdata/config.toml index 98ad26ecf..ed0d508fc 100644 --- a/pkg/config/testdata/config.toml +++ b/pkg/config/testdata/config.toml @@ -28,9 +28,11 @@ port = 54322 shadow_port = 54320 # The database major version to use. This has to be the same as your remote database's. Run `SHOW # server_version;` on the remote database to check. -major_version = 15 +major_version = 17 [db.migrations] +# If disabled, migrations will be skipped during a db push or reset. +enabled = true # Specifies an ordered list of schema files that describe your database. 
# Supports glob patterns relative to supabase directory: "./schemas/*.sql" schema_paths = ["./schemas/*.sql"] diff --git a/pkg/go.mod b/pkg/go.mod index 6fb1fec23..0340f3cba 100644 --- a/pkg/go.mod +++ b/pkg/go.mod @@ -25,8 +25,8 @@ require ( github.com/spf13/viper v1.20.1 github.com/stretchr/testify v1.10.0 github.com/tidwall/jsonc v0.3.2 - golang.org/x/mod v0.24.0 - google.golang.org/grpc v1.72.2 + golang.org/x/mod v0.25.0 + google.golang.org/grpc v1.73.0 ) require ( @@ -35,7 +35,6 @@ require ( github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect github.com/ethereum/go-ethereum v1.15.8 // indirect github.com/fsnotify/fsnotify v1.9.0 // indirect - github.com/google/go-cmp v0.7.0 // indirect github.com/google/uuid v1.6.0 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect diff --git a/pkg/go.sum b/pkg/go.sum new file mode 100644 index 000000000..c25f97883 --- /dev/null +++ b/pkg/go.sum @@ -0,0 +1,307 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/ecies/go/v2 v2.0.11 h1:xYhtMdLiqNi02oLirFmLyNbVXw6250h3WM6zJryQdiM= +github.com/ecies/go/v2 v2.0.11/go.mod 
h1:LPRzoefP0Tam+1uesQOq3Gtb6M2OwlFUnXBTtBAKfDQ= +github.com/ethereum/go-ethereum v1.15.8 h1:H6NilvRXFVoHiXZ3zkuTqKW5XcxjLZniV5UjxJt1GJU= +github.com/ethereum/go-ethereum v1.15.8/go.mod h1:+S9k+jFzlyVTNcYGvqFhzN/SFhI6vA+aOY4T5tLSPL0= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= +github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE= +github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= +github.com/jackc/pgconn v1.14.3/go.mod 
h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= +github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= +github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= +github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgtype v1.14.4 h1:fKuNiCumbKTAIxQwXfB/nsrnkEI6bPJrrSiMKgbJ2j8= +github.com/jackc/pgtype v1.14.4/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= 
+github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= +github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= +github.com/oapi-codegen/nullable v1.1.0 h1:eAh8JVc5430VtYVnq00Hrbpag9PFRGWLjxR1/3KntMs= +github.com/oapi-codegen/nullable v1.1.0/go.mod h1:KUZ3vUzkmEKY90ksAmit2+5juDIhIZhfDl+0PwOQlFY= +github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro= +github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 
v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= +github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= +github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= +github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= +github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= +github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= +github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4= +github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +github.com/tidwall/jsonc v0.3.2 h1:ZTKrmejRlAJYdn0kcaFqRAKlxxFIC21pYq8vLa4p2Wc= +github.com/tidwall/jsonc v0.3.2/go.mod h1:dw+3CIxqHi+t8eFSpzzMlcVYxKp08UP5CD8/uSFCyJE= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= +go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod 
h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= 
+golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= +google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/pkg/migration/dump.go b/pkg/migration/dump.go new file mode 100644 index 000000000..66d3f7486 --- /dev/null +++ b/pkg/migration/dump.go @@ -0,0 +1,224 @@ +package migration + +import ( + "context" + _ "embed" + "fmt" + "io" + "strings" + + "github.com/jackc/pgconn" +) + +var ( + //go:embed scripts/dump_schema.sh + dumpSchemaScript string + //go:embed scripts/dump_data.sh + dumpDataScript string + //go:embed scripts/dump_role.sh + dumpRoleScript string + + InternalSchemas = []string{ + "information_schema", + "pg_*", // Wildcard pattern follows pg_dump + // Initialised by supabase/postgres image and owned by postgres role + "_analytics", + "_realtime", + "_supavisor", + "auth", + "extensions", + "pgbouncer", + "realtime", + "storage", + "supabase_functions", + "supabase_migrations", + // Owned by extensions + "cron", + "dbdev", + "graphql", + "graphql_public", + "net", + "pgmq", + "pgsodium", + "pgsodium_masks", + "pgtle", + "repack", + "tiger", + "tiger_data", + "timescaledb_*", + "_timescaledb_*", + "topology", + "vault", + } + // Data dump includes auth, storage, etc. 
for migrating to new project + excludedSchemas = []string{ + "information_schema", + "pg_*", // Wildcard pattern follows pg_dump + // Owned by extensions + // "cron", + "graphql", + "graphql_public", + // "net", + // "pgmq", + "pgsodium", + "pgsodium_masks", + "pgtle", + "repack", + "tiger", + "tiger_data", + "timescaledb_*", + "_timescaledb_*", + "topology", + "vault", + // Managed by Supabase + // "auth", + "extensions", + "pgbouncer", + "realtime", + // "storage", + // "supabase_functions", + "supabase_migrations", + // TODO: Remove in a few version in favor of _supabase internal db + "_analytics", + "_realtime", + "_supavisor", + } + reservedRoles = []string{ + "anon", + "authenticated", + "authenticator", + "dashboard_user", + "pgbouncer", + "postgres", + "service_role", + "supabase_admin", + "supabase_auth_admin", + "supabase_functions_admin", + "supabase_read_only_user", + "supabase_realtime_admin", + "supabase_replication_admin", + "supabase_storage_admin", + // Managed by extensions + "pgsodium_keyholder", + "pgsodium_keyiduser", + "pgsodium_keymaker", + "pgtle_admin", + } + allowedConfigs = []string{ + // Ref: https://github.com/supabase/postgres/blob/develop/ansible/files/postgresql_config/supautils.conf.j2#L10 + "pgaudit.*", + "pgrst.*", + "session_replication_role", + "statement_timeout", + "track_io_timing", + } +) + +type pgDumpOption struct { + schema []string + keepComments bool + excludeTable []string + columnInsert bool +} + +type DumpOptionFunc func(*pgDumpOption) + +func WithSchema(schema ...string) DumpOptionFunc { + return func(pdo *pgDumpOption) { + pdo.schema = schema + } +} + +func WithComments(keep bool) DumpOptionFunc { + return func(pdo *pgDumpOption) { + pdo.keepComments = keep + } +} + +func WithColumnInsert(use bool) DumpOptionFunc { + return func(pdo *pgDumpOption) { + pdo.columnInsert = use + } +} + +func WithoutTable(table ...string) DumpOptionFunc { + return func(pdo *pgDumpOption) { + pdo.excludeTable = table + } +} + +func toEnv(config pgconn.Config) []string { + return []string{ + "PGHOST=" + config.Host, + fmt.Sprintf("PGPORT=%d", config.Port), + "PGUSER=" + config.User, + "PGPASSWORD=" + config.Password, + "PGDATABASE=" + config.Database, + } +} + +type ExecFunc func(context.Context, string, []string, io.Writer) error + +func DumpSchema(ctx context.Context, config pgconn.Config, w io.Writer, exec ExecFunc, opts ...DumpOptionFunc) error { + var opt pgDumpOption + for _, apply := range opts { + apply(&opt) + } + env := toEnv(config) + if len(opt.schema) > 0 { + // Must append flag because empty string results in error + env = append(env, "EXTRA_FLAGS=--schema="+strings.Join(opt.schema, "|")) + } else { + env = append(env, "EXCLUDED_SCHEMAS="+strings.Join(InternalSchemas, "|")) + } + if !opt.keepComments { + env = append(env, "EXTRA_SED=/^--/d") + } + return exec(ctx, dumpSchemaScript, env, w) +} + +func DumpData(ctx context.Context, config pgconn.Config, w io.Writer, exec ExecFunc, opts ...DumpOptionFunc) error { + var opt pgDumpOption + for _, apply := range opts { + apply(&opt) + } + env := toEnv(config) + if len(opt.schema) > 0 { + env = append(env, "INCLUDED_SCHEMAS="+strings.Join(opt.schema, "|")) + } else { + env = append(env, "INCLUDED_SCHEMAS=*", "EXCLUDED_SCHEMAS="+strings.Join(excludedSchemas, "|")) + } + var extraFlags []string + if opt.columnInsert { + extraFlags = append(extraFlags, "--column-inserts", "--rows-per-insert 100000") + } + for _, table := range opt.excludeTable { + escaped := quoteUpperCase(table) + // Use separate flags to 
avoid error: too many dotted names + extraFlags = append(extraFlags, "--exclude-table "+escaped) + } + if len(extraFlags) > 0 { + env = append(env, "EXTRA_FLAGS="+strings.Join(extraFlags, " ")) + } + return exec(ctx, dumpDataScript, env, w) +} + +func quoteUpperCase(table string) string { + escaped := strings.ReplaceAll(table, ".", `"."`) + return fmt.Sprintf(`"%s"`, escaped) +} + +func DumpRole(ctx context.Context, config pgconn.Config, w io.Writer, exec ExecFunc, opts ...DumpOptionFunc) error { + var opt pgDumpOption + for _, apply := range opts { + apply(&opt) + } + env := append(toEnv(config), + "RESERVED_ROLES="+strings.Join(reservedRoles, "|"), + "ALLOWED_CONFIGS="+strings.Join(allowedConfigs, "|"), + ) + if !opt.keepComments { + env = append(env, "EXTRA_SED=/^--/d") + } + return exec(ctx, dumpRoleScript, env, w) +} diff --git a/internal/db/dump/templates/dump_data.sh b/pkg/migration/scripts/dump_data.sh similarity index 100% rename from internal/db/dump/templates/dump_data.sh rename to pkg/migration/scripts/dump_data.sh diff --git a/internal/db/dump/templates/dump_role.sh b/pkg/migration/scripts/dump_role.sh similarity index 100% rename from internal/db/dump/templates/dump_role.sh rename to pkg/migration/scripts/dump_role.sh diff --git a/internal/db/dump/templates/dump_schema.sh b/pkg/migration/scripts/dump_schema.sh similarity index 100% rename from internal/db/dump/templates/dump_schema.sh rename to pkg/migration/scripts/dump_schema.sh
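
The new pkg/migration dump helpers take a caller-supplied ExecFunc, so the package itself stays agnostic about how the embedded dump_*.sh scripts are executed. The sketch below is illustrative only and is not part of this patch: runScript is an assumed stand-in that runs the script locally with bash (it requires pg_dump on the host), whereas the CLI itself would execute these scripts through its own runner (e.g. inside the postgres container); the connection settings are placeholders for a local stack.

package main

import (
	"context"
	"io"
	"log"
	"os"
	"os/exec"

	"github.com/jackc/pgconn"
	"github.com/supabase/cli/pkg/migration"
)

// runScript is a stand-in ExecFunc: it runs the embedded script with bash,
// passing the connection settings and dump flags through the environment
// and streaming the dump to the supplied writer.
func runScript(ctx context.Context, script string, env []string, w io.Writer) error {
	cmd := exec.CommandContext(ctx, "bash", "-c", script)
	cmd.Env = append(os.Environ(), env...)
	cmd.Stdout = w
	cmd.Stderr = os.Stderr
	return cmd.Run()
}

func main() {
	ctx := context.Background()
	config := pgconn.Config{
		Host:     "127.0.0.1",
		Port:     54322,
		User:     "postgres",
		Password: "postgres",
		Database: "postgres",
	}
	// Dump only the public schema, keep comments, and skip a hypothetical noisy table.
	opts := []migration.DumpOptionFunc{
		migration.WithSchema("public"),
		migration.WithComments(true),
		migration.WithoutTable("public.audit_log"),
	}
	if err := migration.DumpSchema(ctx, config, os.Stdout, runScript, opts...); err != nil {
		log.Fatal(err)
	}
}
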