From 0499629e450bce4a1192e64e69d4912bbad2e2f8 Mon Sep 17 00:00:00 2001
From: AlexOnomy
Date: Mon, 6 May 2024 09:38:12 -0700
Subject: [PATCH] From: https://github.com/onomyprotocol/Onomy-Rebuild

---
 .build.sh | 44 +
 .changelog/config.toml | 20 +
 .changelog/epilogue.md | 3 +
 .changelog/unreleased/.gitkeep | 0
 .../v15.0.0/api-breaking/2912-vote-spam.md | 2 +
 .../v15.0.0/api-breaking/2967-bump-ics.md | 4 +
 .../v15.0.0/api-breaking/2967-bump-sdk.md | 8 +
 .../v15.0.0/bug-fixes/2912-vote-spam.md | 2 +
 .changelog/v15.0.0/bug-fixes/2967-bump-sdk.md | 12 +
 .../v15.0.0/dependencies/2852-bump-comet.md | 3 +
 .../v15.0.0/dependencies/2852-bump-ibc.md | 3 +
 .../v15.0.0/dependencies/2852-bump-pfm.md | 3 +
 .../v15.0.0/dependencies/2967-bump-ics.md | 3 +
 .../v15.0.0/dependencies/2967-bump-sdk.md | 4 +
 .../2960-add-metaprotocols-support.md | 2 +
 .../v15.0.0/state-breaking/2852-bump-comet.md | 3 +
 .../v15.0.0/state-breaking/2852-bump-ibc.md | 3 +
 .../v15.0.0/state-breaking/2852-bump-pfm.md | 3 +
 .../2855-migrate-min-commission-rate.md | 7 +
 .../2866-migrate-signing-infos.md | 5 +
 .../2891-migrate-vesting-funds.md | 3 +
 .../v15.0.0/state-breaking/2912-vote-spam.md | 2 +
 .../v15.0.0/state-breaking/2913-gov-spam.md | 3 +
 .../2960-add-metaprotocols-support.md | 2 +
 .../v15.0.0/state-breaking/2967-bump-ics.md | 3 +
 .../v15.0.0/state-breaking/2967-bump-sdk.md | 5 +
 .changelog/v15.0.0/summary.md | 1 +
 .../v15.1.0/dependencies/2982-bump-pfm.md | 2 +
 .../features/2974-add-snapshot-commands.md | 1 +
 .../v15.1.0/state-breaking/2982-bump-pfm.md | 2 +
 .../2993-migrate-escrow-accounts.md | 3 +
 .changelog/v15.1.0/summary.md | 1 +
 .../bug-fixes/3025-gov-metatdata-len.md | 1 +
 .../bug-fixes/3032-historic-tx-extensions.md | 1 +
 .../state-breaking/3025-gov-metatdata-len.md | 1 +
 .../3032-historic-tx-extensions.md | 1 +
 .changelog/v15.2.0/summary.md | 2 +
 .dockerignore | 4 +
 .gitattributes | 0
 .github/CODEOWNERS | 5 +
 .github/ISSUE_TEMPLATE/bug-report.md | 34 +
 .github/ISSUE_TEMPLATE/epic-template.md | 30 +
 .github/ISSUE_TEMPLATE/feature-request.md | 50 +
 .github/ISSUE_TEMPLATE/issue-template.md | 24 +
 .github/ISSUE_TEMPLATE/tech-debt.md | 54 +
 .github/ISSUE_TEMPLATE/upgrade-checklist.md | 58 +
 .github/PULL_REQUEST_TEMPLATE.md | 5 +
 .github/PULL_REQUEST_TEMPLATE/docs.md | 38 +
 .github/PULL_REQUEST_TEMPLATE/others.md | 33 +
 .github/PULL_REQUEST_TEMPLATE/production.md | 48 +
 .github/codecov.yml | 33 +
 .github/dependabot.yml | 48 +
 .github/stale.yml | 45 +
 .github/workflows/automated-release.yml | 64 -
 .github/workflows/ci.yml | 72 -
 .github/workflows/codeql-analysis.yml | 61 +
 .github/workflows/deploy-docs.yml | 47 +
 .github/workflows/docker-devbase.yml | 29 -
 .github/workflows/docker-push.yml | 51 +
 .github/workflows/lint.yml | 33 +
 .github/workflows/md-link-checker.yml | 13 +
 .github/workflows/nightly-tests.yml | 63 +
 .github/workflows/release-sims.yml | 83 +
 .github/workflows/release.yml | 35 +
 .github/workflows/sim-label.yml | 43 +
 .github/workflows/sims.yml | 118 +
 .github/workflows/stale.yml | 18 +
 .github/workflows/test.yml | 182 +
 .gitignore | 62 +-
 .gitpod.yml | 4 +
 .golangci.yml | 452 +-
 .goreleaser.yml | 64 +
 .mergify.yml | 52 +
 .rustfmt.toml | 20 -
 CHANGELOG.md | 139 +
 CONTRIBUTING.md | 331 +
 Cargo.toml | 7 -
 Dockerfile | 20 +
 LICENSE | 862 +-
 Makefile | 325 +-
 RELEASE_NOTES.md | 40 +
 RELEASE_PROCESS.md | 251 +
 SECURITY.md | 30 +
 STATE-COMPATIBILITY.md | 211 +
 UPGRADING.md | 160 +
 ante/ante.go | 76 +
 ante/gov_vote_ante.go | 141 +
 ante/gov_vote_ante_test.go | 249 +
 app/app.go | 883 +-
 app/app_helpers.go | 56 +
app/app_test.go | 47 + app/const.go | 5 + app/encoding.go | 18 + app/export.go | 115 +- app/genesis.go | 10 +- app/genesis_account.go | 56 + app/genesis_account_fuzz_test.go | 35 + app/helpers/test_helpers.go | 204 + app/keepers/keepers.go | 451 + app/keepers/keys.go | 93 + app/modules.go | 310 + app/params/amino.go | 23 + app/params/doc.go | 19 + app/params/encoding.go | 16 + app/params/params.go | 7 + app/params/proto.go | 21 + app/params/weights.go | 24 + app/sim/sim_config.go | 75 + app/sim/sim_state.go | 262 + app/sim/sim_utils.go | 77 + app/sim_bench_test.go | 85 + app/sim_test.go | 143 + app/upgrades/readme.md | 13 - app/upgrades/types.go | 40 + app/upgrades/v1.0.1/upgrade.go | 16 - app/upgrades/v1.0.3.4/upgrade.go | 16 - app/upgrades/v1.0.3.5/upgrade.go | 16 - app/upgrades/v1.0.3/upgrade.go | 16 - app/upgrades/v1.1.1/upgrade.go | 5 - app/upgrades/v1.1.2/upgrade.go | 16 - app/upgrades/v1.1.4/upgrade.go | 16 - app/upgrades/v10/constants.go | 15 + app/upgrades/v10/upgrades.go | 27 + app/upgrades/v11/constants.go | 15 + app/upgrades/v11/upgrades.go | 27 + app/upgrades/v12/constants.go | 30 + app/upgrades/v12/upgrades.go | 38 + app/upgrades/v13/constants.go | 15 + app/upgrades/v13/upgrades.go | 27 + app/upgrades/v14/constants.go | 15 + app/upgrades/v14/upgrades.go | 32 + app/upgrades/v15/constants.go | 26 + app/upgrades/v15/upgrades.go | 458 + app/upgrades/v15/upgrades_test.go | 320 + app/upgrades/v7/constants.go | 51 + app/upgrades/v7/upgrades.go | 72 + app/upgrades/v8/constants.go | 25 + app/upgrades/v8/upgrades.go | 140 + app/upgrades/v9/constants.go | 27 + app/upgrades/v9/upgrades.go | 29 + buf.work.yaml | 3 +- buf.yaml | 11 - client/docs/config.json | 18 + client/docs/swagger-ui/favicon-16x16.png | Bin 0 -> 665 bytes client/docs/swagger-ui/favicon-32x32.png | Bin 0 -> 628 bytes client/docs/swagger-ui/index.html | 61 + client/docs/swagger-ui/oauth2-redirect.html | 75 + client/docs/swagger-ui/swagger-ui-bundle.js | 3 + .../docs/swagger-ui/swagger-ui-bundle.js.map | 1 + .../swagger-ui/swagger-ui-es-bundle-core.js | 3 + .../swagger-ui-es-bundle-core.js.map | 1 + .../docs/swagger-ui/swagger-ui-es-bundle.js | 3 + .../swagger-ui/swagger-ui-es-bundle.js.map | 1 + .../swagger-ui-standalone-preset.js | 3 + .../swagger-ui-standalone-preset.js.map | 1 + client/docs/swagger-ui/swagger-ui.css | 4 + client/docs/swagger-ui/swagger-ui.css.map | 1 + client/docs/swagger-ui/swagger-ui.js | 3 + client/docs/swagger-ui/swagger-ui.js.map | 1 + client/docs/swagger-ui/swagger.yaml | 121 + cmd/onomyd/cmd/bech32_convert.go | 53 + cmd/onomyd/cmd/cmd.go | 54 - cmd/onomyd/cmd/genaccounts.go | 197 + cmd/onomyd/cmd/root.go | 352 + cmd/onomyd/cmd/root_test.go | 23 + cmd/onomyd/cmd/testnet.go | 532 + cmd/onomyd/main.go | 16 +- cmd/onomyd/main_test.go | 22 - contrib/Dockerfile.test | 35 + contrib/denom.json | 30 + contrib/devtools/Makefile | 114 + contrib/generate_release_note/main.go | 70 + contrib/get_node.sh | 14 + contrib/githooks/README.md | 21 + contrib/githooks/pre-commit | 41 + contrib/githooks/precommit | 41 + contrib/scripts/local-gaia.sh | 65 + contrib/scripts/test_localnet_liveness.sh | 53 + .../scripts/upgrade_test_scripts/run_gaia.sh | 93 + .../run_upgrade_commands.sh | 96 + .../upgrade_test_scripts/test_upgrade.sh | 70 + .../upgrade_test_scripts/v10/run_gaia_v9.sh | 73 + .../v10/run_upgrade_commands_v10.sh | 76 + .../upgrade_test_scripts/v11/run_gaia_v10.sh | 74 + .../v11/run_upgrade_commands_v11.sh | 76 + .../v11/test_migration_v11.sh | 32 + .../upgrade_test_scripts/v12/run_gaia_v11.sh | 73 + 
.../v12/run_upgrade_commands_v12.sh | 73 + .../upgrade_test_scripts/v8/run-gaia-v7.sh | 71 + .../v8/run-upgrade-commands-v8-rho.sh | 75 + .../upgrade_test_scripts/v9/run-gaia-v8.sh | 71 + .../v9/run-upgrade-commands.sh | 74 + contrib/single-node.sh | 36 + contrib/statesync.bash | 63 + contrib/testnets/Makefile | 143 + contrib/testnets/README.md | 6 + contrib/testnets/add-cluster.sh | 25 + contrib/testnets/add-datadog.sh | 14 + contrib/testnets/del-cluster.sh | 14 + contrib/testnets/del-datadog.sh | 13 + contrib/testnets/list.sh | 13 + contrib/testnets/new-testnet.sh | 30 + contrib/testnets/remote/ansible/.gitignore | 3 + contrib/testnets/remote/ansible/add-lcd.yml | 8 + .../testnets/remote/ansible/clear-config.yml | 8 + .../remote/ansible/extract-config.yml | 8 + .../remote/ansible/increase-openfiles.yml | 8 + .../remote/ansible/install-datadog-agent.yml | 12 + .../testnets/remote/ansible/inventory/COPYING | 675 + .../ansible/inventory/digital_ocean.ini | 34 + .../remote/ansible/inventory/digital_ocean.py | 471 + .../testnets/remote/ansible/inventory/ec2.ini | 209 + .../testnets/remote/ansible/inventory/ec2.py | 1595 + contrib/testnets/remote/ansible/logzio.yml | 13 + .../remote/ansible/remove-datadog-agent.yml | 8 + .../ansible/roles/add-lcd/defaults/main.yml | 4 + .../ansible/roles/add-lcd/handlers/main.yml | 9 + .../ansible/roles/add-lcd/tasks/main.yml | 15 + .../add-lcd/templates/gaiacli.service.j2 | 17 + .../ansible/roles/clear-config/tasks/main.yml | 9 + .../roles/extract-config/defaults/main.yml | 4 + .../roles/extract-config/tasks/main.yml | 14 + .../roles/increase-openfiles/files/50-fs.conf | 1 + .../increase-openfiles/files/91-nofiles.conf | 3 + .../increase-openfiles/files/limits.conf | 3 + .../increase-openfiles/handlers/main.yml | 5 + .../roles/increase-openfiles/tasks/main.yml | 22 + .../install-datadog-agent/handlers/main.yml | 10 + .../install-datadog-agent/tasks/main.yml | 15 + .../roles/logzio/files/journalbeat.service | 15 + .../ansible/roles/logzio/handlers/main.yml | 8 + .../ansible/roles/logzio/tasks/main.yml | 27 + .../roles/logzio/templates/journalbeat.yml.j2 | 342 + .../roles/remove-datadog-agent/tasks/main.yml | 12 + .../roles/set-debug/files/sysconfig/gaiacli | 1 + .../roles/set-debug/files/sysconfig/gaiad | 1 + .../set-debug/files/sysctl.d/10-procdump | 3 + .../roles/set-debug/handlers/main.yaml | 4 + .../ansible/roles/set-debug/tasks/main.yml | 9 + .../roles/setup-fullnodes/defaults/main.yml | 4 + .../roles/setup-fullnodes/files/gaiad.service | 17 + .../roles/setup-fullnodes/handlers/main.yml | 5 + .../roles/setup-fullnodes/tasks/main.yml | 61 + .../roles/setup-journald/handlers/main.yml | 5 + .../roles/setup-journald/tasks/main.yml | 26 + .../roles/setup-validators/defaults/main.yml | 4 + .../setup-validators/files/gaiad.service | 17 + .../roles/setup-validators/handlers/main.yml | 5 + .../roles/setup-validators/tasks/main.yml | 78 + .../remote/ansible/roles/start/tasks/main.yml | 5 + .../remote/ansible/roles/stop/tasks/main.yml | 5 + .../files/conf.d/http_check.d/conf.yaml | 13 + .../files/conf.d/network.d/conf.yaml | 9 + .../files/conf.d/process.d/conf.yaml | 15 + .../files/conf.d/prometheus.d/conf.yaml | 10 + .../update-datadog-agent/handlers/main.yml | 5 + .../roles/update-datadog-agent/tasks/main.yml | 10 + .../templates/datadog.yaml.j2 | 561 + .../roles/upgrade-gaiad/handlers/main.yml | 5 + .../roles/upgrade-gaiad/tasks/main.yml | 29 + contrib/testnets/remote/ansible/set-debug.yml | 8 + .../remote/ansible/setup-fullnodes.yml | 13 + 
.../remote/ansible/setup-journald.yml | 10 + .../remote/ansible/setup-validators.yml | 9 + contrib/testnets/remote/ansible/start.yml | 10 + contrib/testnets/remote/ansible/status.yml | 17 + contrib/testnets/remote/ansible/stop.yml | 10 + .../remote/ansible/update-datadog-agent.yml | 10 + .../testnets/remote/ansible/upgrade-gaia.yml | 9 + .../testnets/remote/ansible/upgrade-gaiad.yml | 11 + .../testnets/remote/terraform-app/.gitignore | 5 + .../remote/terraform-app/files/terraform.sh | 8 + .../remote/terraform-app/infra/attachment.tf | 21 + .../remote/terraform-app/infra/instance.tf | 58 + .../testnets/remote/terraform-app/infra/lb.tf | 52 + .../remote/terraform-app/infra/lcd.tf | 39 + .../remote/terraform-app/infra/outputs.tf | 24 + .../remote/terraform-app/infra/variables.tf | 39 + .../remote/terraform-app/infra/vpc.tf | 104 + contrib/testnets/remote/terraform-app/main.tf | 73 + .../testnets/remote/terraform-aws/.gitignore | 5 + .../remote/terraform-aws/files/terraform.sh | 11 + contrib/testnets/remote/terraform-aws/main.tf | 249 + .../remote/terraform-aws/nodes/main.tf | 104 + .../remote/terraform-aws/nodes/outputs.tf | 15 + .../remote/terraform-aws/nodes/variables.tf | 42 + .../testnets/remote/terraform-do/.gitignore | 6 + contrib/testnets/remote/terraform-do/Makefile | 100 + .../testnets/remote/terraform-do/README.md | 58 + .../remote/terraform-do/cluster/main.tf | 40 + .../remote/terraform-do/cluster/outputs.tf | 15 + .../remote/terraform-do/cluster/variables.tf | 30 + .../remote/terraform-do/files/terraform.sh | 8 + contrib/testnets/remote/terraform-do/main.tf | 43 + contrib/testnets/test_platform/README.md | 45 + .../test_platform/gaiad_config_manager.py | 233 + .../templates/3924406.cosmoshub-3.json.tar.gz | Bin 0 -> 9988219 bytes .../testnets/test_platform/templates/app.toml | 177 + .../test_platform/templates/config.toml | 393 + .../templates/replacement_defaults.txt | 31 + .../validator_replacement_example.json | 394 + contrib/testnets/upgrade-gaiad.sh | 14 + contrib/testnets/using-cleveldb.sh | 19 + deploy/scripts/add-service.sh | 46 - deploy/scripts/allow-cors.sh | 17 - deploy/scripts/bin-mainnet-from-sources.sh | 67 - deploy/scripts/bin-mainnet.sh | 46 - deploy/scripts/bin-testnet-from-sources.sh | 67 - deploy/scripts/bin-testnet.sh | 45 - deploy/scripts/expose-metrics.sh | 15 - deploy/scripts/init-mainnet-full-node.sh | 78 - deploy/scripts/init-mainnet-statesync.sh | 25 - deploy/scripts/init-testnet-full-node.sh | 78 - deploy/scripts/init-testnet-statesync.sh | 44 - deploy/scripts/set-snapshots.sh | 18 - deploy/scripts/start-cosmovisor-onomyd.sh | 11 - deploy/scripts/start-onomyd.sh | 11 - deploy/scripts/stop-cosmovisor-onomyd.sh | 6 - deploy/scripts/stop-onomyd.sh | 6 - dev/base-image/Dockerfile | 24 - dev/base-image/readme.md | 14 - dev/openapi/config.json | 143 - dev/openapi/swagger_legacy.yaml | 2597 - dev/scripts/protoc-swagger-gen.sh | 27 - dev/scripts/protocgen.sh | 39 - dev/tools/devtools.Dockerfile | 55 - dev/tools/entrypoint.sh | 3 - docs/.gitignore | 20 + docs/DOCS_README.md | 112 + docs/README copy.md | 40 + docs/README.md | 40 + docs/babel.config.js | 3 + docs/build.sh | 27 + docs/chain/bonding-curve.md | 50 - docs/chain/full.md | 94 - docs/chain/genesis-binaries.md | 30 - docs/chain/installation.md | 82 - docs/chain/mainnet.md | 14 - docs/chain/set-ulimit-rhel8.md | 13 - docs/chain/testnet.md | 15 - docs/chain/validator.md | 3 - docs/docs.go | 6 - docs/docs/architecture/PROCESS.md | 60 + docs/docs/architecture/README.md | 60 + 
docs/docs/architecture/_category_.json | 5 + docs/docs/architecture/adr/PROCESS.md | 57 + docs/docs/architecture/adr/README.md | 64 + docs/docs/architecture/adr/_category_.json | 5 + .../adr/adr-001-interchain-accounts.md | 94 + .../architecture/adr/adr-002-globalfee.md | 165 + .../architecture/templates/_category_.json | 5 + .../architecture/templates/adr-template.md | 58 + docs/docs/client/_category_.json | 5 + .../gaia/globalfee/v1beta1/query.swagger.json | 118 + docs/docs/delegators/README.md | 10 + docs/docs/delegators/_category_.json | 5 + docs/docs/delegators/delegator-faq.md | 98 + docs/docs/delegators/delegator-guide-cli.md | 538 + docs/docs/delegators/delegator-security.md | 56 + docs/docs/getting-started/README.md | 10 + docs/docs/getting-started/_category_.json | 5 + docs/docs/getting-started/installation.md | 124 + docs/docs/getting-started/quickstart.mdx | 141 + .../getting-started/system-requirements.md | 22 + docs/docs/getting-started/what-is-gaia.md | 24 + docs/docs/governance/LICENSE | 121 + docs/docs/governance/README.md | 21 + docs/docs/governance/_category_.json | 5 + docs/docs/governance/best-practices.md | 176 + docs/docs/governance/current-parameters.js | 126 + docs/docs/governance/formatting.md | 136 + docs/docs/governance/process.md | 69 + docs/docs/governance/proposal-types/README.md | 28 + .../governance/proposal-types/_category_.json | 5 + .../proposal-types/community-pool-spend.md | 64 + .../community-pool-spend/_category_.json | 4 + .../community-pool-spend/proposal.json | 18 + .../governance/proposal-types/param-change.md | 67 + .../proposal-types/params-change/Auth.mdx | 121 + .../proposal-types/params-change/Crisis.mdx | 37 + .../params-change/Distribution.mdx | 146 + .../params-change/Governance.mdx | 167 + .../proposal-types/params-change/Mint.mdx | 170 + .../proposal-types/params-change/Slashing.mdx | 156 + .../proposal-types/params-change/Staking.mdx | 118 + .../params-change/param-index.mdx | 18 + .../governance/proposal-types/text-prop.md | 45 + .../2020-10-blocks-per-year/README.md | 73 + .../2020-10-blocks-per-year/proposal.json | 17 + .../2020-11-inflation-rate-change/README.md | 44 + .../proposal.json | 17 + .../2021-01-atom2021_marketing/README.md | 113 + .../2021-01-atom2021_marketing/proposal.json | 17 + .../2021-01-delay-stargate-upgrade/README.md | 17 + .../proposal.json | 8 + .../2021-01-stargate-upgrade-b/README.md | 155 + .../2021-01-stargate-upgrade-b/proposal.json | 7 + .../2021-01-stargate-upgrade/README.md | 148 + .../2021-01-stargate-upgrade/proposal.json | 4 + .../2021-03-enable-ibc-transfers/README.md | 22 + .../proposal.json | 17 + .../2021-04-advancing-ethermint/README.md | 122 + .../2021-04-advancing-ethermint/proposal.json | 7 + .../README.md | 62 + .../proposal.json | 24 + .../2021-04-prop34-continuation/README.md | 163 + .../2021-04-prop34-continuation/proposal.json | 4 + .../README.md | 192 + .../proposal.json | 10 + .../README.md | 31 + .../proposal.json | 7 + .../2021-09-hub-ibc-router/README.md | 52 + .../2021-09-hub-ibc-router/proposal.json | 5 + docs/docs/governance/proposals/README.md | 10 + .../docs/governance/proposals/_category_.json | 4 + .../proposals/previous-proposals/README.md | 209 + .../proposals/proposal-template.json | 10 + .../governance/proposals/proposal-template.md | 42 + docs/docs/governance/scripts/_category_.json | 4 + .../scripts/extract_onchain_params.py | 94 + .../state-of-cosmos-governance-2021.md | 360 + docs/docs/governance/submitting.md | 259 + docs/docs/guidelines/_category_.json | 5 + 
docs/docs/guidelines/code-guidelines.md | 1354 + docs/docs/hub-tutorials/README.md | 12 + docs/docs/hub-tutorials/_category_.json | 5 + docs/docs/hub-tutorials/gaiad.mdx | 921 + docs/docs/hub-tutorials/join-mainnet.md | 543 + docs/docs/hub-tutorials/join-testnet.md | 281 + .../hub-tutorials/live-upgrade-tutorial.md | 76 + docs/docs/hub-tutorials/upgrade-node.md | 143 + docs/docs/images/cosmos-hub-image.jpg | Bin 0 -> 367964 bytes docs/docs/images/ledger-tuto-dev-mode.png | Bin 0 -> 325063 bytes .../docs/images/ledger-tuto-lunie-address.png | Bin 0 -> 235487 bytes docs/docs/images/ledger-tuto-lunie-option.png | Bin 0 -> 200139 bytes docs/docs/images/ledger-tuto-manager.png | Bin 0 -> 201512 bytes docs/docs/images/ledger-tuto-search.png | Bin 0 -> 212373 bytes docs/docs/images/verify-tx.png | Bin 0 -> 124166 bytes docs/docs/index.mdx | 89 + docs/docs/interchain-security/README.md | 22 + docs/docs/interchain-security/_category_.json | 5 + docs/docs/metaprotocols/README.md | 71 + docs/docs/metaprotocols/_category_.json | 5 + docs/docs/migration/README.md | 17 + docs/docs/migration/_category_.json | 5 + .../migration/cosmoshub-2/_category_.json | 5 + .../docs/migration/cosmoshub-2/cosmoshub-2.md | 225 + .../migration/cosmoshub-3/_category_.json | 5 + .../docs/migration/cosmoshub-3/cosmoshub-3.md | 383 + .../docs/migration/cosmoshub-4-v10-upgrade.md | 296 + .../docs/migration/cosmoshub-4-v11-upgrade.md | 276 + .../docs/migration/cosmoshub-4-v12-upgrade.md | 283 + .../docs/migration/cosmoshub-4-v13-upgrade.md | 276 + .../docs/migration/cosmoshub-4-v14-upgrade.md | 244 + .../migration/cosmoshub-4-v5-delta-upgrade.md | 151 + .../migration/cosmoshub-4-v6-vega-upgrade.md | 271 + .../migration/cosmoshub-4-v7-Theta-upgrade.md | 274 + .../migration/cosmoshub-4-v8-Rho-upgrade.md | 296 + .../cosmoshub-4-v9-Lambda-upgrade.md | 296 + docs/docs/modules/README.md | 11 + docs/docs/modules/_category_.json | 5 + docs/docs/modules/globalfee.md | 303 + docs/docs/proto/_category_.json | 5 + docs/docs/resources/README.md | 12 + docs/docs/resources/_category_.json | 5 + docs/docs/resources/archives.md | 20 + docs/docs/resources/genesis.md | 335 + docs/docs/resources/hd-wallets.md | 60 + docs/docs/resources/ledger.md | 216 + docs/docs/resources/reproducible-builds.md | 62 + docs/docs/resources/service-providers.md | 287 + docs/docs/roadmap/README.md | 14 + docs/docs/roadmap/_category_.json | 5 + docs/docs/roadmap/cosmos-hub-roadmap-2.0.md | 10 + docs/docs/roadmap/launch/_category_.json | 5 + docs/docs/roadmap/launch/blog-1-en.md | 228 + docs/docs/roadmap/launch/blog-2-en.md | 74 + docs/docs/roadmap/previous-releases.md | 29 + docs/docs/validators/README.md | 14 + docs/docs/validators/_category_.json | 5 + docs/docs/validators/kms/_category_.json | 5 + docs/docs/validators/kms/kms.md | 37 + docs/docs/validators/kms/kms_ledger.md | 116 + docs/docs/validators/kms/ledger_1.jpg | Bin 0 -> 23307 bytes docs/docs/validators/kms/ledger_2.jpg | Bin 0 -> 24150 bytes docs/docs/validators/overview.mdx | 40 + docs/docs/validators/security.md | 59 + docs/docs/validators/validator-faq.md | 344 + docs/docs/validators/validator-setup.md | 179 + docs/docusaurus.config.js | 257 + docs/openapi/openapi.yml | 56847 ---------- docs/package-lock.json | 11841 ++ docs/package.json | 48 + docs/proto/proto-docs.md | 279 - docs/proto/protodoc-markdown.tmpl | 105 - docs/sidebars.js | 33 + docs/src/css/base.css | 35 + docs/src/css/custom.css | 535 + docs/src/css/fonts.css | 64 + docs/src/js/KeyValueTable.js | 30 + docs/src/js/Var.js | 18 + 
docs/static/.nojekyll | 0 docs/static/fonts/inter/Inter-Black.woff | Bin 0 -> 138764 bytes docs/static/fonts/inter/Inter-Black.woff2 | Bin 0 -> 102868 bytes .../static/fonts/inter/Inter-BlackItalic.woff | Bin 0 -> 146824 bytes .../fonts/inter/Inter-BlackItalic.woff2 | Bin 0 -> 108752 bytes docs/static/fonts/inter/Inter-Bold.woff | Bin 0 -> 143208 bytes docs/static/fonts/inter/Inter-Bold.woff2 | Bin 0 -> 106140 bytes docs/static/fonts/inter/Inter-BoldItalic.woff | Bin 0 -> 151052 bytes .../static/fonts/inter/Inter-BoldItalic.woff2 | Bin 0 -> 111808 bytes docs/static/fonts/inter/Inter-ExtraBold.woff | Bin 0 -> 142920 bytes docs/static/fonts/inter/Inter-ExtraBold.woff2 | Bin 0 -> 106108 bytes .../fonts/inter/Inter-ExtraBoldItalic.woff | Bin 0 -> 150628 bytes .../fonts/inter/Inter-ExtraBoldItalic.woff2 | Bin 0 -> 111708 bytes docs/static/fonts/inter/Inter-ExtraLight.woff | Bin 0 -> 140724 bytes .../static/fonts/inter/Inter-ExtraLight.woff2 | Bin 0 -> 104232 bytes .../fonts/inter/Inter-ExtraLightItalic.woff | Bin 0 -> 149996 bytes .../fonts/inter/Inter-ExtraLightItalic.woff2 | Bin 0 -> 111392 bytes docs/static/fonts/inter/Inter-Italic.woff | Bin 0 -> 144372 bytes docs/static/fonts/inter/Inter-Italic.woff2 | Bin 0 -> 106876 bytes docs/static/fonts/inter/Inter-Light.woff | Bin 0 -> 140632 bytes docs/static/fonts/inter/Inter-Light.woff2 | Bin 0 -> 104332 bytes .../static/fonts/inter/Inter-LightItalic.woff | Bin 0 -> 150092 bytes .../fonts/inter/Inter-LightItalic.woff2 | Bin 0 -> 111332 bytes docs/static/fonts/inter/Inter-Medium.woff | Bin 0 -> 142552 bytes docs/static/fonts/inter/Inter-Medium.woff2 | Bin 0 -> 105924 bytes .../fonts/inter/Inter-MediumItalic.woff | Bin 0 -> 150988 bytes .../fonts/inter/Inter-MediumItalic.woff2 | Bin 0 -> 112184 bytes docs/static/fonts/inter/Inter-Regular.woff | Bin 0 -> 133844 bytes docs/static/fonts/inter/Inter-Regular.woff2 | Bin 0 -> 98868 bytes docs/static/fonts/inter/Inter-SemiBold.woff | Bin 0 -> 142932 bytes docs/static/fonts/inter/Inter-SemiBold.woff2 | Bin 0 -> 105804 bytes .../fonts/inter/Inter-SemiBoldItalic.woff | Bin 0 -> 151180 bytes .../fonts/inter/Inter-SemiBoldItalic.woff2 | Bin 0 -> 112048 bytes docs/static/fonts/inter/Inter-Thin.woff | Bin 0 -> 135920 bytes docs/static/fonts/inter/Inter-Thin.woff2 | Bin 0 -> 99632 bytes docs/static/fonts/inter/Inter-ThinItalic.woff | Bin 0 -> 145480 bytes .../static/fonts/inter/Inter-ThinItalic.woff2 | Bin 0 -> 106496 bytes .../static/fonts/inter/Inter-italic.var.woff2 | Bin 0 -> 245036 bytes docs/static/fonts/inter/Inter-roman.var.woff2 | Bin 0 -> 227180 bytes docs/static/fonts/intervar/Inter.var.woff2 | Bin 0 -> 324864 bytes .../jetbrainsmono/JetBrainsMono-Bold.woff2 | Bin 0 -> 71180 bytes .../JetBrainsMono-BoldItalic.woff2 | Bin 0 -> 73976 bytes .../JetBrainsMono-ExtraBold.woff2 | Bin 0 -> 70124 bytes .../JetBrainsMono-ExtraBoldItalic.woff2 | Bin 0 -> 73264 bytes .../JetBrainsMono-ExtraLight.woff2 | Bin 0 -> 69780 bytes .../JetBrainsMono-ExtraLightItalic.woff2 | Bin 0 -> 72520 bytes .../jetbrainsmono/JetBrainsMono-Italic.woff2 | Bin 0 -> 72268 bytes .../jetbrainsmono/JetBrainsMono-Light.woff2 | Bin 0 -> 70416 bytes .../JetBrainsMono-LightItalic.woff2 | Bin 0 -> 73552 bytes .../jetbrainsmono/JetBrainsMono-Medium.woff2 | Bin 0 -> 70480 bytes .../JetBrainsMono-MediumItalic.woff2 | Bin 0 -> 73704 bytes .../jetbrainsmono/JetBrainsMono-Regular.woff2 | Bin 0 -> 69440 bytes .../JetBrainsMono-SemiBold.woff2 | Bin 0 -> 70820 bytes .../JetBrainsMono-SemiBoldItalic.woff2 | Bin 0 -> 74088 bytes 
.../jetbrainsmono/JetBrainsMono-Thin.woff2 | Bin 0 -> 68064 bytes .../JetBrainsMono-ThinItalic.woff2 | Bin 0 -> 71180 bytes docs/static/img/android-chrome-192x192.png | Bin 0 -> 4110 bytes docs/static/img/android-chrome-256x256.png | Bin 0 -> 5421 bytes docs/static/img/apple-touch-icon.png | Bin 0 -> 3744 bytes docs/static/img/banner.jpg | Bin 0 -> 140262 bytes docs/static/img/favicon copy.svg | 21 + docs/static/img/favicon-16x16.png | Bin 0 -> 632 bytes docs/static/img/favicon-32x32.png | Bin 0 -> 942 bytes docs/static/img/favicon-dark.svg | 15 + docs/static/img/favicon.svg | 21 + docs/static/img/hub.svg | 1 + docs/static/img/ico-chevron.svg | 3 + docs/static/img/ico-github.svg | 3 + docs/static/img/logo-bw-inverse.svg | 8 + docs/static/img/logo-bw.svg | 8 + docs/static/img/logo-sdk.svg | 10 + docs/static/img/logo.svg | 18 + docs/tailwind.config.js | 104 + docs/versions.json | 3 + docs/webpack.config.js | 11 + e2e.Dockerfile | 21 + genesis/mainnet/genesis-mainnet-1.json | 18327 ---- genesis/testnet/genesis-testnet-1.json | 90876 ---------------- go.mod | 246 +- go.sum | 2761 +- mlc_config.json | 10 + pkg/address/address.go | 22 + pkg/address/address_test.go | 43 + proto/buf.gen.gogo.yaml | 8 + proto/buf.lock | 38 + proto/buf.yaml | 31 + proto/gaia/globalfee/v1beta1/genesis.proto | 43 + proto/gaia/globalfee/v1beta1/query.proto | 27 + proto/gaia/metaprotocols/extensions.proto | 19 + proto/onomyprotocol/dao/v1/dao.proto | 69 - proto/onomyprotocol/dao/v1/genesis.proto | 20 - proto/onomyprotocol/dao/v1/params.proto | 33 - proto/onomyprotocol/dao/v1/query.proto | 45 - proto/scripts/protoc-swagger-gen.sh | 24 + proto/scripts/protocgen.sh | 17 + readme.md | 147 +- sims.mk | 70 + sonar-project.properties | 19 + tests/Cargo.toml | 17 - tests/README.md | 4 - tests/dockerfiles/.gitignore | 1 - tests/dockerfiles/chain_upgrade.dockerfile | 25 - .../dockerfile_resources/.gitignore | 3 - tests/e2e/address.go | 33 + tests/e2e/chain.go | 148 + tests/e2e/doc.go | 14 + tests/e2e/docker/hermes.Dockerfile | 9 + tests/e2e/e2e_bank_test.go | 218 + tests/e2e/e2e_bypassminfee_test.go | 190 + tests/e2e/e2e_distribution_test.go | 88 + tests/e2e/e2e_encode_test.go | 50 + tests/e2e/e2e_evidence_test.go | 53 + tests/e2e/e2e_exec_test.go | 981 + tests/e2e/e2e_feegrant_test.go | 105 + tests/e2e/e2e_globalfee_proposal_test.go | 123 + tests/e2e/e2e_globalfee_test.go | 335 + tests/e2e/e2e_gov_test.go | 231 + tests/e2e/e2e_ibc_test.go | 447 + tests/e2e/e2e_ics_test.go | 139 + tests/e2e/e2e_lsm_test.go | 221 + tests/e2e/e2e_query_exec_test.go | 47 + tests/e2e/e2e_rest_regression_test.go | 90 + tests/e2e/e2e_setup_test.go | 851 + tests/e2e/e2e_slashing_test.go | 23 + tests/e2e/e2e_staking_test.go | 143 + tests/e2e/e2e_test.go | 128 + tests/e2e/e2e_vesting_test.go | 336 + tests/e2e/genesis.go | 221 + tests/e2e/http_util.go | 40 + tests/e2e/io.go | 41 + tests/e2e/keys.go | 20 + tests/e2e/query.go | 316 + tests/e2e/scripts/hermes_bootstrap.sh | 152 + tests/e2e/util.go | 52 + tests/e2e/validator.go | 332 + tests/ics/interchain_security_test.go | 47 + tests/logs/.gitignore | 2 - tests/resources/.gitignore | 3 - tests/resources/keyring-test/.gitignore | 3 - tests/src/bin/auto_exec_i.rs | 22 - tests/src/bin/chain_upgrade.rs | 144 - tests/src/bin/clean.rs | 22 - tests/src/bin/ics_cdd.rs | 549 - tests/src/bin/onomyd_only.rs | 168 - tests/src/lib/common.rs | 56 - testutil/integration/onomy_chain.go | 296 - testutil/network/network.go | 85 - testutil/retry/retry_utils.go | 33 - testutil/simapp/simapp.go | 307 - 
third_party/proto/confio/proofs.proto | 234 - .../proto/cosmos/auth/v1beta1/auth.proto | 50 - .../proto/cosmos/auth/v1beta1/genesis.proto | 17 - .../proto/cosmos/auth/v1beta1/query.proto | 74 - .../proto/cosmos/authz/v1beta1/authz.proto | 27 - .../proto/cosmos/authz/v1beta1/event.proto | 25 - .../proto/cosmos/authz/v1beta1/genesis.proto | 24 - .../proto/cosmos/authz/v1beta1/query.proto | 35 - .../proto/cosmos/authz/v1beta1/tx.proto | 70 - .../proto/cosmos/bank/v1beta1/authz.proto | 19 - .../proto/cosmos/bank/v1beta1/bank.proto | 96 - .../proto/cosmos/bank/v1beta1/genesis.proto | 39 - .../proto/cosmos/bank/v1beta1/query.proto | 163 - .../proto/cosmos/bank/v1beta1/tx.proto | 42 - .../proto/cosmos/base/abci/v1beta1/abci.proto | 144 - .../proto/cosmos/base/kv/v1beta1/kv.proto | 17 - .../base/query/v1beta1/pagination.proto | 55 - .../base/reflection/v1beta1/reflection.proto | 44 - .../base/reflection/v2alpha1/reflection.proto | 218 - .../base/snapshots/v1beta1/snapshot.proto | 20 - .../base/store/v1beta1/commit_info.proto | 29 - .../cosmos/base/store/v1beta1/listening.proto | 16 - .../cosmos/base/store/v1beta1/snapshot.proto | 28 - .../base/tendermint/v1beta1/query.proto | 138 - .../proto/cosmos/base/v1beta1/coin.proto | 40 - .../capability/v1beta1/capability.proto | 30 - .../cosmos/capability/v1beta1/genesis.proto | 26 - .../proto/cosmos/crisis/v1beta1/genesis.proto | 15 - .../proto/cosmos/crisis/v1beta1/tx.proto | 25 - .../proto/cosmos/crypto/ed25519/keys.proto | 23 - .../proto/cosmos/crypto/multisig/keys.proto | 18 - .../crypto/multisig/v1beta1/multisig.proto | 25 - .../proto/cosmos/crypto/secp256k1/keys.proto | 22 - .../proto/cosmos/crypto/secp256r1/keys.proto | 23 - .../distribution/v1beta1/distribution.proto | 157 - .../cosmos/distribution/v1beta1/genesis.proto | 155 - .../cosmos/distribution/v1beta1/query.proto | 218 - .../cosmos/distribution/v1beta1/tx.proto | 79 - .../cosmos/evidence/v1beta1/evidence.proto | 21 - .../cosmos/evidence/v1beta1/genesis.proto | 12 - .../proto/cosmos/evidence/v1beta1/query.proto | 51 - .../proto/cosmos/evidence/v1beta1/tx.proto | 32 - .../cosmos/feegrant/v1beta1/feegrant.proto | 78 - .../cosmos/feegrant/v1beta1/genesis.proto | 13 - .../proto/cosmos/feegrant/v1beta1/query.proto | 55 - .../proto/cosmos/feegrant/v1beta1/tx.proto | 49 - .../cosmos/genutil/v1beta1/genesis.proto | 16 - .../proto/cosmos/gov/v1beta1/genesis.proto | 26 - .../proto/cosmos/gov/v1beta1/gov.proto | 200 - .../proto/cosmos/gov/v1beta1/query.proto | 190 - third_party/proto/cosmos/gov/v1beta1/tx.proto | 99 - .../proto/cosmos/mint/v1beta1/genesis.proto | 16 - .../proto/cosmos/mint/v1beta1/mint.proto | 53 - .../proto/cosmos/mint/v1beta1/query.proto | 57 - .../proto/cosmos/params/v1beta1/params.proto | 27 - .../proto/cosmos/params/v1beta1/query.proto | 32 - .../cosmos/slashing/v1beta1/genesis.proto | 50 - .../proto/cosmos/slashing/v1beta1/query.proto | 63 - .../cosmos/slashing/v1beta1/slashing.proto | 58 - .../proto/cosmos/slashing/v1beta1/tx.proto | 26 - .../proto/cosmos/staking/v1beta1/authz.proto | 47 - .../cosmos/staking/v1beta1/genesis.proto | 53 - .../proto/cosmos/staking/v1beta1/query.proto | 348 - .../cosmos/staking/v1beta1/staking.proto | 340 - .../proto/cosmos/staking/v1beta1/tx.proto | 126 - .../cosmos/tx/signing/v1beta1/signing.proto | 79 - .../proto/cosmos/tx/v1beta1/service.proto | 134 - third_party/proto/cosmos/tx/v1beta1/tx.proto | 183 - .../proto/cosmos/upgrade/v1beta1/query.proto | 104 - .../cosmos/upgrade/v1beta1/upgrade.proto | 78 - .../proto/cosmos/vesting/v1beta1/tx.proto 
| 31 - .../cosmos/vesting/v1beta1/vesting.proto | 85 - third_party/proto/cosmos_proto/cosmos.proto | 16 - third_party/proto/gogoproto/gogo.proto | 145 - .../proto/google/api/annotations.proto | 31 - third_party/proto/google/api/http.proto | 318 - third_party/proto/google/api/httpbody.proto | 78 - third_party/proto/google/protobuf/any.proto | 164 - .../proto/gravity/v1/attestation.proto | 56 - third_party/proto/gravity/v1/batch.proto | 37 - .../proto/gravity/v1/ethereum_signer.proto | 14 - third_party/proto/gravity/v1/genesis.proto | 181 - third_party/proto/gravity/v1/msgs.proto | 276 - third_party/proto/gravity/v1/pool.proto | 15 - third_party/proto/gravity/v1/query.proto | 242 - third_party/proto/gravity/v1/types.proto | 99 - third_party/proto/tendermint/abci/types.proto | 407 - .../proto/tendermint/crypto/keys.proto | 17 - .../proto/tendermint/crypto/proof.proto | 41 - .../proto/tendermint/libs/bits/types.proto | 9 - third_party/proto/tendermint/p2p/types.proto | 34 - .../proto/tendermint/types/block.proto | 15 - .../proto/tendermint/types/evidence.proto | 38 - .../proto/tendermint/types/params.proto | 80 - .../proto/tendermint/types/types.proto | 157 - .../proto/tendermint/types/validator.proto | 25 - .../proto/tendermint/version/types.proto | 24 - types/errors/errors.go | 37 + x/dao/abci.go | 43 - x/dao/abci_test.go | 690 - x/dao/client/cli/query.go | 79 - x/dao/client/cli/query_test.go | 102 - x/dao/client/cli/tx.go | 257 - x/dao/client/cli/tx_test.go | 169 - x/dao/client/proposal_handler.go | 29 - x/dao/genesis.go | 23 - x/dao/genesis_test.go | 83 - x/dao/handler.go | 42 - x/dao/keeper/delegation.go | 212 - x/dao/keeper/genesis.go | 30 - x/dao/keeper/grpc_query.go | 43 - x/dao/keeper/keeper.go | 70 - x/dao/keeper/mint.go | 27 - x/dao/keeper/params.go | 46 - x/dao/keeper/params_test.go | 40 - x/dao/keeper/proposal.go | 91 - x/dao/keeper/proposal_test.go | 567 - x/dao/keeper/reward.go | 33 - x/dao/keeper/treasury.go | 33 - x/dao/keeper/treasury_test.go | 53 - x/dao/keeper/voting.go | 32 - x/dao/module.go | 177 - x/dao/module_simulation.go | 50 - x/dao/spec/01_state.md | 18 - x/dao/spec/02_state_transitions.md | 55 - x/dao/spec/03_end_block.md | 11 - x/dao/spec/04_params.md | 14 - x/dao/spec/README.md | 31 - x/dao/types/codec.go | 33 - x/dao/types/dao.pb.go | 1339 - x/dao/types/errors.go | 12 - x/dao/types/expected_keepers.go | 62 - x/dao/types/genesis.go | 26 - x/dao/types/genesis.pb.go | 396 - x/dao/types/genesis_test.go | 136 - x/dao/types/keys.go | 18 - x/dao/types/params.go | 154 - x/dao/types/params.pb.go | 455 - x/dao/types/proposal.go | 225 - x/dao/types/proposal_test.go | 192 - x/dao/types/query.pb.go | 873 - x/dao/types/types.go | 2 - x/globalfee/README.md | 5 + x/globalfee/alias.go | 9 + x/globalfee/ante/antetest/fee_test.go | 798 + x/globalfee/ante/antetest/fee_test_setup.go | 121 + x/globalfee/ante/fee.go | 267 + x/globalfee/ante/fee_utils.go | 116 + x/globalfee/ante/fee_utils_test.go | 299 + x/globalfee/client/cli/query.go | 48 + x/globalfee/genesis_test.go | 164 + x/globalfee/keeper/migrations.go | 23 + x/globalfee/migrations/v2/migration.go | 38 + .../migrations/v2/v2_test/migration_test.go | 110 + x/globalfee/module.go | 137 + x/globalfee/querier.go | 52 + x/globalfee/types/genesis.go | 41 + x/globalfee/types/genesis.pb.go | 622 + x/globalfee/types/keys.go | 8 + x/globalfee/types/params.go | 158 + x/globalfee/types/params_test.go | 157 + x/globalfee/types/query.pb.go | 536 + x/{dao => globalfee}/types/query.pb.gw.go | 73 +- x/metaprotocols/README.md | 62 + 
x/metaprotocols/module.go | 94 + x/metaprotocols/types/codec.go | 19 + x/metaprotocols/types/extensions.pb.go | 430 + x/metaprotocols/types/keys.go | 5 + 818 files changed, 58089 insertions(+), 191820 deletions(-) create mode 100755 .build.sh create mode 100644 .changelog/config.toml create mode 100644 .changelog/epilogue.md create mode 100644 .changelog/unreleased/.gitkeep create mode 100644 .changelog/v15.0.0/api-breaking/2912-vote-spam.md create mode 100644 .changelog/v15.0.0/api-breaking/2967-bump-ics.md create mode 100644 .changelog/v15.0.0/api-breaking/2967-bump-sdk.md create mode 100644 .changelog/v15.0.0/bug-fixes/2912-vote-spam.md create mode 100644 .changelog/v15.0.0/bug-fixes/2967-bump-sdk.md create mode 100644 .changelog/v15.0.0/dependencies/2852-bump-comet.md create mode 100644 .changelog/v15.0.0/dependencies/2852-bump-ibc.md create mode 100644 .changelog/v15.0.0/dependencies/2852-bump-pfm.md create mode 100644 .changelog/v15.0.0/dependencies/2967-bump-ics.md create mode 100644 .changelog/v15.0.0/dependencies/2967-bump-sdk.md create mode 100644 .changelog/v15.0.0/features/2960-add-metaprotocols-support.md create mode 100644 .changelog/v15.0.0/state-breaking/2852-bump-comet.md create mode 100644 .changelog/v15.0.0/state-breaking/2852-bump-ibc.md create mode 100644 .changelog/v15.0.0/state-breaking/2852-bump-pfm.md create mode 100644 .changelog/v15.0.0/state-breaking/2855-migrate-min-commission-rate.md create mode 100644 .changelog/v15.0.0/state-breaking/2866-migrate-signing-infos.md create mode 100644 .changelog/v15.0.0/state-breaking/2891-migrate-vesting-funds.md create mode 100644 .changelog/v15.0.0/state-breaking/2912-vote-spam.md create mode 100644 .changelog/v15.0.0/state-breaking/2913-gov-spam.md create mode 100644 .changelog/v15.0.0/state-breaking/2960-add-metaprotocols-support.md create mode 100644 .changelog/v15.0.0/state-breaking/2967-bump-ics.md create mode 100644 .changelog/v15.0.0/state-breaking/2967-bump-sdk.md create mode 100644 .changelog/v15.0.0/summary.md create mode 100644 .changelog/v15.1.0/dependencies/2982-bump-pfm.md create mode 100644 .changelog/v15.1.0/features/2974-add-snapshot-commands.md create mode 100644 .changelog/v15.1.0/state-breaking/2982-bump-pfm.md create mode 100644 .changelog/v15.1.0/state-breaking/2993-migrate-escrow-accounts.md create mode 100644 .changelog/v15.1.0/summary.md create mode 100644 .changelog/v15.2.0/bug-fixes/3025-gov-metatdata-len.md create mode 100644 .changelog/v15.2.0/bug-fixes/3032-historic-tx-extensions.md create mode 100644 .changelog/v15.2.0/state-breaking/3025-gov-metatdata-len.md create mode 100644 .changelog/v15.2.0/state-breaking/3032-historic-tx-extensions.md create mode 100644 .changelog/v15.2.0/summary.md create mode 100644 .dockerignore create mode 100644 .gitattributes create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug-report.md create mode 100644 .github/ISSUE_TEMPLATE/epic-template.md create mode 100644 .github/ISSUE_TEMPLATE/feature-request.md create mode 100644 .github/ISSUE_TEMPLATE/issue-template.md create mode 100644 .github/ISSUE_TEMPLATE/tech-debt.md create mode 100644 .github/ISSUE_TEMPLATE/upgrade-checklist.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/PULL_REQUEST_TEMPLATE/docs.md create mode 100644 .github/PULL_REQUEST_TEMPLATE/others.md create mode 100644 .github/PULL_REQUEST_TEMPLATE/production.md create mode 100644 .github/codecov.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/stale.yml delete mode 
100644 .github/workflows/automated-release.yml delete mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/codeql-analysis.yml create mode 100644 .github/workflows/deploy-docs.yml delete mode 100644 .github/workflows/docker-devbase.yml create mode 100644 .github/workflows/docker-push.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/md-link-checker.yml create mode 100644 .github/workflows/nightly-tests.yml create mode 100644 .github/workflows/release-sims.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/sim-label.yml create mode 100644 .github/workflows/sims.yml create mode 100644 .github/workflows/stale.yml create mode 100644 .github/workflows/test.yml create mode 100644 .gitpod.yml create mode 100644 .goreleaser.yml create mode 100644 .mergify.yml delete mode 100644 .rustfmt.toml create mode 100644 CHANGELOG.md create mode 100644 CONTRIBUTING.md delete mode 100644 Cargo.toml create mode 100644 Dockerfile create mode 100644 RELEASE_NOTES.md create mode 100644 RELEASE_PROCESS.md create mode 100644 SECURITY.md create mode 100644 STATE-COMPATIBILITY.md create mode 100644 UPGRADING.md create mode 100644 ante/ante.go create mode 100644 ante/gov_vote_ante.go create mode 100644 ante/gov_vote_ante_test.go create mode 100644 app/app_helpers.go create mode 100644 app/app_test.go create mode 100644 app/const.go create mode 100644 app/encoding.go create mode 100644 app/genesis_account.go create mode 100644 app/genesis_account_fuzz_test.go create mode 100644 app/helpers/test_helpers.go create mode 100644 app/keepers/keepers.go create mode 100644 app/keepers/keys.go create mode 100644 app/modules.go create mode 100644 app/params/amino.go create mode 100644 app/params/doc.go create mode 100644 app/params/encoding.go create mode 100644 app/params/params.go create mode 100644 app/params/proto.go create mode 100644 app/params/weights.go create mode 100644 app/sim/sim_config.go create mode 100644 app/sim/sim_state.go create mode 100644 app/sim/sim_utils.go create mode 100644 app/sim_bench_test.go create mode 100644 app/sim_test.go delete mode 100644 app/upgrades/readme.md create mode 100644 app/upgrades/types.go delete mode 100644 app/upgrades/v1.0.1/upgrade.go delete mode 100644 app/upgrades/v1.0.3.4/upgrade.go delete mode 100644 app/upgrades/v1.0.3.5/upgrade.go delete mode 100644 app/upgrades/v1.0.3/upgrade.go delete mode 100644 app/upgrades/v1.1.1/upgrade.go delete mode 100644 app/upgrades/v1.1.2/upgrade.go delete mode 100644 app/upgrades/v1.1.4/upgrade.go create mode 100644 app/upgrades/v10/constants.go create mode 100644 app/upgrades/v10/upgrades.go create mode 100644 app/upgrades/v11/constants.go create mode 100644 app/upgrades/v11/upgrades.go create mode 100644 app/upgrades/v12/constants.go create mode 100644 app/upgrades/v12/upgrades.go create mode 100644 app/upgrades/v13/constants.go create mode 100644 app/upgrades/v13/upgrades.go create mode 100644 app/upgrades/v14/constants.go create mode 100644 app/upgrades/v14/upgrades.go create mode 100644 app/upgrades/v15/constants.go create mode 100644 app/upgrades/v15/upgrades.go create mode 100644 app/upgrades/v15/upgrades_test.go create mode 100644 app/upgrades/v7/constants.go create mode 100644 app/upgrades/v7/upgrades.go create mode 100644 app/upgrades/v8/constants.go create mode 100644 app/upgrades/v8/upgrades.go create mode 100644 app/upgrades/v9/constants.go create mode 100644 app/upgrades/v9/upgrades.go delete mode 100644 buf.yaml create mode 100644 
client/docs/config.json create mode 100644 client/docs/swagger-ui/favicon-16x16.png create mode 100644 client/docs/swagger-ui/favicon-32x32.png create mode 100644 client/docs/swagger-ui/index.html create mode 100644 client/docs/swagger-ui/oauth2-redirect.html create mode 100644 client/docs/swagger-ui/swagger-ui-bundle.js create mode 100644 client/docs/swagger-ui/swagger-ui-bundle.js.map create mode 100644 client/docs/swagger-ui/swagger-ui-es-bundle-core.js create mode 100644 client/docs/swagger-ui/swagger-ui-es-bundle-core.js.map create mode 100644 client/docs/swagger-ui/swagger-ui-es-bundle.js create mode 100644 client/docs/swagger-ui/swagger-ui-es-bundle.js.map create mode 100644 client/docs/swagger-ui/swagger-ui-standalone-preset.js create mode 100644 client/docs/swagger-ui/swagger-ui-standalone-preset.js.map create mode 100644 client/docs/swagger-ui/swagger-ui.css create mode 100644 client/docs/swagger-ui/swagger-ui.css.map create mode 100644 client/docs/swagger-ui/swagger-ui.js create mode 100644 client/docs/swagger-ui/swagger-ui.js.map create mode 100644 client/docs/swagger-ui/swagger.yaml create mode 100644 cmd/onomyd/cmd/bech32_convert.go delete mode 100644 cmd/onomyd/cmd/cmd.go create mode 100644 cmd/onomyd/cmd/genaccounts.go create mode 100644 cmd/onomyd/cmd/root.go create mode 100644 cmd/onomyd/cmd/root_test.go create mode 100644 cmd/onomyd/cmd/testnet.go delete mode 100644 cmd/onomyd/main_test.go create mode 100644 contrib/Dockerfile.test create mode 100644 contrib/denom.json create mode 100644 contrib/devtools/Makefile create mode 100644 contrib/generate_release_note/main.go create mode 100755 contrib/get_node.sh create mode 100644 contrib/githooks/README.md create mode 100755 contrib/githooks/pre-commit create mode 100644 contrib/githooks/precommit create mode 100755 contrib/scripts/local-gaia.sh create mode 100755 contrib/scripts/test_localnet_liveness.sh create mode 100755 contrib/scripts/upgrade_test_scripts/run_gaia.sh create mode 100755 contrib/scripts/upgrade_test_scripts/run_upgrade_commands.sh create mode 100755 contrib/scripts/upgrade_test_scripts/test_upgrade.sh create mode 100644 contrib/scripts/upgrade_test_scripts/v10/run_gaia_v9.sh create mode 100644 contrib/scripts/upgrade_test_scripts/v10/run_upgrade_commands_v10.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v11/run_gaia_v10.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v11/run_upgrade_commands_v11.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v11/test_migration_v11.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v12/run_gaia_v11.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v12/run_upgrade_commands_v12.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v8/run-gaia-v7.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v8/run-upgrade-commands-v8-rho.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v9/run-gaia-v8.sh create mode 100755 contrib/scripts/upgrade_test_scripts/v9/run-upgrade-commands.sh create mode 100755 contrib/single-node.sh create mode 100644 contrib/statesync.bash create mode 100644 contrib/testnets/Makefile create mode 100644 contrib/testnets/README.md create mode 100755 contrib/testnets/add-cluster.sh create mode 100755 contrib/testnets/add-datadog.sh create mode 100755 contrib/testnets/del-cluster.sh create mode 100755 contrib/testnets/del-datadog.sh create mode 100755 contrib/testnets/list.sh create mode 100755 contrib/testnets/new-testnet.sh create mode 100644 contrib/testnets/remote/ansible/.gitignore 
create mode 100644 contrib/testnets/remote/ansible/add-lcd.yml create mode 100644 contrib/testnets/remote/ansible/clear-config.yml create mode 100644 contrib/testnets/remote/ansible/extract-config.yml create mode 100644 contrib/testnets/remote/ansible/increase-openfiles.yml create mode 100644 contrib/testnets/remote/ansible/install-datadog-agent.yml create mode 100644 contrib/testnets/remote/ansible/inventory/COPYING create mode 100644 contrib/testnets/remote/ansible/inventory/digital_ocean.ini create mode 100755 contrib/testnets/remote/ansible/inventory/digital_ocean.py create mode 100644 contrib/testnets/remote/ansible/inventory/ec2.ini create mode 100755 contrib/testnets/remote/ansible/inventory/ec2.py create mode 100644 contrib/testnets/remote/ansible/logzio.yml create mode 100644 contrib/testnets/remote/ansible/remove-datadog-agent.yml create mode 100644 contrib/testnets/remote/ansible/roles/add-lcd/defaults/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/add-lcd/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/add-lcd/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/add-lcd/templates/gaiacli.service.j2 create mode 100644 contrib/testnets/remote/ansible/roles/clear-config/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/extract-config/defaults/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/extract-config/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/increase-openfiles/files/50-fs.conf create mode 100644 contrib/testnets/remote/ansible/roles/increase-openfiles/files/91-nofiles.conf create mode 100644 contrib/testnets/remote/ansible/roles/increase-openfiles/files/limits.conf create mode 100644 contrib/testnets/remote/ansible/roles/increase-openfiles/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/increase-openfiles/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/install-datadog-agent/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/install-datadog-agent/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/logzio/files/journalbeat.service create mode 100644 contrib/testnets/remote/ansible/roles/logzio/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/logzio/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/logzio/templates/journalbeat.yml.j2 create mode 100644 contrib/testnets/remote/ansible/roles/remove-datadog-agent/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/set-debug/files/sysconfig/gaiacli create mode 100644 contrib/testnets/remote/ansible/roles/set-debug/files/sysconfig/gaiad create mode 100644 contrib/testnets/remote/ansible/roles/set-debug/files/sysctl.d/10-procdump create mode 100644 contrib/testnets/remote/ansible/roles/set-debug/handlers/main.yaml create mode 100644 contrib/testnets/remote/ansible/roles/set-debug/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-fullnodes/defaults/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-fullnodes/files/gaiad.service create mode 100644 contrib/testnets/remote/ansible/roles/setup-fullnodes/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-fullnodes/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-journald/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-journald/tasks/main.yml create mode 100644 
contrib/testnets/remote/ansible/roles/setup-validators/defaults/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-validators/files/gaiad.service create mode 100644 contrib/testnets/remote/ansible/roles/setup-validators/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/setup-validators/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/start/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/stop/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/files/conf.d/http_check.d/conf.yaml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/files/conf.d/network.d/conf.yaml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/files/conf.d/process.d/conf.yaml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/files/conf.d/prometheus.d/conf.yaml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/update-datadog-agent/templates/datadog.yaml.j2 create mode 100644 contrib/testnets/remote/ansible/roles/upgrade-gaiad/handlers/main.yml create mode 100644 contrib/testnets/remote/ansible/roles/upgrade-gaiad/tasks/main.yml create mode 100644 contrib/testnets/remote/ansible/set-debug.yml create mode 100644 contrib/testnets/remote/ansible/setup-fullnodes.yml create mode 100644 contrib/testnets/remote/ansible/setup-journald.yml create mode 100644 contrib/testnets/remote/ansible/setup-validators.yml create mode 100644 contrib/testnets/remote/ansible/start.yml create mode 100644 contrib/testnets/remote/ansible/status.yml create mode 100644 contrib/testnets/remote/ansible/stop.yml create mode 100644 contrib/testnets/remote/ansible/update-datadog-agent.yml create mode 100644 contrib/testnets/remote/ansible/upgrade-gaia.yml create mode 100644 contrib/testnets/remote/ansible/upgrade-gaiad.yml create mode 100644 contrib/testnets/remote/terraform-app/.gitignore create mode 100644 contrib/testnets/remote/terraform-app/files/terraform.sh create mode 100644 contrib/testnets/remote/terraform-app/infra/attachment.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/instance.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/lb.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/lcd.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/outputs.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/variables.tf create mode 100644 contrib/testnets/remote/terraform-app/infra/vpc.tf create mode 100644 contrib/testnets/remote/terraform-app/main.tf create mode 100644 contrib/testnets/remote/terraform-aws/.gitignore create mode 100644 contrib/testnets/remote/terraform-aws/files/terraform.sh create mode 100644 contrib/testnets/remote/terraform-aws/main.tf create mode 100644 contrib/testnets/remote/terraform-aws/nodes/main.tf create mode 100644 contrib/testnets/remote/terraform-aws/nodes/outputs.tf create mode 100644 contrib/testnets/remote/terraform-aws/nodes/variables.tf create mode 100644 contrib/testnets/remote/terraform-do/.gitignore create mode 100644 contrib/testnets/remote/terraform-do/Makefile create mode 100644 contrib/testnets/remote/terraform-do/README.md create mode 100644 contrib/testnets/remote/terraform-do/cluster/main.tf create mode 100644 
contrib/testnets/remote/terraform-do/cluster/outputs.tf create mode 100644 contrib/testnets/remote/terraform-do/cluster/variables.tf create mode 100644 contrib/testnets/remote/terraform-do/files/terraform.sh create mode 100644 contrib/testnets/remote/terraform-do/main.tf create mode 100644 contrib/testnets/test_platform/README.md create mode 100644 contrib/testnets/test_platform/gaiad_config_manager.py create mode 100644 contrib/testnets/test_platform/templates/3924406.cosmoshub-3.json.tar.gz create mode 100644 contrib/testnets/test_platform/templates/app.toml create mode 100644 contrib/testnets/test_platform/templates/config.toml create mode 100644 contrib/testnets/test_platform/templates/replacement_defaults.txt create mode 100644 contrib/testnets/test_platform/templates/validator_replacement_example.json create mode 100755 contrib/testnets/upgrade-gaiad.sh create mode 100644 contrib/testnets/using-cleveldb.sh delete mode 100755 deploy/scripts/add-service.sh delete mode 100755 deploy/scripts/allow-cors.sh delete mode 100755 deploy/scripts/bin-mainnet-from-sources.sh delete mode 100755 deploy/scripts/bin-mainnet.sh delete mode 100755 deploy/scripts/bin-testnet-from-sources.sh delete mode 100755 deploy/scripts/bin-testnet.sh delete mode 100755 deploy/scripts/expose-metrics.sh delete mode 100755 deploy/scripts/init-mainnet-full-node.sh delete mode 100644 deploy/scripts/init-mainnet-statesync.sh delete mode 100755 deploy/scripts/init-testnet-full-node.sh delete mode 100755 deploy/scripts/init-testnet-statesync.sh delete mode 100755 deploy/scripts/set-snapshots.sh delete mode 100755 deploy/scripts/start-cosmovisor-onomyd.sh delete mode 100755 deploy/scripts/start-onomyd.sh delete mode 100755 deploy/scripts/stop-cosmovisor-onomyd.sh delete mode 100755 deploy/scripts/stop-onomyd.sh delete mode 100644 dev/base-image/Dockerfile delete mode 100644 dev/base-image/readme.md delete mode 100644 dev/openapi/config.json delete mode 100644 dev/openapi/swagger_legacy.yaml delete mode 100755 dev/scripts/protoc-swagger-gen.sh delete mode 100755 dev/scripts/protocgen.sh delete mode 100644 dev/tools/devtools.Dockerfile delete mode 100644 dev/tools/entrypoint.sh create mode 100644 docs/.gitignore create mode 100644 docs/DOCS_README.md create mode 100644 docs/README copy.md create mode 100644 docs/README.md create mode 100644 docs/babel.config.js create mode 100755 docs/build.sh delete mode 100644 docs/chain/bonding-curve.md delete mode 100644 docs/chain/full.md delete mode 100644 docs/chain/genesis-binaries.md delete mode 100644 docs/chain/installation.md delete mode 100644 docs/chain/mainnet.md delete mode 100644 docs/chain/set-ulimit-rhel8.md delete mode 100644 docs/chain/testnet.md delete mode 100644 docs/chain/validator.md delete mode 100644 docs/docs.go create mode 100644 docs/docs/architecture/PROCESS.md create mode 100644 docs/docs/architecture/README.md create mode 100644 docs/docs/architecture/_category_.json create mode 100644 docs/docs/architecture/adr/PROCESS.md create mode 100644 docs/docs/architecture/adr/README.md create mode 100644 docs/docs/architecture/adr/_category_.json create mode 100644 docs/docs/architecture/adr/adr-001-interchain-accounts.md create mode 100644 docs/docs/architecture/adr/adr-002-globalfee.md create mode 100644 docs/docs/architecture/templates/_category_.json create mode 100644 docs/docs/architecture/templates/adr-template.md create mode 100644 docs/docs/client/_category_.json create mode 100644 docs/docs/client/gaia/globalfee/v1beta1/query.swagger.json create mode 100644 
docs/docs/delegators/README.md create mode 100644 docs/docs/delegators/_category_.json create mode 100644 docs/docs/delegators/delegator-faq.md create mode 100644 docs/docs/delegators/delegator-guide-cli.md create mode 100644 docs/docs/delegators/delegator-security.md create mode 100644 docs/docs/getting-started/README.md create mode 100644 docs/docs/getting-started/_category_.json create mode 100644 docs/docs/getting-started/installation.md create mode 100644 docs/docs/getting-started/quickstart.mdx create mode 100644 docs/docs/getting-started/system-requirements.md create mode 100644 docs/docs/getting-started/what-is-gaia.md create mode 100644 docs/docs/governance/LICENSE create mode 100644 docs/docs/governance/README.md create mode 100644 docs/docs/governance/_category_.json create mode 100644 docs/docs/governance/best-practices.md create mode 100644 docs/docs/governance/current-parameters.js create mode 100644 docs/docs/governance/formatting.md create mode 100644 docs/docs/governance/process.md create mode 100644 docs/docs/governance/proposal-types/README.md create mode 100644 docs/docs/governance/proposal-types/_category_.json create mode 100644 docs/docs/governance/proposal-types/community-pool-spend.md create mode 100644 docs/docs/governance/proposal-types/community-pool-spend/_category_.json create mode 100644 docs/docs/governance/proposal-types/community-pool-spend/proposal.json create mode 100644 docs/docs/governance/proposal-types/param-change.md create mode 100644 docs/docs/governance/proposal-types/params-change/Auth.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Crisis.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Distribution.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Governance.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Mint.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Slashing.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/Staking.mdx create mode 100644 docs/docs/governance/proposal-types/params-change/param-index.mdx create mode 100644 docs/docs/governance/proposal-types/text-prop.md create mode 100644 docs/docs/governance/proposals/2020-10-blocks-per-year/README.md create mode 100644 docs/docs/governance/proposals/2020-10-blocks-per-year/proposal.json create mode 100644 docs/docs/governance/proposals/2020-11-inflation-rate-change/README.md create mode 100644 docs/docs/governance/proposals/2020-11-inflation-rate-change/proposal.json create mode 100644 docs/docs/governance/proposals/2021-01-atom2021_marketing/README.md create mode 100644 docs/docs/governance/proposals/2021-01-atom2021_marketing/proposal.json create mode 100644 docs/docs/governance/proposals/2021-01-delay-stargate-upgrade/README.md create mode 100644 docs/docs/governance/proposals/2021-01-delay-stargate-upgrade/proposal.json create mode 100644 docs/docs/governance/proposals/2021-01-stargate-upgrade-b/README.md create mode 100644 docs/docs/governance/proposals/2021-01-stargate-upgrade-b/proposal.json create mode 100644 docs/docs/governance/proposals/2021-01-stargate-upgrade/README.md create mode 100644 docs/docs/governance/proposals/2021-01-stargate-upgrade/proposal.json create mode 100644 docs/docs/governance/proposals/2021-03-enable-ibc-transfers/README.md create mode 100644 docs/docs/governance/proposals/2021-03-enable-ibc-transfers/proposal.json create mode 100644 docs/docs/governance/proposals/2021-04-advancing-ethermint/README.md create 
mode 100644 docs/docs/governance/proposals/2021-04-advancing-ethermint/proposal.json create mode 100644 docs/docs/governance/proposals/2021-04-lower-deposit-requirement/README.md create mode 100644 docs/docs/governance/proposals/2021-04-lower-deposit-requirement/proposal.json create mode 100644 docs/docs/governance/proposals/2021-04-prop34-continuation/README.md create mode 100644 docs/docs/governance/proposals/2021-04-prop34-continuation/proposal.json create mode 100644 docs/docs/governance/proposals/2021-05-gravity-bridge-deployment/README.md create mode 100644 docs/docs/governance/proposals/2021-05-gravity-bridge-deployment/proposal.json create mode 100644 docs/docs/governance/proposals/2021-07-atom-liquidity-incentives/README.md create mode 100644 docs/docs/governance/proposals/2021-07-atom-liquidity-incentives/proposal.json create mode 100644 docs/docs/governance/proposals/2021-09-hub-ibc-router/README.md create mode 100644 docs/docs/governance/proposals/2021-09-hub-ibc-router/proposal.json create mode 100644 docs/docs/governance/proposals/README.md create mode 100644 docs/docs/governance/proposals/_category_.json create mode 100644 docs/docs/governance/proposals/previous-proposals/README.md create mode 100644 docs/docs/governance/proposals/proposal-template.json create mode 100644 docs/docs/governance/proposals/proposal-template.md create mode 100644 docs/docs/governance/scripts/_category_.json create mode 100755 docs/docs/governance/scripts/extract_onchain_params.py create mode 100644 docs/docs/governance/state-of-cosmos-governance-2021.md create mode 100644 docs/docs/governance/submitting.md create mode 100644 docs/docs/guidelines/_category_.json create mode 100644 docs/docs/guidelines/code-guidelines.md create mode 100644 docs/docs/hub-tutorials/README.md create mode 100644 docs/docs/hub-tutorials/_category_.json create mode 100644 docs/docs/hub-tutorials/gaiad.mdx create mode 100644 docs/docs/hub-tutorials/join-mainnet.md create mode 100644 docs/docs/hub-tutorials/join-testnet.md create mode 100644 docs/docs/hub-tutorials/live-upgrade-tutorial.md create mode 100644 docs/docs/hub-tutorials/upgrade-node.md create mode 100755 docs/docs/images/cosmos-hub-image.jpg create mode 100644 docs/docs/images/ledger-tuto-dev-mode.png create mode 100644 docs/docs/images/ledger-tuto-lunie-address.png create mode 100644 docs/docs/images/ledger-tuto-lunie-option.png create mode 100644 docs/docs/images/ledger-tuto-manager.png create mode 100644 docs/docs/images/ledger-tuto-search.png create mode 100644 docs/docs/images/verify-tx.png create mode 100644 docs/docs/index.mdx create mode 100644 docs/docs/interchain-security/README.md create mode 100644 docs/docs/interchain-security/_category_.json create mode 100644 docs/docs/metaprotocols/README.md create mode 100644 docs/docs/metaprotocols/_category_.json create mode 100644 docs/docs/migration/README.md create mode 100644 docs/docs/migration/_category_.json create mode 100644 docs/docs/migration/cosmoshub-2/_category_.json create mode 100644 docs/docs/migration/cosmoshub-2/cosmoshub-2.md create mode 100644 docs/docs/migration/cosmoshub-3/_category_.json create mode 100644 docs/docs/migration/cosmoshub-3/cosmoshub-3.md create mode 100644 docs/docs/migration/cosmoshub-4-v10-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v11-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v12-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v13-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v14-upgrade.md create 
mode 100644 docs/docs/migration/cosmoshub-4-v5-delta-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v6-vega-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v7-Theta-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v8-Rho-upgrade.md create mode 100644 docs/docs/migration/cosmoshub-4-v9-Lambda-upgrade.md create mode 100644 docs/docs/modules/README.md create mode 100644 docs/docs/modules/_category_.json create mode 100644 docs/docs/modules/globalfee.md create mode 100644 docs/docs/proto/_category_.json create mode 100644 docs/docs/resources/README.md create mode 100644 docs/docs/resources/_category_.json create mode 100644 docs/docs/resources/archives.md create mode 100644 docs/docs/resources/genesis.md create mode 100644 docs/docs/resources/hd-wallets.md create mode 100644 docs/docs/resources/ledger.md create mode 100644 docs/docs/resources/reproducible-builds.md create mode 100644 docs/docs/resources/service-providers.md create mode 100644 docs/docs/roadmap/README.md create mode 100644 docs/docs/roadmap/_category_.json create mode 100644 docs/docs/roadmap/cosmos-hub-roadmap-2.0.md create mode 100644 docs/docs/roadmap/launch/_category_.json create mode 100644 docs/docs/roadmap/launch/blog-1-en.md create mode 100644 docs/docs/roadmap/launch/blog-2-en.md create mode 100644 docs/docs/roadmap/previous-releases.md create mode 100644 docs/docs/validators/README.md create mode 100644 docs/docs/validators/_category_.json create mode 100644 docs/docs/validators/kms/_category_.json create mode 100644 docs/docs/validators/kms/kms.md create mode 100644 docs/docs/validators/kms/kms_ledger.md create mode 100644 docs/docs/validators/kms/ledger_1.jpg create mode 100644 docs/docs/validators/kms/ledger_2.jpg create mode 100644 docs/docs/validators/overview.mdx create mode 100644 docs/docs/validators/security.md create mode 100644 docs/docs/validators/validator-faq.md create mode 100644 docs/docs/validators/validator-setup.md create mode 100644 docs/docusaurus.config.js delete mode 100644 docs/openapi/openapi.yml create mode 100644 docs/package-lock.json create mode 100644 docs/package.json delete mode 100644 docs/proto/proto-docs.md delete mode 100644 docs/proto/protodoc-markdown.tmpl create mode 100644 docs/sidebars.js create mode 100644 docs/src/css/base.css create mode 100644 docs/src/css/custom.css create mode 100644 docs/src/css/fonts.css create mode 100644 docs/src/js/KeyValueTable.js create mode 100644 docs/src/js/Var.js create mode 100644 docs/static/.nojekyll create mode 100644 docs/static/fonts/inter/Inter-Black.woff create mode 100644 docs/static/fonts/inter/Inter-Black.woff2 create mode 100644 docs/static/fonts/inter/Inter-BlackItalic.woff create mode 100644 docs/static/fonts/inter/Inter-BlackItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Bold.woff create mode 100644 docs/static/fonts/inter/Inter-Bold.woff2 create mode 100644 docs/static/fonts/inter/Inter-BoldItalic.woff create mode 100644 docs/static/fonts/inter/Inter-BoldItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-ExtraBold.woff create mode 100644 docs/static/fonts/inter/Inter-ExtraBold.woff2 create mode 100644 docs/static/fonts/inter/Inter-ExtraBoldItalic.woff create mode 100644 docs/static/fonts/inter/Inter-ExtraBoldItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-ExtraLight.woff create mode 100644 docs/static/fonts/inter/Inter-ExtraLight.woff2 create mode 100644 docs/static/fonts/inter/Inter-ExtraLightItalic.woff create mode 100644 
docs/static/fonts/inter/Inter-ExtraLightItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Italic.woff create mode 100644 docs/static/fonts/inter/Inter-Italic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Light.woff create mode 100644 docs/static/fonts/inter/Inter-Light.woff2 create mode 100644 docs/static/fonts/inter/Inter-LightItalic.woff create mode 100644 docs/static/fonts/inter/Inter-LightItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Medium.woff create mode 100644 docs/static/fonts/inter/Inter-Medium.woff2 create mode 100644 docs/static/fonts/inter/Inter-MediumItalic.woff create mode 100644 docs/static/fonts/inter/Inter-MediumItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Regular.woff create mode 100644 docs/static/fonts/inter/Inter-Regular.woff2 create mode 100644 docs/static/fonts/inter/Inter-SemiBold.woff create mode 100644 docs/static/fonts/inter/Inter-SemiBold.woff2 create mode 100644 docs/static/fonts/inter/Inter-SemiBoldItalic.woff create mode 100644 docs/static/fonts/inter/Inter-SemiBoldItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-Thin.woff create mode 100644 docs/static/fonts/inter/Inter-Thin.woff2 create mode 100644 docs/static/fonts/inter/Inter-ThinItalic.woff create mode 100644 docs/static/fonts/inter/Inter-ThinItalic.woff2 create mode 100644 docs/static/fonts/inter/Inter-italic.var.woff2 create mode 100644 docs/static/fonts/inter/Inter-roman.var.woff2 create mode 100644 docs/static/fonts/intervar/Inter.var.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Bold.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-BoldItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBold.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraBoldItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLight.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-ExtraLightItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Italic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Light.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-LightItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Medium.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-MediumItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Regular.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBold.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-SemiBoldItalic.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-Thin.woff2 create mode 100644 docs/static/fonts/jetbrainsmono/JetBrainsMono-ThinItalic.woff2 create mode 100644 docs/static/img/android-chrome-192x192.png create mode 100644 docs/static/img/android-chrome-256x256.png create mode 100644 docs/static/img/apple-touch-icon.png create mode 100755 docs/static/img/banner.jpg create mode 100644 docs/static/img/favicon copy.svg create mode 100644 docs/static/img/favicon-16x16.png create mode 100644 docs/static/img/favicon-32x32.png create mode 100644 docs/static/img/favicon-dark.svg create mode 100644 docs/static/img/favicon.svg create mode 100644 docs/static/img/hub.svg create mode 100644 docs/static/img/ico-chevron.svg create mode 100644 docs/static/img/ico-github.svg create mode 100644 docs/static/img/logo-bw-inverse.svg create mode 100644 docs/static/img/logo-bw.svg 
create mode 100644 docs/static/img/logo-sdk.svg create mode 100644 docs/static/img/logo.svg create mode 100644 docs/tailwind.config.js create mode 100644 docs/versions.json create mode 100644 docs/webpack.config.js create mode 100644 e2e.Dockerfile delete mode 100644 genesis/mainnet/genesis-mainnet-1.json delete mode 100644 genesis/testnet/genesis-testnet-1.json create mode 100644 mlc_config.json create mode 100644 pkg/address/address.go create mode 100644 pkg/address/address_test.go create mode 100644 proto/buf.gen.gogo.yaml create mode 100644 proto/buf.lock create mode 100644 proto/buf.yaml create mode 100644 proto/gaia/globalfee/v1beta1/genesis.proto create mode 100644 proto/gaia/globalfee/v1beta1/query.proto create mode 100644 proto/gaia/metaprotocols/extensions.proto delete mode 100644 proto/onomyprotocol/dao/v1/dao.proto delete mode 100644 proto/onomyprotocol/dao/v1/genesis.proto delete mode 100644 proto/onomyprotocol/dao/v1/params.proto delete mode 100644 proto/onomyprotocol/dao/v1/query.proto create mode 100755 proto/scripts/protoc-swagger-gen.sh create mode 100755 proto/scripts/protocgen.sh create mode 100644 sims.mk create mode 100644 sonar-project.properties delete mode 100644 tests/Cargo.toml delete mode 100644 tests/README.md delete mode 100644 tests/dockerfiles/.gitignore delete mode 100644 tests/dockerfiles/chain_upgrade.dockerfile delete mode 100644 tests/dockerfiles/dockerfile_resources/.gitignore create mode 100644 tests/e2e/address.go create mode 100644 tests/e2e/chain.go create mode 100644 tests/e2e/doc.go create mode 100644 tests/e2e/docker/hermes.Dockerfile create mode 100644 tests/e2e/e2e_bank_test.go create mode 100644 tests/e2e/e2e_bypassminfee_test.go create mode 100644 tests/e2e/e2e_distribution_test.go create mode 100644 tests/e2e/e2e_encode_test.go create mode 100644 tests/e2e/e2e_evidence_test.go create mode 100644 tests/e2e/e2e_exec_test.go create mode 100644 tests/e2e/e2e_feegrant_test.go create mode 100644 tests/e2e/e2e_globalfee_proposal_test.go create mode 100644 tests/e2e/e2e_globalfee_test.go create mode 100644 tests/e2e/e2e_gov_test.go create mode 100644 tests/e2e/e2e_ibc_test.go create mode 100644 tests/e2e/e2e_ics_test.go create mode 100644 tests/e2e/e2e_lsm_test.go create mode 100644 tests/e2e/e2e_query_exec_test.go create mode 100644 tests/e2e/e2e_rest_regression_test.go create mode 100644 tests/e2e/e2e_setup_test.go create mode 100644 tests/e2e/e2e_slashing_test.go create mode 100644 tests/e2e/e2e_staking_test.go create mode 100644 tests/e2e/e2e_test.go create mode 100644 tests/e2e/e2e_vesting_test.go create mode 100644 tests/e2e/genesis.go create mode 100644 tests/e2e/http_util.go create mode 100644 tests/e2e/io.go create mode 100644 tests/e2e/keys.go create mode 100644 tests/e2e/query.go create mode 100755 tests/e2e/scripts/hermes_bootstrap.sh create mode 100644 tests/e2e/util.go create mode 100644 tests/e2e/validator.go create mode 100644 tests/ics/interchain_security_test.go delete mode 100644 tests/logs/.gitignore delete mode 100644 tests/resources/.gitignore delete mode 100644 tests/resources/keyring-test/.gitignore delete mode 100644 tests/src/bin/auto_exec_i.rs delete mode 100644 tests/src/bin/chain_upgrade.rs delete mode 100644 tests/src/bin/clean.rs delete mode 100644 tests/src/bin/ics_cdd.rs delete mode 100644 tests/src/bin/onomyd_only.rs delete mode 100644 tests/src/lib/common.rs delete mode 100644 testutil/integration/onomy_chain.go delete mode 100644 testutil/network/network.go delete mode 100644 testutil/retry/retry_utils.go delete 
mode 100644 testutil/simapp/simapp.go delete mode 100644 third_party/proto/confio/proofs.proto delete mode 100644 third_party/proto/cosmos/auth/v1beta1/auth.proto delete mode 100644 third_party/proto/cosmos/auth/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/auth/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/authz/v1beta1/authz.proto delete mode 100644 third_party/proto/cosmos/authz/v1beta1/event.proto delete mode 100644 third_party/proto/cosmos/authz/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/authz/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/authz/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/bank/v1beta1/authz.proto delete mode 100644 third_party/proto/cosmos/bank/v1beta1/bank.proto delete mode 100644 third_party/proto/cosmos/bank/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/bank/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/bank/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/base/abci/v1beta1/abci.proto delete mode 100644 third_party/proto/cosmos/base/kv/v1beta1/kv.proto delete mode 100644 third_party/proto/cosmos/base/query/v1beta1/pagination.proto delete mode 100644 third_party/proto/cosmos/base/reflection/v1beta1/reflection.proto delete mode 100644 third_party/proto/cosmos/base/reflection/v2alpha1/reflection.proto delete mode 100644 third_party/proto/cosmos/base/snapshots/v1beta1/snapshot.proto delete mode 100644 third_party/proto/cosmos/base/store/v1beta1/commit_info.proto delete mode 100644 third_party/proto/cosmos/base/store/v1beta1/listening.proto delete mode 100644 third_party/proto/cosmos/base/store/v1beta1/snapshot.proto delete mode 100644 third_party/proto/cosmos/base/tendermint/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/base/v1beta1/coin.proto delete mode 100644 third_party/proto/cosmos/capability/v1beta1/capability.proto delete mode 100644 third_party/proto/cosmos/capability/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/crisis/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/crisis/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/crypto/ed25519/keys.proto delete mode 100644 third_party/proto/cosmos/crypto/multisig/keys.proto delete mode 100644 third_party/proto/cosmos/crypto/multisig/v1beta1/multisig.proto delete mode 100644 third_party/proto/cosmos/crypto/secp256k1/keys.proto delete mode 100644 third_party/proto/cosmos/crypto/secp256r1/keys.proto delete mode 100644 third_party/proto/cosmos/distribution/v1beta1/distribution.proto delete mode 100644 third_party/proto/cosmos/distribution/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/distribution/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/distribution/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/evidence/v1beta1/evidence.proto delete mode 100644 third_party/proto/cosmos/evidence/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/evidence/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/evidence/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/feegrant/v1beta1/feegrant.proto delete mode 100644 third_party/proto/cosmos/feegrant/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/feegrant/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/feegrant/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/genutil/v1beta1/genesis.proto delete mode 100644 
third_party/proto/cosmos/gov/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/gov/v1beta1/gov.proto delete mode 100644 third_party/proto/cosmos/gov/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/gov/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/mint/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/mint/v1beta1/mint.proto delete mode 100644 third_party/proto/cosmos/mint/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/params/v1beta1/params.proto delete mode 100644 third_party/proto/cosmos/params/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/slashing/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/slashing/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/slashing/v1beta1/slashing.proto delete mode 100644 third_party/proto/cosmos/slashing/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/staking/v1beta1/authz.proto delete mode 100644 third_party/proto/cosmos/staking/v1beta1/genesis.proto delete mode 100644 third_party/proto/cosmos/staking/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/staking/v1beta1/staking.proto delete mode 100644 third_party/proto/cosmos/staking/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/tx/signing/v1beta1/signing.proto delete mode 100644 third_party/proto/cosmos/tx/v1beta1/service.proto delete mode 100644 third_party/proto/cosmos/tx/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/upgrade/v1beta1/query.proto delete mode 100644 third_party/proto/cosmos/upgrade/v1beta1/upgrade.proto delete mode 100644 third_party/proto/cosmos/vesting/v1beta1/tx.proto delete mode 100644 third_party/proto/cosmos/vesting/v1beta1/vesting.proto delete mode 100644 third_party/proto/cosmos_proto/cosmos.proto delete mode 100644 third_party/proto/gogoproto/gogo.proto delete mode 100644 third_party/proto/google/api/annotations.proto delete mode 100644 third_party/proto/google/api/http.proto delete mode 100644 third_party/proto/google/api/httpbody.proto delete mode 100644 third_party/proto/google/protobuf/any.proto delete mode 100644 third_party/proto/gravity/v1/attestation.proto delete mode 100644 third_party/proto/gravity/v1/batch.proto delete mode 100644 third_party/proto/gravity/v1/ethereum_signer.proto delete mode 100644 third_party/proto/gravity/v1/genesis.proto delete mode 100644 third_party/proto/gravity/v1/msgs.proto delete mode 100644 third_party/proto/gravity/v1/pool.proto delete mode 100644 third_party/proto/gravity/v1/query.proto delete mode 100644 third_party/proto/gravity/v1/types.proto delete mode 100644 third_party/proto/tendermint/abci/types.proto delete mode 100644 third_party/proto/tendermint/crypto/keys.proto delete mode 100644 third_party/proto/tendermint/crypto/proof.proto delete mode 100644 third_party/proto/tendermint/libs/bits/types.proto delete mode 100644 third_party/proto/tendermint/p2p/types.proto delete mode 100644 third_party/proto/tendermint/types/block.proto delete mode 100644 third_party/proto/tendermint/types/evidence.proto delete mode 100644 third_party/proto/tendermint/types/params.proto delete mode 100644 third_party/proto/tendermint/types/types.proto delete mode 100644 third_party/proto/tendermint/types/validator.proto delete mode 100644 third_party/proto/tendermint/version/types.proto create mode 100644 types/errors/errors.go delete mode 100644 x/dao/abci.go delete mode 100644 x/dao/abci_test.go delete mode 100644 x/dao/client/cli/query.go delete mode 100644 
x/dao/client/cli/query_test.go delete mode 100644 x/dao/client/cli/tx.go delete mode 100644 x/dao/client/cli/tx_test.go delete mode 100644 x/dao/client/proposal_handler.go delete mode 100644 x/dao/genesis.go delete mode 100644 x/dao/genesis_test.go delete mode 100644 x/dao/handler.go delete mode 100644 x/dao/keeper/delegation.go delete mode 100644 x/dao/keeper/genesis.go delete mode 100644 x/dao/keeper/grpc_query.go delete mode 100644 x/dao/keeper/keeper.go delete mode 100644 x/dao/keeper/mint.go delete mode 100644 x/dao/keeper/params.go delete mode 100644 x/dao/keeper/params_test.go delete mode 100644 x/dao/keeper/proposal.go delete mode 100644 x/dao/keeper/proposal_test.go delete mode 100644 x/dao/keeper/reward.go delete mode 100644 x/dao/keeper/treasury.go delete mode 100644 x/dao/keeper/treasury_test.go delete mode 100644 x/dao/keeper/voting.go delete mode 100644 x/dao/module.go delete mode 100644 x/dao/module_simulation.go delete mode 100644 x/dao/spec/01_state.md delete mode 100644 x/dao/spec/02_state_transitions.md delete mode 100644 x/dao/spec/03_end_block.md delete mode 100644 x/dao/spec/04_params.md delete mode 100644 x/dao/spec/README.md delete mode 100644 x/dao/types/codec.go delete mode 100644 x/dao/types/dao.pb.go delete mode 100644 x/dao/types/errors.go delete mode 100644 x/dao/types/expected_keepers.go delete mode 100644 x/dao/types/genesis.go delete mode 100644 x/dao/types/genesis.pb.go delete mode 100644 x/dao/types/genesis_test.go delete mode 100644 x/dao/types/keys.go delete mode 100644 x/dao/types/params.go delete mode 100644 x/dao/types/params.pb.go delete mode 100644 x/dao/types/proposal.go delete mode 100644 x/dao/types/proposal_test.go delete mode 100644 x/dao/types/query.pb.go delete mode 100644 x/dao/types/types.go create mode 100644 x/globalfee/README.md create mode 100644 x/globalfee/alias.go create mode 100644 x/globalfee/ante/antetest/fee_test.go create mode 100644 x/globalfee/ante/antetest/fee_test_setup.go create mode 100644 x/globalfee/ante/fee.go create mode 100644 x/globalfee/ante/fee_utils.go create mode 100644 x/globalfee/ante/fee_utils_test.go create mode 100644 x/globalfee/client/cli/query.go create mode 100644 x/globalfee/genesis_test.go create mode 100644 x/globalfee/keeper/migrations.go create mode 100644 x/globalfee/migrations/v2/migration.go create mode 100644 x/globalfee/migrations/v2/v2_test/migration_test.go create mode 100644 x/globalfee/module.go create mode 100644 x/globalfee/querier.go create mode 100644 x/globalfee/types/genesis.go create mode 100644 x/globalfee/types/genesis.pb.go create mode 100644 x/globalfee/types/keys.go create mode 100644 x/globalfee/types/params.go create mode 100644 x/globalfee/types/params_test.go create mode 100644 x/globalfee/types/query.pb.go rename x/{dao => globalfee}/types/query.pb.gw.go (64%) create mode 100644 x/metaprotocols/README.md create mode 100644 x/metaprotocols/module.go create mode 100644 x/metaprotocols/types/codec.go create mode 100644 x/metaprotocols/types/extensions.pb.go create mode 100644 x/metaprotocols/types/keys.go diff --git a/.build.sh b/.build.sh new file mode 100755 index 00000000..1f7a1488 --- /dev/null +++ b/.build.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +set -ue + +# Expect the following envvars to be set: +# - APP +# - VERSION +# - COMMIT +# - TARGET_OS +# - LEDGER_ENABLED +# - DEBUG + +# Source builder's functions library +. 
/usr/local/share/tendermint/buildlib.sh + +# These variables are now available +# - BASEDIR +# - OUTDIR + +# Build for each os-architecture pair +for platform in ${TARGET_PLATFORMS} ; do + # This function sets GOOS, GOARCH, and OS_FILE_EXT environment variables + # according to the build target platform. OS_FILE_EXT is empty in all + # cases except when the target platform is 'windows'. + setup_build_env_for_platform "${platform}" + + make clean + echo Building for $(go env GOOS)/$(go env GOARCH) >&2 + GOROOT_FINAL="$(go env GOROOT)" \ + make build \ + LDFLAGS=-buildid=${VERSION} \ + VERSION=${VERSION} \ + COMMIT=${COMMIT} \ + LEDGER_ENABLED=${LEDGER_ENABLED} + mv ./build/${APP}${OS_FILE_EXT} ${OUTDIR}/${APP}-${VERSION}-$(go env GOOS)-$(go env GOARCH)${OS_FILE_EXT} + + # This function restore the build environment variables to their + # original state. + restore_build_env +done + +# Generate and display build report. +generate_build_report +cat ${OUTDIR}/build_report diff --git a/.changelog/config.toml b/.changelog/config.toml new file mode 100644 index 00000000..b0b05b14 --- /dev/null +++ b/.changelog/config.toml @@ -0,0 +1,20 @@ +project_url = "https://github.com/onomyprotocol/onomy-rebuild" + +# Settings related to components/sub-modules. Only relevant if you make use of +# components/sub-modules. +[components] + +# The title to use for the section of entries not relating to a specific +# component. +general_entries_title = "General" + +# The number of spaces to inject before each component-related entry. +entry_indent = 2 + + # The components themselves. Each component has a name (used when rendered + # to Markdown) and a path relative to the project folder (i.e. relative to + # the parent of the `.changelog` folder). + [components.all] + globalfee = { name = "GlobalFee", path = "x/globalfee" } + tests = { name = "Tests", path = "tests" } + docs = { name = "Documentation", path = "docs" } \ No newline at end of file diff --git a/.changelog/epilogue.md b/.changelog/epilogue.md new file mode 100644 index 00000000..f61a8d60 --- /dev/null +++ b/.changelog/epilogue.md @@ -0,0 +1,3 @@ +## Previous Versions + +[CHANGELOG of previous versions](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CHANGELOG.md) \ No newline at end of file diff --git a/.changelog/unreleased/.gitkeep b/.changelog/unreleased/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/.changelog/v15.0.0/api-breaking/2912-vote-spam.md b/.changelog/v15.0.0/api-breaking/2912-vote-spam.md new file mode 100644 index 00000000..425f7ede --- /dev/null +++ b/.changelog/v15.0.0/api-breaking/2912-vote-spam.md @@ -0,0 +1,2 @@ +- Reject `MsgVote` messages from accounts with less than 1 atom staked. + ([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) \ No newline at end of file diff --git a/.changelog/v15.0.0/api-breaking/2967-bump-ics.md b/.changelog/v15.0.0/api-breaking/2967-bump-ics.md new file mode 100644 index 00000000..5f93f180 --- /dev/null +++ b/.changelog/v15.0.0/api-breaking/2967-bump-ics.md @@ -0,0 +1,4 @@ +- The consumer CCV genesis state obtained from the provider chain needs to be + transformed to be compatible with older versions of consumer chains + (see [ICS docs](https://cosmos.github.io/interchain-security/consumer-development/consumer-genesis-transformation)). 
+ ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) diff --git a/.changelog/v15.0.0/api-breaking/2967-bump-sdk.md b/.changelog/v15.0.0/api-breaking/2967-bump-sdk.md new file mode 100644 index 00000000..e9b7c3c3 --- /dev/null +++ b/.changelog/v15.0.0/api-breaking/2967-bump-sdk.md @@ -0,0 +1,8 @@ +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + As compared to [v0.47.10](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10), + this special branch of cosmos-sdk has the following API-breaking changes: + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Limit the accepted deposit coins for a proposal to the minimum proposal deposit denoms (e.g., `uatom` for Cosmos Hub). ([sdk-#19302](https://github.com/cosmos/cosmos-sdk/pull/19302)) + - Add denom check to reject denoms outside of those listed in `MinDeposit`. A new `MinDepositRatio` param is added (with a default value of `0.01`) and now deposits are required to be at least `MinDepositRatio*MinDeposit` to be accepted. ([sdk-#19312](https://github.com/cosmos/cosmos-sdk/pull/19312)) + - Disable the `DenomOwners` query. ([sdk-#19266](https://github.com/cosmos/cosmos-sdk/pull/19266)) \ No newline at end of file diff --git a/.changelog/v15.0.0/bug-fixes/2912-vote-spam.md b/.changelog/v15.0.0/bug-fixes/2912-vote-spam.md new file mode 100644 index 00000000..96e9e053 --- /dev/null +++ b/.changelog/v15.0.0/bug-fixes/2912-vote-spam.md @@ -0,0 +1,2 @@ +- Add ante handler that only allows `MsgVote` messages from accounts with at least + 1 atom staked. ([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) \ No newline at end of file diff --git a/.changelog/v15.0.0/bug-fixes/2967-bump-sdk.md b/.changelog/v15.0.0/bug-fixes/2967-bump-sdk.md new file mode 100644 index 00000000..ab8cac3a --- /dev/null +++ b/.changelog/v15.0.0/bug-fixes/2967-bump-sdk.md @@ -0,0 +1,12 @@ +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This special branch of cosmos-sdk backports a series of fixes for issues found + during the [Oak Security audit of SDK 0.47](https://github.com/oak-security/audit-reports/blob/master/Cosmos%20SDK/2024-01-23%20Audit%20Report%20-%20Cosmos%20SDK%20v1.0.pdf). + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Backport [sdk-#18146](https://github.com/cosmos/cosmos-sdk/pull/18146): Add denom check to reject denoms outside of those listed in `MinDeposit`. A new `MinDepositRatio` param is added (with a default value of `0.01`) and now deposits are required to be at least `MinDepositRatio*MinDeposit` to be accepted. ([sdk-#19312](https://github.com/cosmos/cosmos-sdk/pull/19312)) + - Partially backport [sdk-#18047](https://github.com/cosmos/cosmos-sdk/pull/18047): Add a limit of 200 grants pruned per `EndBlock` in the feegrant module. ([sdk-#19314](https://github.com/cosmos/cosmos-sdk/pull/19314)) + - Partially backport [skd-#18737](https://github.com/cosmos/cosmos-sdk/pull/18737): Add a limit of 200 grants pruned per `BeginBlock` in the authz module. ([sdk-#19315](https://github.com/cosmos/cosmos-sdk/pull/19315)) + - Backport [sdk-#18173](https://github.com/cosmos/cosmos-sdk/pull/18173): Gov Hooks now returns error and are "blocking" if they fail. Expect for `AfterProposalFailedMinDeposit` and `AfterProposalVotingPeriodEnded` that will log the error and continue. 
([sdk-#19305](https://github.com/cosmos/cosmos-sdk/pull/19305)) + - Backport [sdk-#18189](https://github.com/cosmos/cosmos-sdk/pull/18189): Limit the accepted deposit coins for a proposal to the minimum proposal deposit denoms. ([sdk-#19302](https://github.com/cosmos/cosmos-sdk/pull/19302)) + - Backport [sdk-#18214](https://github.com/cosmos/cosmos-sdk/pull/18214) and [sdk-#17352](https://github.com/cosmos/cosmos-sdk/pull/17352): Ensure that modifying the argument to `NewUIntFromBigInt` and `NewIntFromBigInt` doesn't mutate the returned value. ([sdk-#19293](https://github.com/cosmos/cosmos-sdk/pull/19293)) + \ No newline at end of file diff --git a/.changelog/v15.0.0/dependencies/2852-bump-comet.md b/.changelog/v15.0.0/dependencies/2852-bump-comet.md new file mode 100644 index 00000000..ab2cd6d1 --- /dev/null +++ b/.changelog/v15.0.0/dependencies/2852-bump-comet.md @@ -0,0 +1,3 @@ +- Bump [CometBFT](https://github.com/cometbft/cometbft) + to [v0.37.4](https://github.com/cometbft/cometbft/releases/tag/v0.37.4) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/dependencies/2852-bump-ibc.md b/.changelog/v15.0.0/dependencies/2852-bump-ibc.md new file mode 100644 index 00000000..7060d1d0 --- /dev/null +++ b/.changelog/v15.0.0/dependencies/2852-bump-ibc.md @@ -0,0 +1,3 @@ +- Bump [ibc-go](https://github.com/cosmos/ibc-go) to + [v7.3.1](https://github.com/cosmos/ibc-go/releases/tag/v7.3.1) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/dependencies/2852-bump-pfm.md b/.changelog/v15.0.0/dependencies/2852-bump-pfm.md new file mode 100644 index 00000000..468f2dea --- /dev/null +++ b/.changelog/v15.0.0/dependencies/2852-bump-pfm.md @@ -0,0 +1,3 @@ +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) + to [v7.1.2](https://github.com/cosmos/ibc-apps/releases/tag/middleware%2Fpacket-forward-middleware%2Fv7.1.2) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/dependencies/2967-bump-ics.md b/.changelog/v15.0.0/dependencies/2967-bump-ics.md new file mode 100644 index 00000000..43929ae6 --- /dev/null +++ b/.changelog/v15.0.0/dependencies/2967-bump-ics.md @@ -0,0 +1,3 @@ +- Bump [ICS](https://github.com/cosmos/interchain-security) to + [v3.3.3-lsm](https://github.com/cosmos/interchain-security/releases/tag/v3.3.3-lsm) + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) \ No newline at end of file diff --git a/.changelog/v15.0.0/dependencies/2967-bump-sdk.md b/.changelog/v15.0.0/dependencies/2967-bump-sdk.md new file mode 100644 index 00000000..d6d625dc --- /dev/null +++ b/.changelog/v15.0.0/dependencies/2967-bump-sdk.md @@ -0,0 +1,4 @@ +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This is a special cosmos-sdk branch with support for both ICS and LSM. + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) \ No newline at end of file diff --git a/.changelog/v15.0.0/features/2960-add-metaprotocols-support.md b/.changelog/v15.0.0/features/2960-add-metaprotocols-support.md new file mode 100644 index 00000000..e182796e --- /dev/null +++ b/.changelog/v15.0.0/features/2960-add-metaprotocols-support.md @@ -0,0 +1,2 @@ +- Add support for metaprotocols using Tx extension options. 
+ ([\#2960](https://github.com/onomyprotocol/onomy-rebuild/pull/2960)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2852-bump-comet.md b/.changelog/v15.0.0/state-breaking/2852-bump-comet.md new file mode 100644 index 00000000..ab2cd6d1 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2852-bump-comet.md @@ -0,0 +1,3 @@ +- Bump [CometBFT](https://github.com/cometbft/cometbft) + to [v0.37.4](https://github.com/cometbft/cometbft/releases/tag/v0.37.4) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2852-bump-ibc.md b/.changelog/v15.0.0/state-breaking/2852-bump-ibc.md new file mode 100644 index 00000000..7060d1d0 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2852-bump-ibc.md @@ -0,0 +1,3 @@ +- Bump [ibc-go](https://github.com/cosmos/ibc-go) to + [v7.3.1](https://github.com/cosmos/ibc-go/releases/tag/v7.3.1) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2852-bump-pfm.md b/.changelog/v15.0.0/state-breaking/2852-bump-pfm.md new file mode 100644 index 00000000..468f2dea --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2852-bump-pfm.md @@ -0,0 +1,3 @@ +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) + to [v7.1.2](https://github.com/cosmos/ibc-apps/releases/tag/middleware%2Fpacket-forward-middleware%2Fv7.1.2) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2855-migrate-min-commission-rate.md b/.changelog/v15.0.0/state-breaking/2855-migrate-min-commission-rate.md new file mode 100644 index 00000000..3ccae084 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2855-migrate-min-commission-rate.md @@ -0,0 +1,7 @@ +- Set min commission rate staking parameter to `5%` + ([prop 826](https://www.mintscan.io/cosmos/proposals/826)) + and update the commission rate for all validators that have a commission + rate less than `5%`. ([\#2855](https://github.com/onomyprotocol/onomy-rebuild/pull/2855)) + + + diff --git a/.changelog/v15.0.0/state-breaking/2866-migrate-signing-infos.md b/.changelog/v15.0.0/state-breaking/2866-migrate-signing-infos.md new file mode 100644 index 00000000..7376f8cf --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2866-migrate-signing-infos.md @@ -0,0 +1,5 @@ +- Migrate the signing infos of validators for which the consensus address is missing. +([\#2886](https://github.com/onomyprotocol/onomy-rebuild/pull/2886)) + + + diff --git a/.changelog/v15.0.0/state-breaking/2891-migrate-vesting-funds.md b/.changelog/v15.0.0/state-breaking/2891-migrate-vesting-funds.md new file mode 100644 index 00000000..8f45a5a3 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2891-migrate-vesting-funds.md @@ -0,0 +1,3 @@ +- Migrate vesting funds from "cosmos145hytrc49m0hn6fphp8d5h4xspwkawcuzmx498" + to community pool according to signal prop [860](https://www.mintscan.io/cosmos/proposals/860). + ([\#2891](https://github.com/onomyprotocol/onomy-rebuild/pull/2891)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2912-vote-spam.md b/.changelog/v15.0.0/state-breaking/2912-vote-spam.md new file mode 100644 index 00000000..96e9e053 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2912-vote-spam.md @@ -0,0 +1,2 @@ +- Add ante handler that only allows `MsgVote` messages from accounts with at least + 1 atom staked. 
([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2913-gov-spam.md b/.changelog/v15.0.0/state-breaking/2913-gov-spam.md new file mode 100644 index 00000000..d8feffb7 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2913-gov-spam.md @@ -0,0 +1,3 @@ +- Remove `GovPreventSpamDecorator` and initialize the `MinInitialDepositRatio` gov + param to `10%`. + ([\#2913](https://github.com/onomyprotocol/onomy-rebuild/pull/2913)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2960-add-metaprotocols-support.md b/.changelog/v15.0.0/state-breaking/2960-add-metaprotocols-support.md new file mode 100644 index 00000000..e182796e --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2960-add-metaprotocols-support.md @@ -0,0 +1,2 @@ +- Add support for metaprotocols using Tx extension options. + ([\#2960](https://github.com/onomyprotocol/onomy-rebuild/pull/2960)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2967-bump-ics.md b/.changelog/v15.0.0/state-breaking/2967-bump-ics.md new file mode 100644 index 00000000..43929ae6 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2967-bump-ics.md @@ -0,0 +1,3 @@ +- Bump [ICS](https://github.com/cosmos/interchain-security) to + [v3.3.3-lsm](https://github.com/cosmos/interchain-security/releases/tag/v3.3.3-lsm) + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) \ No newline at end of file diff --git a/.changelog/v15.0.0/state-breaking/2967-bump-sdk.md b/.changelog/v15.0.0/state-breaking/2967-bump-sdk.md new file mode 100644 index 00000000..8e2e6357 --- /dev/null +++ b/.changelog/v15.0.0/state-breaking/2967-bump-sdk.md @@ -0,0 +1,5 @@ +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This is a special cosmos-sdk branch with support for both ICS and LSM. + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Skip running `addDenomReverseIndex` in `bank/v3` migration as it is prohibitively expensive to run on the Cosmos Hub. ([sdk-#19266](https://github.com/cosmos/cosmos-sdk/pull/19266)) \ No newline at end of file diff --git a/.changelog/v15.0.0/summary.md b/.changelog/v15.0.0/summary.md new file mode 100644 index 00000000..259d75b1 --- /dev/null +++ b/.changelog/v15.0.0/summary.md @@ -0,0 +1 @@ +*February 20, 2024* diff --git a/.changelog/v15.1.0/dependencies/2982-bump-pfm.md b/.changelog/v15.1.0/dependencies/2982-bump-pfm.md new file mode 100644 index 00000000..e036bcb5 --- /dev/null +++ b/.changelog/v15.1.0/dependencies/2982-bump-pfm.md @@ -0,0 +1,2 @@ +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) to `v7.1.3-0.20240228213828-cce7f56d000b`. 
+ ([\#2982](https://github.com/onomyprotocol/onomy-rebuild/pull/2982)) \ No newline at end of file diff --git a/.changelog/v15.1.0/features/2974-add-snapshot-commands.md b/.changelog/v15.1.0/features/2974-add-snapshot-commands.md new file mode 100644 index 00000000..d129deb2 --- /dev/null +++ b/.changelog/v15.1.0/features/2974-add-snapshot-commands.md @@ -0,0 +1 @@ +- Add onomyd snapshots command set ([\#2974](https://github.com/onomyprotocol/onomy-rebuild/pull/2974)) diff --git a/.changelog/v15.1.0/state-breaking/2982-bump-pfm.md b/.changelog/v15.1.0/state-breaking/2982-bump-pfm.md new file mode 100644 index 00000000..e036bcb5 --- /dev/null +++ b/.changelog/v15.1.0/state-breaking/2982-bump-pfm.md @@ -0,0 +1,2 @@ +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) to `v7.1.3-0.20240228213828-cce7f56d000b`. + ([\#2982](https://github.com/onomyprotocol/onomy-rebuild/pull/2982)) \ No newline at end of file diff --git a/.changelog/v15.1.0/state-breaking/2993-migrate-escrow-accounts.md b/.changelog/v15.1.0/state-breaking/2993-migrate-escrow-accounts.md new file mode 100644 index 00000000..63d7e4fe --- /dev/null +++ b/.changelog/v15.1.0/state-breaking/2993-migrate-escrow-accounts.md @@ -0,0 +1,3 @@ +- Mint and transfer missing assets in escrow accounts + to reach parity with counterparty chain supply. + ([\#2993](https://github.com/onomyprotocol/onomy-rebuild/pull/2993)) \ No newline at end of file diff --git a/.changelog/v15.1.0/summary.md b/.changelog/v15.1.0/summary.md new file mode 100644 index 00000000..aa56eae6 --- /dev/null +++ b/.changelog/v15.1.0/summary.md @@ -0,0 +1 @@ +*March 15, 2024* diff --git a/.changelog/v15.2.0/bug-fixes/3025-gov-metatdata-len.md b/.changelog/v15.2.0/bug-fixes/3025-gov-metatdata-len.md new file mode 100644 index 00000000..66fcfa66 --- /dev/null +++ b/.changelog/v15.2.0/bug-fixes/3025-gov-metatdata-len.md @@ -0,0 +1 @@ +- Increase x/gov metadata fields length to 10200 ([\#3025](https://github.com/onomyprotocol/onomy-rebuild/pull/3025)) diff --git a/.changelog/v15.2.0/bug-fixes/3032-historic-tx-extensions.md b/.changelog/v15.2.0/bug-fixes/3032-historic-tx-extensions.md new file mode 100644 index 00000000..f04b64b8 --- /dev/null +++ b/.changelog/v15.2.0/bug-fixes/3032-historic-tx-extensions.md @@ -0,0 +1 @@ +- Fix parsing of historic Txs with TxExtensionOptions ([\#3032](https://github.com/onomyprotocol/onomy-rebuild/pull/3032)) \ No newline at end of file diff --git a/.changelog/v15.2.0/state-breaking/3025-gov-metatdata-len.md b/.changelog/v15.2.0/state-breaking/3025-gov-metatdata-len.md new file mode 100644 index 00000000..66fcfa66 --- /dev/null +++ b/.changelog/v15.2.0/state-breaking/3025-gov-metatdata-len.md @@ -0,0 +1 @@ +- Increase x/gov metadata fields length to 10200 ([\#3025](https://github.com/onomyprotocol/onomy-rebuild/pull/3025)) diff --git a/.changelog/v15.2.0/state-breaking/3032-historic-tx-extensions.md b/.changelog/v15.2.0/state-breaking/3032-historic-tx-extensions.md new file mode 100644 index 00000000..f04b64b8 --- /dev/null +++ b/.changelog/v15.2.0/state-breaking/3032-historic-tx-extensions.md @@ -0,0 +1 @@ +- Fix parsing of historic Txs with TxExtensionOptions ([\#3032](https://github.com/onomyprotocol/onomy-rebuild/pull/3032)) \ No newline at end of file diff --git a/.changelog/v15.2.0/summary.md b/.changelog/v15.2.0/summary.md new file mode 100644 index 00000000..4c55d041 --- /dev/null +++ b/.changelog/v15.2.0/summary.md @@ -0,0 +1,2 @@ +*March 29, 2024* + diff --git a/.dockerignore b/.dockerignore new file mode 100644 
index 00000000..4a11244b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +node_modules +build +.github +.vscode \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..e69de29b diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..11f3c66e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,5 @@ +# CODEOWNERS: https://help.github.com/articles/about-codeowners/ + +# Primary repo maintainers +* @alexanderbez @zmanian @crodriguezvega @jackzampolin @cosmos/informal_onomy_maintain + diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 00000000..b0116ba9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,34 @@ +--- +name: Bug Report +about: Create a report to help us squash bugs! +labels: bug, needs-triage +--- + + + +## Summary of Bug + + + +## Version + + + +## Steps to Reproduce + + + +____ + +#### For Admin Use + +- [ ] Not duplicate issue +- [ ] Appropriate labels applied +- [ ] Appropriate contributors tagged +- [ ] Contributor assigned/self-assigned +- [ ] Is a spike necessary to map out how the issue should be approached? + diff --git a/.github/ISSUE_TEMPLATE/epic-template.md b/.github/ISSUE_TEMPLATE/epic-template.md new file mode 100644 index 00000000..6738bb12 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/epic-template.md @@ -0,0 +1,30 @@ +--- +name: EPIC Template +about: Basic template for EPICs (used by the team) +labels: epic, needs-triage +--- + +## Problem + + + +## Closing criteria + + + + +## Problem details + + + +## Task list + +```[tasklist] +### Must have + +``` + +```[tasklist] +### Nice to have + +``` \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 00000000..b7948227 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,50 @@ +--- +name: Feature Request +about: Create a proposal to request a feature +labels: enhancement, epic, needs-triage +--- + + + +## Summary + + + +## Problem Definition + + + +## Proposal + + + +## Task list + +```[tasklist] +### Must have +- [ ] discuss proposal (if proposal rejected, close EPIC) +- [ ] create ADR (if ADR rejected, close EPIC) +- [ ] add sub-tasks needed to implement the proposed feature +``` + +```[tasklist] +### Nice to have +- [ ] add sub-tasks that are nice to have for the proposed feature +``` +____ + +#### For Admin Use + +- [ ] Not duplicate issue +- [ ] Appropriate labels applied +- [ ] Appropriate contributors tagged +- [ ] Contributor assigned/self-assigned +- [ ] Is a spike necessary to map out how the issue should be approached? 
diff --git a/.github/ISSUE_TEMPLATE/issue-template.md b/.github/ISSUE_TEMPLATE/issue-template.md new file mode 100644 index 00000000..2ce98fd8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue-template.md @@ -0,0 +1,24 @@ +--- +name: Issue Template +about: Basic template for issues (used by the team) +labels: needs-triage +--- + + + +# Problem + + + +# Closing criteria + + + + +# Problem details + + diff --git a/.github/ISSUE_TEMPLATE/tech-debt.md b/.github/ISSUE_TEMPLATE/tech-debt.md new file mode 100644 index 00000000..781cd9e0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/tech-debt.md @@ -0,0 +1,54 @@ +--- +name: Tech Debt +about: Create an issue to address and reduce technical debt +label: technical-debt, needs-triage + +--- + + + +## Summary + + + +## Type + + + +## Impact + + + +## Proposed Solution + + + +____ + +#### For Admin Use + +- [ ] Not duplicate issue +- [ ] Appropriate labels applied +- [ ] Appropriate contributors tagged +- [ ] Contributor assigned/self-assigned +- [ ] Is a spike necessary to map out how the issue should be approached? \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/upgrade-checklist.md b/.github/ISSUE_TEMPLATE/upgrade-checklist.md new file mode 100644 index 00000000..5bed0ae1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/upgrade-checklist.md @@ -0,0 +1,58 @@ +--- +name: Cosmos Hub Upgrade Checklist +about: Create a checklist for an upgrade +labels: epic, needs-triage +--- + +## Cosmos Hub Upgrade to Onomy + + + +```[tasklist] +### After Cutting Release Candidate +- [ ] Coordinate with Hypha to test release candidate +- [ ] Create proposal text draft +- [ ] Post proposal text draft on forum +- [ ] Upgrade release and replicated security testnets (note: on Wednesdays) +- [ ] Review post-upgrade status of affected features if necessary +``` + +```[tasklist] +### Before Proposal Submission (TODO sync on a call) +- [ ] Cut final release +- [ ] Predict block height for target date +- [ ] Update/proofread proposal text +- [ ] Transfer deposit amount (i.e., 250 ATOMs) to submitter wallet +- [ ] Create upgrade docs (with disclaimer upgrade prop still being voted on) +- [ ] Coordinate with marketing/comms to prep communication channels/posts +``` + +```[tasklist] +### Voting Period +- [ ] Estimate threshold of validators that are aware of proposal and have voted or confirmed their vote +- [ ] Coordinate with marketing/comms to update on voting progress (and any change in upgrade time) +``` + +```[tasklist] +## Proposal Passed +- [ ] Determine "on-call" team: available on Discord in [#cosmos-hub-validators-verified](https://discord.com/channels/669268347736686612/798937713474142229) during upgrade +- [ ] Coordinate with marketing/comms on who will be available, increase regular upgrade time updates and validator outreach +- [ ] Prep Onomy docs: `docs/getting-started/quickstart.md`, `docs/hub-tutorials/join-mainnet.md`, `docs/migration/` (open PR) +- [ ] Prep chain-registry update: [cosmoshub/chain.json](https://github.com/toschdev/chain-registry/blob/master/cosmoshub/chain.json) (open PR) +- [ ] Prep [cosmos mainnet repo](https://github.com/cosmos/mainnet) update (open PR) +- [ ] Prep internal statesync node for upgrade (confirm cosmovisor configured) +- [ ] Reach out to main dependency teams -- Comet, IBC, SDK -- for assistance during the upgrade (#onomy-release-warroom on Slack) +``` + +```[tasklist] +## During Upgrade (note: on Wednesdays at 15:00 UTC) +- [ ] Available on Discord in 
[#cosmos-hub-validators-verified](https://discord.com/channels/669268347736686612/798937713474142229) +- [ ] Available on Twitter / Slack / Telegram +``` + +```[tasklist] +## Post Upgrade +- [ ] Merge PRs for Onomy docs & chain-registry update +- [ ] FAQ: collect issues on upgrade from discord +- [ ] Hold validator feedback session +``` diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..1036e0b3 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,5 @@ +Please go the the `Preview` tab and select the appropriate sub-template: + +* [Production code](?expand=1&template=production.md) - for types `fix`, `feat`, and `refactor`. +* [Docs](?expand=1&template=docs.md) - for documentation changes. +* [Others](?expand=1&template=others.md) - for changes that do not affect production code. \ No newline at end of file diff --git a/.github/PULL_REQUEST_TEMPLATE/docs.md b/.github/PULL_REQUEST_TEMPLATE/docs.md new file mode 100644 index 00000000..ea00b7f4 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/docs.md @@ -0,0 +1,38 @@ +## Description + +Closes: #XXXX + + + + +--- + +### Author Checklist + +*All items are required. Please add a note to the item if the item is not applicable and +please add links to any relevant follow up issues.* + +I have... + +- [ ] included the correct `docs:` prefix in the PR title +- [ ] targeted the correct branch (see [PR Targeting](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CONTRIBUTING.md#pr-targeting)) +- [ ] provided a link to the relevant issue or specification +- [ ] reviewed "Files changed" and left comments if necessary +- [ ] confirmed all CI checks have passed + +### Reviewers Checklist + +*All items are required. Please add a note if the item is not applicable and please add +your handle next to the items reviewed if you only reviewed selected items.* + +I have... + +- [ ] Confirmed the correct `docs:` prefix in the PR title +- [ ] Confirmed all author checklist items have been addressed +- [ ] Confirmed that this PR only changes documentation +- [ ] Reviewed content for consistency +- [ ] Reviewed content for thoroughness +- [ ] Reviewed content for spelling and grammar +- [ ] Tested instructions (if applicable) + diff --git a/.github/PULL_REQUEST_TEMPLATE/others.md b/.github/PULL_REQUEST_TEMPLATE/others.md new file mode 100644 index 00000000..e595384a --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/others.md @@ -0,0 +1,33 @@ +## Description + +Closes: #XXXX + + + +--- + +### Author Checklist + +*All items are required. Please add a note to the item if the item is not applicable and +please add links to any relevant follow up issues.* + +I have... + +- [ ] Included the correct [type prefix](https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json) in the PR title +- [ ] Targeted the correct branch (see [PR Targeting](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CONTRIBUTING.md#pr-targeting)) +- [ ] Provided a link to the relevant issue or specification +- [ ] Reviewed "Files changed" and left comments if necessary +- [ ] Confirmed all CI checks have passed + +### Reviewers Checklist + +*All items are required. Please add a note if the item is not applicable and please add +your handle next to the items reviewed if you only reviewed selected items.* + +I have... 
+ +- [ ] Confirmed the correct [type prefix](https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json) in the PR title +- [ ] Confirmed all author checklist items have been addressed +- [ ] Confirmed that this PR does not change production code + diff --git a/.github/PULL_REQUEST_TEMPLATE/production.md b/.github/PULL_REQUEST_TEMPLATE/production.md new file mode 100644 index 00000000..661a9f49 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/production.md @@ -0,0 +1,48 @@ + + +## Description + +Closes: #XXXX + + + + + +--- + +### Author Checklist + +*All items are required. Please add a note to the item if the item is not applicable and +please add links to any relevant follow up issues.* + +I have... + +* [ ] Included the correct [type prefix](https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json) in the PR title +* [ ] Added `!` to the type prefix if API, client, or state breaking change (i.e., requires minor or major version bump) +* [ ] Targeted the correct branch (see [PR Targeting](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CONTRIBUTING.md#pr-targeting)) +* [ ] Provided a link to the relevant issue or specification +* [ ] Followed the guidelines for [building SDK modules](https://github.com/cosmos/cosmos-sdk/blob/main/docs/docs/building-modules) +* [ ] Included the necessary unit and integration [tests](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CONTRIBUTING.md#testing) +* [ ] Added a changelog entry in `.changelog` (for details, see [contributing guidelines](../../CONTRIBUTING.md#changelog)) +* [ ] Included comments for [documenting Go code](https://blog.golang.org/godoc) +* [ ] Updated the relevant documentation or specification +* [ ] Reviewed "Files changed" and left comments if necessary +* [ ] Confirmed all CI checks have passed + +### Reviewers Checklist + +*All items are required. Please add a note if the item is not applicable and please add +your handle next to the items reviewed if you only reviewed selected items.* + +I have... 
+ +* [ ] confirmed the correct [type prefix](https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json) in the PR title +* [ ] confirmed `!` in the type prefix if API or client breaking change +* [ ] confirmed all author checklist items have been addressed +* [ ] reviewed state machine logic +* [ ] reviewed API design and naming +* [ ] reviewed documentation is accurate +* [ ] reviewed tests and test coverage diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 00000000..3839e741 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,33 @@ +coverage: + precision: 2 + round: down + range: 70...100 + status: + project: + default: + threshold: 1% # allow this much decrease on project + app: + target: 80% + paths: # this must be a list type + - "app/" + changes: false + +comment: + layout: "reach, diff, files" + behavior: default # update if exists else create new + require_changes: true + +ignore: + - "*.pb.go" + - "*.pb.gw.go" + - "*.md" + - "*.rst" + - "cmd" + - "client" + - "contrib" + - "docs" + - "proto" + - "tests/e2e" + - "app/app_helpers.go" + - "app/sim" + - "app/upgrades" \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..1296f84b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,48 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 + labels: + - "A:automerge" + + - package-ecosystem: gomod + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 + labels: + - "A:automerge" + - dependencies + + - package-ecosystem: gomod + directory: "/" + schedule: + interval: daily + target-branch: "release/v14.1.x" + # Only allow automated security-related dependency updates on release branches. + open-pull-requests-limit: 0 + labels: + - dependencies + + - package-ecosystem: gomod + directory: "/" + schedule: + interval: daily + target-branch: "release/v13.x" + # Only allow automated security-related dependency updates on release branches. + open-pull-requests-limit: 0 + labels: + - dependencies + + - package-ecosystem: gomod + directory: "/" + schedule: + interval: daily + target-branch: "release/v12.x" + # Only allow automated security-related dependency updates on release branches. + open-pull-requests-limit: 0 + labels: + - dependencies diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 00000000..435cfeaa --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,45 @@ +# Configuration for probot-stale - https://github.com/probot/stale + +# Number of days of inactivity before an Issue or Pull Request becomes stale +daysUntilStale: 10 + +# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. +# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. +daysUntilClose: 4 + +# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) +onlyLabels: [] + +# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable +exemptLabels: + - blocked + - pinned + - security + +# Set to true to ignore issues in a project (defaults to false) +exemptProjects: true + +# Set to true to ignore issues in a milestone (defaults to false) +exemptMilestones: true + +# Label to use when marking as stale +staleLabel: stale + +# Comment to post when marking as stale. 
Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. Thank you + for your contributions. +# Limit the number of actions per hour, from 1-30. Default is 30 +limitPerRun: 30 + +# Limit to only `issues` or `pulls` +only: pulls + +# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls': +pulls: + daysUntilStale: 30 + markComment: > + This pull request has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. Thank you + for your contributions. diff --git a/.github/workflows/automated-release.yml b/.github/workflows/automated-release.yml deleted file mode 100644 index 9fc66fc3..00000000 --- a/.github/workflows/automated-release.yml +++ /dev/null @@ -1,64 +0,0 @@ -on: - push: - tags: - - "v*" - -name: Automated release build - -jobs: - build: - name: Build and upload release assets - runs-on: ubuntu-latest - - steps: - - name: Set up Go 1.x - uses: actions/setup-go@v2 - with: - go-version: ^1.19 - id: go - - - name: Checkout code - uses: actions/checkout@v2 - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ github.ref }} - release_name: ${{ github.ref }} - draft: false - prerelease: true - - # build & upload onomyd - - - name: Build onomyd - run: make build - - - name: Upload onomyd - id: upload-onomyd-release-asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: onomyd - asset_name: onomyd - asset_content_type: application/bin - - # build & upload onomyd arm64 - - - name: Build onomyd arm64 - run: GOARCH=arm64 make build - - - name: Upload onomyd arm64 - id: upload-onomyd-release-asset-arm - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: onomyd - asset_name: onomyd-arm - asset_content_type: application/bin diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 58b278b5..00000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: CI - -on: - push: - branches: - - 'dev' - - 'main' - pull_request: - -jobs: - test_suite: - runs-on: ubuntu-latest - steps: - - name: Set up Go 1.x - uses: actions/setup-go@v2 - with: - go-version: ^1.19 - id: go - - name: Install Rust components - run: | - rustup set profile minimal - rustup default stable - - name: Checkout - uses: actions/checkout@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Cache Docker layers - uses: actions/cache@v2 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-buildx- - - name: test suite - # TODO there is some unused stuff we need to fix or remove - # make all-in-docker - run: | - cargo r --bin chain_upgrade - go test ./... 
- cargo r --bin onomyd_only - cargo r --bin ics_cdd - cargo r --bin clean - - rustfmt: - name: Rustfmt - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - # Acquire the most recent nightly with a rustfmt component - - name: Install most recent Rustfmt - run: | - rustup set profile minimal - rustup default "nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/rustfmt)" - rustup component add rustfmt - - name: Run `cargo fmt` - run: | - cargo fmt -- --check - - clippy: - name: Clippy - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - # Acquire the most recent nightly with a clippy component - - name: Install most recent Clippy - run: | - rustup set profile minimal - rustup default "nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/clippy)" - rustup component add clippy - - name: Run `cargo clippy` - run: | - cargo clippy --all --all-targets --all-features -- -D clippy::all diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..7ac33f80 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,61 @@ +name: "CodeQL" + +on: + pull_request: + paths: + - "**.go" + push: + branches: + - main + - release/v* + - feat/** + paths: + - "**.go" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "1.21" + check-latest: true + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: "go" + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + queries: +security-and-quality,github/codeql/go/ql/src/experimental/InconsistentCode/DeferInLoop.ql@main,github/codeql/go/ql/src/experimental/Unsafe/WrongUsageOfUnsafe.ql@main,github/codeql/go/ql/src/experimental/CWE-369/DivideByZero.ql@main + packs: +crypto-com/cosmos-sdk-codeql + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 00000000..31c4af1b --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,47 @@ +name: Deploy docs +# This job builds and deploys documenation to github pages. +# It runs on every push to main with a change in the docs folder. 
+on: + workflow_dispatch: + push: + branches: + - main + # - "release/**" + paths: + - "docs/**" + # - "x/**/*.md" + - .github/workflows/deploy-docs.yml + +permissions: + contents: read + +jobs: + build-and-deploy: + permissions: + contents: write # for JamesIves/github-pages-deploy-action to push changes in repo + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️ + uses: actions/checkout@v3 + with: + persist-credentials: false + fetch-depth: 0 + path: "." + + - name: Setup Node.js 🔧 + uses: actions/setup-node@v3 + with: + node-version: "16.x" + + # npm install npm should be removed when https://github.com/npm/cli/issues/4942 is fixed + - name: Build 🔧 + run: | + npm install -g npm@8.5.5 + make build-docs + + - name: Deploy 🚀 + uses: JamesIves/github-pages-deploy-action@v4.4.3 + with: + branch: gh-pages + folder: ~/output + single-commit: true \ No newline at end of file diff --git a/.github/workflows/docker-devbase.yml b/.github/workflows/docker-devbase.yml deleted file mode 100644 index 3ca2c332..00000000 --- a/.github/workflows/docker-devbase.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Docker dev base Build & Push - -on: - push: - branches: - - 'dev' - paths: - - "dev/base-image/Dockerfile" - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Publish to Docker Hub - uses: docker/build-push-action@v2 - with: - context: dev/base-image - file: dev/base-image/Dockerfile - push: true - tags: onomy/dev-base:latest diff --git a/.github/workflows/docker-push.yml b/.github/workflows/docker-push.yml new file mode 100644 index 00000000..50e6f6e8 --- /dev/null +++ b/.github/workflows/docker-push.yml @@ -0,0 +1,51 @@ +# source: https://docs.github.com/en/enterprise-cloud@latest/actions/publishing-packages/publishing-docker-images +name: Create and publish a Docker image + +on: + push: + branches: ['release'] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push-image: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Log in to the Container registry + uses: docker/login-action@v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5.5.1 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Docker image + uses: docker/build-push-action@v5.1.0 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + - name: Build and push e2e docker image + uses: docker/build-push-action@v5.1.0 + with: + context: . 
+ file: Dockerfile.e2e + push: true + tags: ${{ steps.meta.outputs.tags }}-e2e + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..7e75883b --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,33 @@ +name: Lint +on: + push: + branches: + - main + - release/** + - feat/** + pull_request: +permissions: + contents: read +jobs: + golangci: + name: golangci-lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "1.21" + check-latest: true + - uses: technote-space/get-diff-action@v6.1.2 + id: git_diff + with: + PATTERNS: | + **/*.go + go.mod + go.sum + **/go.mod + **/go.sum + - name: run linting + if: env.GIT_DIFF + run: | + make lint \ No newline at end of file diff --git a/.github/workflows/md-link-checker.yml b/.github/workflows/md-link-checker.yml new file mode 100644 index 00000000..b9071633 --- /dev/null +++ b/.github/workflows/md-link-checker.yml @@ -0,0 +1,13 @@ +name: Check Markdown links +on: + workflow_dispatch: + schedule: + - cron: '* */24 * * *' +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: gaurav-nelson/github-action-markdown-link-check@1.0.15 + with: + folder-path: "docs" \ No newline at end of file diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml new file mode 100644 index 00000000..4bae7b84 --- /dev/null +++ b/.github/workflows/nightly-tests.yml @@ -0,0 +1,63 @@ +name: "Nightly E2E run" +on: + workflow_call: + workflow_dispatch: + schedule: + # run every day at 03:00 UTC + - cron: "0 3 * * *" + +jobs: + + run-tests: + uses: onomyprotocol/onomy-rebuild/.github/workflows/test.yml@main + + run-simulations: + uses: onomyprotocol/onomy-rebuild/.github/workflows/sims.yml@main + + run-vulncheck: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "1.21" + check-latest: true + - name: run-vulncheck + id: vulncheck + run: make vulncheck + + warn-if-failure: + if: failure() + needs: [ run-tests, run-vulncheck, run-simulations] + runs-on: ubuntu-latest + steps: + - name: Notify Slack on failure + uses: slackapi/slack-github-action@v1.25.0 + env: + SLACK_WEBHOOK_URL: ${{ secrets.NIGHTLY_E2E_SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + BRANCH: ${{ github.ref_name }} + RUN_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + COMMITS_URL: "${{ github.server_url }}/${{ github.repository }}/commits/${{ github.ref_name }}" + with: + payload: | + { + "blocks": [ + { + "type": "header", + "text": { + "type": "plain_text", + "text": "❗Nightly tests failed", + "emoji": true + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "See the <${{ env.RUN_URL }}|run details>" + } + } + ] + } \ No newline at end of file diff --git a/.github/workflows/release-sims.yml b/.github/workflows/release-sims.yml new file mode 100644 index 00000000..43670487 --- /dev/null +++ b/.github/workflows/release-sims.yml @@ -0,0 +1,83 @@ +name: Release Sims +on: + pull_request: + branches: + - "rc**" + +jobs: + cleanup-runs: + runs-on: ubuntu-latest + steps: + - uses: rokroskar/workflow-run-cleanup-action@master + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'" + + build: + runs-on: ubuntu-latest + if: 
"!contains(github.event.head_commit.message, 'skip-sims')" + steps: + - uses: actions/checkout@v4 + - run: | + make build + + newbuild: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + install-runsim: + runs-on: ubuntu-latest + needs: build + steps: + - name: install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-multi-seed-long: + runs-on: ubuntu-latest + needs: [build, install-runsim] + steps: + - uses: actions/checkout@v4 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-multi-seed-long + run: | + make test-sim-multi-seed-long + + test-sim-nondeterminism: + runs-on: ubuntu-latest + needs: newbuild + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: technote-space/get-diff-action@v6.0.1 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + if: "env.GIT_DIFF != ''" + - name: test nondeterminism + run: | + make test-sim-nondeterminism + if: "env.GIT_DIFF != ''" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..205994bf --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,35 @@ +name: "Release" + +on: + # can be used to re-release an existing tag + workflow_dispatch: + + push: + tags: + - "v[0-9]+\\.[0-9]+\\.[0-9]+" + - "v[0-9]+\\.[0-9]+\\.[0-9]+-rc[0-9]+" + +jobs: + release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - run: git fetch --force --tags + + - uses: actions/setup-go@v5 + with: + go-version: "1.21" + + - name: Set Env + run: echo "TM_VERSION=$(go list -m github.com/tendermint/tendermint | sed 's:.* ::')" >> $GITHUB_ENV + + - name: Release + uses: goreleaser/goreleaser-action@v5 + with: + version: latest + args: release --clean --release-notes ./RELEASE_NOTES.md + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/sim-label.yml b/.github/workflows/sim-label.yml new file mode 100644 index 00000000..82e69160 --- /dev/null +++ b/.github/workflows/sim-label.yml @@ -0,0 +1,43 @@ +name: SimLabeled +on: + pull_request: + types: [ labeled ] + +jobs: + cleanup-runs: + runs-on: ubuntu-latest + steps: + - uses: rokroskar/workflow-run-cleanup-action@master + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" +# if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'" + + newbuild: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-nondeterminism-labeled: + if: ${{ github.event.label.name == 'sim' }} + runs-on: ubuntu-latest + needs: newbuild + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test nondeterminism + run: | + make test-sim-nondeterminism diff 
--git a/.github/workflows/sims.yml b/.github/workflows/sims.yml new file mode 100644 index 00000000..eacc3598 --- /dev/null +++ b/.github/workflows/sims.yml @@ -0,0 +1,118 @@ +name: Sims +on: + workflow_call: + workflow_dispatch: + pull_request: + push: + branches: + - main + +jobs: + cleanup-runs: + runs-on: ubuntu-latest + steps: + - uses: rokroskar/workflow-run-cleanup-action@master + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'" + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-nondeterminism: + runs-on: ubuntu-latest + needs: newbuild + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: technote-space/get-diff-action@v6.0.1 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + if: "env.GIT_DIFF != ''" + - name: test nondeterminism + run: | + make test-sim-nondeterminism + if: "env.GIT_DIFF != ''" + + test-sim-multi-seed-short: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: technote-space/get-diff-action@v6 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + if: "env.GIT_DIFF != ''" + - name: test-sim-multi-seed-short + run: | + make test-sim-multi-seed-short + if: "env.GIT_DIFF != ''" + + newbuild: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + install-runsim: + runs-on: ubuntu-latest + needs: build + steps: + - name: install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-multi-seed-long: + runs-on: ubuntu-latest + needs: [build, install-runsim] + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: actions/checkout@v4 + - uses: actions/cache@v4.0.0 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-multi-seed-long + run: | + make test-sim-multi-seed-long diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 00000000..b1e4597e --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,18 @@ +name: "Close stale pull requests" +on: + schedule: + - cron: "0 0 * * 1-5" + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9.0.0 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-pr-message: "This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions." 
+ days-before-stale: -1 + days-before-close: -1 + days-before-pr-stale: 45 + days-before-pr-close: 6 + exempt-pr-labels: "pinned, security, proposal, blocked, ADR" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..5d322561 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,182 @@ +name: Test +on: + workflow_call: + pull_request: + paths-ignore: + - "**/*.md" + - "docs/**" + push: + branches: + - main + paths-ignore: + - "**/*.md" + - "docs/**" + +permissions: + contents: read + +concurrency: + group: ci-${{ github.ref }}-tests + cancel-in-progress: true + +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "1.21" + check-latest: true + cache: true + cache-dependency-path: go.sum + - uses: technote-space/get-diff-action@v6.1.2 + id: git_diff + with: + PATTERNS: | + **/*.go + go.mod + go.sum + **/go.mod + **/go.sum + **/Makefile + Makefile + - uses: actions/cache@v4.0.0 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.mod') }} + restore-keys: | + ${{ runner.os }}-go- + - name: test & coverage report creation + if: env.GIT_DIFF + run: | + go test -v -coverprofile=profile.out -covermode=atomic -coverpkg=./... $(go list ./... | grep -v -e '/tests/e2e') + - uses: actions/upload-artifact@v4 + if: env.GIT_DIFF + with: + name: "${{ github.sha }}-coverage" + path: ./profile.out + + test-e2e: + runs-on: ubuntu-latest + timeout-minutes: 45 + steps: + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: actions/checkout@v4 + - uses: technote-space/get-diff-action@v6.0.1 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - name: Build Onomy Docker Image + run: make docker-build-debug + - name: Build Hermes Docker Image + run: make docker-build-hermes + - name: Test E2E + run: make test-e2e + + repo-analysis: + runs-on: ubuntu-latest + needs: [tests] + steps: + - uses: actions/checkout@v4 + - uses: technote-space/get-diff-action@v6.1.2 + id: git_diff + with: + PATTERNS: | + **/*.go + go.mod + go.sum + **/go.mod + **/go.sum + - uses: actions/download-artifact@v4 + if: env.GIT_DIFF + with: + name: "${{ github.sha }}-coverage" + - name: sonarcloud + if: ${{ env.SONAR_TOKEN != null && env.GIT_DIFF && !github.event.pull_request.draft }} + uses: SonarSource/sonarcloud-github-action@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + + liveness-test: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.21.x + - uses: technote-space/get-diff-action@v6.0.1 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - name: Install Onomy + run: | + make build + if: env.GIT_DIFF + - name: Start Local Network + run: | + make start-localnet-ci > liveness.out 2>&1 & + if: env.GIT_DIFF + - name: Test Local Network Liveness + run: | + ./contrib/scripts/test_localnet_liveness.sh 100 5 50 localhost + if: env.GIT_DIFF + + upgrade-test: + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: technote-space/get-diff-action@v6.0.1 + with: + PATTERNS: | + **/**.go + go.mod + go.sum + - uses: actions/setup-go@v5 + with: + go-version: 1.20.x + # the old onomyd binary version is hardcoded, need to be updated each major release. 
+ - name: Install Old Onomyd + run: | + git checkout v14.0.0 + make build + cp ./build/onomyd ./build/onomydold + go clean -modcache + if: env.GIT_DIFF + - name: Install New Onomyd + run: | + git checkout - + make build + cp ./build/onomyd ./build/onomydnew + go clean -modcache + if: env.GIT_DIFF + - name: Install Cosmovisor + run: | + go install github.com/cosmos/cosmos-sdk/cosmovisor/cmd/cosmovisor@latest + if: env.GIT_DIFF + - name: Start Old Onomyd Binary + run: | + go env GOPATH + ./contrib/scripts/upgrade_test_scripts/run_onomy.sh + if: env.GIT_DIFF + - name: Submit Upgrade Commands + run: | + ./contrib/scripts/upgrade_test_scripts/run_upgrade_commands.sh 15 + if: env.GIT_DIFF + - name: Check for successful upgrade + run: | + ./contrib/scripts/upgrade_test_scripts/test_upgrade.sh 20 5 16 localhost + if: env.GIT_DIFF diff --git a/.gitignore b/.gitignore index 508078a2..0caa4423 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,54 @@ -/node_modules/ -release/ -/vendor/ -/.idea/ +# OS +.DS_Store +*.swp +*.swo +*.swl +*.swm +*.swn +.vscode +.idea + +# Build +artifacts +vendor +build +tools/bin/* +examples/build/* +docs/_build +docs/node_modules +docs/tutorial +dist +tools-stamp +docs/node_modules + +# Data - ideally these don't exist +baseapp/data/* +client/lcd/keys/* +cmd/onomycli/statik/statik.go +mytestnet + +# Testing +coverage.txt +profile.out + +# Vagrant +.vagrant/ +*.box +*.log +vagrant + +# IDE +.idea/ *.iml -*.tar.gz -onomyd -/tmp-swagger-gen/ -/target -Cargo.lock + +# Graphviz +dependency-graph.png + +# Latex +*.aux +*.out +*.synctex.gz +contract_tests/* + +go.work.sum + diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 00000000..bd5e62ec --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,4 @@ +tasks: + - init: go get && go build ./... && go test ./... && make + command: go run +image: ghcr.io/notional-labs/cosmos diff --git a/.golangci.yml b/.golangci.yml index 9a0d292f..9499c148 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,375 +1,109 @@ -# This file contains all available configuration options -# with their default values. - -# options for analysis running run: - # default concurrency is a available CPU number - concurrency: 4 - + tests: true # timeout for analysis, e.g. 30s, 5m, default is 1m timeout: 5m - # exit code when at least one issue was found, default is 1 - issues-exit-code: 1 - - # include test files or not, default is true - tests: true - - # list of build tags, all linters use it. Default is empty list. - # build-tags: - - # which dirs to skip: issues from them won't be reported; - # can use regexp here: generated.*, regexp is applied on full path; - # default value is empty list, but default dirs are skipped independently - # from this option's value (see skip-dirs-use-default). - # "/" will be replaced by current OS file path separator to properly work - # on Windows. - skip-dirs: - - docs - - vendor - - # default is true. Enables skipping of directories: - # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ - skip-dirs-use-default: true - - # which files to skip: they will be analyzed, but issues from them - # won't be reported. Default value is empty list, but there is - # no need to include all autogenerated files, we confidently recognize - # autogenerated files. If it's not please let us know. - # "/" will be replaced by current OS file path separator to properly work - # on Windows. - # skip-files: - # - ".*\\.my\\.go$" - # - lib/bad.go - - # by default isn't set. If set we pass it to "go list -mod={option}". 
From "go help modules": - # If invoked with -mod=readonly, the go command is disallowed from the implicit - # automatic updating of go.mod described above. Instead, it fails when any changes - # to go.mod are needed. This setting is most useful to check that go.mod does - # not need updates, such as in a continuous integration and testing system. - # If invoked with -mod=vendor, the go command assumes that the vendor - # directory holds the correct copies of dependencies and ignores - # the dependency descriptions in go.mod. - # modules-download-mode: readonly|vendor|mod - - # Allow multiple parallel golangci-lint instances running. - # If false (default) - golangci-lint acquires file lock on start. - # allow-parallel-runners: false - - -# output configuration options -output: - # colored-line-number|line-number|json|tab|checkstyle|code-climate|junit-xml|github-actions - # default is "colored-line-number" - format: colored-line-number - - # print lines of code with issue, default is true - print-issued-lines: true - - # print linter name in the end of issue text, default is true - print-linter-name: true - - # make issues output unique by line, default is true - uniq-by-line: true - - # add a prefix to the output file references; default is no prefix - path-prefix: "" +linters: + disable-all: true + enable: + - dogsled + - errcheck + - exportloopref + - gci + - goconst + - gocritic + - gofumpt + - gosec + - gosimple + - govet + - ineffassign + - misspell + - nakedret + - nolintlint + - revive + - staticcheck + - stylecheck + - typecheck + - thelper + - unconvert + - unparam + - unused - # sorts results by: filepath, line and column - sort-results: false +issues: + exclude-rules: + - text: 'Use of weak random number generator' + linters: + - gosec + - text: 'comment on exported var' + linters: + - golint + - text: "don't use an underscore in package name" + linters: + - golint + - text: 'ST1003:' + linters: + - stylecheck + # FIXME: Disabled until golangci-lint updates stylecheck with this fix: + # https://github.com/dominikh/go-tools/issues/389 + - text: 'ST1016:' + linters: + - stylecheck + - path: 'migrations' + text: 'SA1019:' + linters: + - staticcheck + max-issues-per-linter: 10000 + max-same-issues: 10000 -# all available settings of specific linters linters-settings: - dogsled: - # checks assignments with too many blank identifiers; default is 2 - max-blank-identifiers: 2 - - dupl: - # tokens count to trigger issue, 150 by default - threshold: 200 - - errcheck: - # report about not checking of errors in type assertions: `a := b.(MyStruct)`; - # default is false: such cases aren't reported by default. - check-type-assertions: false - - # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; - # default is false: such cases aren't reported by default. - check-blank: true - - errorlint: - # Check whether fmt.Errorf uses the %w verb for formatting errors. 
See the readme for caveats - errorf: true - # Check for plain type assertions and type switches - # asserts: true - # Check for plain error comparisons - # comparison: true - - exhaustive: - # check switch statements in generated files also - check-generated: false - # indicates that switch statements are to be considered exhaustive if a - # 'default' case is present, even if all enum members aren't listed in the - # switch - default-signifies-exhaustive: false - - funlen: - lines: 200 - statements: 100 - gci: - # put imports beginning with prefix after 3rd-party packages; - # only support one prefix - # if not set, use goimports.local-prefixes - local-prefixes: github.com/onomyprotocol - - gocognit: - # minimal code complexity to report, 30 by default (but we recommend 10-20) - min-complexity: 15 - - nestif: - # minimal complexity of if statements to report, 5 by default - min-complexity: 5 - - goconst: - # minimal length of string constant, 3 by default - min-len: 3 - # minimum occurrences of constant string count to trigger issue, 3 by default - min-occurrences: 3 - # ignore test files, false by default - # ignore-tests: false - # look for existing constants matching the values, true by default - # match-constant: true - # search also for duplicated numbers, false by default - # numbers: false - # minimum value, only works with goconst.numbers, 3 by default - # min: 3 - # maximum value, only works with goconst.numbers, 3 by default - # max: 3 - # ignore when constant is not used as function argument, true by default - # ignore-calls: true - - # gocritic: - # # Which checks should be enabled; can't be combined with 'disabled-checks'; - # # See https://go-critic.github.io/overview#checks-overview - # # To check which checks are enabled run `GL_DEBUG=gocritic golangci-lint run` - # # By default list of stable checks is used. - # enabled-checks: - # - rangeValCopy - # - # # Which checks should be disabled; can't be combined with 'enabled-checks'; default is empty - # disabled-checks: - # - regexpMust - # - # # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` to see all tags and checks. - # # Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags". - # enabled-tags: - # - performance - # disabled-tags: - # - experimental - # - # # Settings passed to gocritic. - # # The settings key is the name of a supported gocritic checker. - # # The list of supported checkers can be find in https://go-critic.github.io/overview. 
- # settings: - # captLocal: # must be valid enabled check name - # # whether to restrict checker to params only (default true) - # paramsOnly: true - # elseif: - # # whether to skip balanced if-else pairs (default true) - # skipBalanced: true - # hugeParam: - # # size in bytes that makes the warning trigger (default 80) - # sizeThreshold: 80 - # nestingReduce: - # # min number of statements inside a branch to trigger a warning (default 5) - # bodyWidth: 5 - # rangeExprCopy: - # # size in bytes that makes the warning trigger (default 512) - # sizeThreshold: 512 - # # whether to check test functions (default true) - # skipTestFuncs: true - # rangeValCopy: - # # size in bytes that makes the warning trigger (default 128) - # sizeThreshold: 32 - # # whether to check test functions (default true) - # skipTestFuncs: true - # ruleguard: - # # path to a gorules file for the ruleguard checker - # rules: '' - # truncateCmp: - # # whether to skip int/uint/uintptr types (default true) - # skipArchDependent: true - # underef: - # # whether to skip (*x).method() calls where x is a pointer receiver (default true) - # skipRecvDeref: true - # unnamedResult: - # # whether to check exported functions - # checkExported: true - - gocyclo: - # minimal code complexity to report, 30 by default (but we recommend 10-20) - min-complexity: 10 - - godot: - # comments to be checked: `declarations`, `toplevel`, or `all` - scope: all - # check all top-level comments, not only declarations - check-all: true - # check that each sentence starts with a capital letter - capital: false - - gofmt: - # simplify code: gofmt with `-s` option, true by default - simplify: true - - gofumpt: - extra-rules: true - - goimports: - # put imports beginning with prefix after 3rd-party packages; - # it's a comma-separated list of prefixes - local-prefixes: github.com/onomyprotocol - - golint: - # minimal confidence for issues, default is 0.8 - min-confidence: 0.8 - - gomnd: - settings: - mnd: - # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. - checks: argument,case,condition,operation,return,assign - # ignored-numbers: 1000 - # ignored-files: magic_.*.go - # ignored-functions: math.* - - govet: - # report about shadowed variables - check-shadowing: true - disable: - - fieldalignment # produces a lot of warnings which are not topical at the moment - # settings per analyzer - settings: - shadow: - strict: true - printf: # analyzer name, run `go tool vet help` to see all analyzers - funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf - enable-all: true - - depguard: - list-type: blacklist - include-go-root: false - packages: - - github.com/sirupsen/logrus - packages-with-error-message: - # specify an error message to output when a blacklisted package is used - - github.com/sirupsen/logrus: "logging is allowed only by logutils.Log" - - lll: - # max line length, lines longer will be reported. Default is 120. - # '\t' is counted as 1 character by default, and can be changed with the tab-width option - line-length: 250 - # tab width in spaces. Default to 1. - tab-width: 4 - - makezero: - # Allow only slices initialized with a length of zero. Default is false. 
- always: false - + custom-order: true + sections: + - standard # Standard section: captures all standard packages. + - default # Default section: contains all imports that could not be matched to another section type. + - blank # blank imports + - dot # dot imports + - prefix(github.com/cometbft/cometbft) # comet + - prefix(github.com/cosmos) # cosmos org + - prefix(cosmossdk.io) # new modules + - prefix(github.com/cosmos/cosmos-sdk) # cosmos sdk + - prefix(github.com/onomyprotocol/onomy-rebuild) # Onomy + dogsled: + max-blank-identifiers: 3 maligned: # print struct with more effective memory layout or not, false by default suggest-new: true - - misspell: - # Correct spellings using locale preferences for US or UK. - # Default is to use a neutral variety of English. - # Setting locale to US will correct the British spelling of 'colour' to 'color'. - locale: US - ignore-words: - - someword - - nakedret: - # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 - max-func-lines: 40 - - prealloc: - # XXX: we don't recommend using this linter before doing performance profiling. - # For most programs usage of prealloc will be a premature optimization. - - # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. - # True by default. - simple: false - range-loops: false # Report preallocation suggestions on range loops, true by default - for-loops: false # Report preallocation suggestions on for loops, false by default - nolintlint: - # Enable to ensure that nolint directives are all used. Default is true. allow-unused: false - # Disable to ensure that nolint directives don't have a leading space. Default is true. allow-leading-space: true - # Exclude following linters from requiring an explanation. Default is []. - allow-no-explanation: [] - # Enable to require an explanation of nonzero length after each nolint directive. Default is false. require-explanation: false - # Enable to require nolint directives to mention the specific linter being suppressed. Default is false. - require-specific: true - - rowserrcheck: - packages: - - github.com/jmoiron/sqlx - - unparam: - # Inspect exported functions, default is false. Set to true if no external program/library imports your code. - # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: - # if it's called for subdir of a project it can't find external interfaces. All text editor integrations - # with golangci-lint call it on a directory with the changed file. - check-exported: true - - whitespace: - multi-if: false # Enforces newlines (or comments) after every multi-line if statement - multi-func: false # Enforces newlines (or comments) after every multi-line function signature - -linters: - enable-all: true - disable: - - exhaustivestruct - - prealloc - - paralleltest - - gas - - goerr113 - - goheader - - golint - - gomoddirectives - - gomodguard - - interfacer - - maligned - - nlreturn - - testpackage - - tparallel - - wrapcheck - - wsl - -issues: - exclude-use-default: false - # Maximum issues count per one linter. Set to 0 to disable. Default is 50. - max-issues-per-linter: 0 - # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. 
- max-same-issues: 0 - exclude-rules: # Excluding configuration per-path, per-linter, per-text and per-source - - linters: - - govet - text: 'declaration of "err" shadows declaration at line' - - linters: # Exclude lll issues for long lines with go:generate - - lll - source: "^//go:generate " - # Exclude some linters from running on tests files. - - path: _test\.go - linters: - - noctx - - gocognit \ No newline at end of file + require-specific: false + revive: + ignore-generated-header: true + severity: warning + rules: + - name: unused-parameter + disabled: true + - name: blank-imports + - name: context-as-argument + - name: context-keys-type + - name: dot-imports + - name: error-return + - name: error-strings + - name: error-naming + - name: exported + - name: if-return + - name: increment-decrement + - name: var-naming + - name: var-declaration + - name: range + - name: receiver-naming + - name: time-naming + - name: unexported-return + - name: indent-error-flow + - name: errorf + - name: empty-block + - name: superfluous-else + - name: unreachable-code + - name: redefines-builtin-id diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 00000000..ebab237b --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,64 @@ +--- +project_name: onomy + +env: + - GO111MODULE=on + +builds: + - main: ./cmd/onomyd + id: "onomyd" + binary: onomyd + mod_timestamp: "{{ .CommitTimestamp }}" + flags: + - -tags=netgo ledger + - -trimpath + env: + - CGO_ENABLED=0 + ldflags: + # .Env.TM_VERSION is provided in the workflow runner environment -> see .github/workflows/release.yml + - -s -w -X main.commit={{.Commit}} -X main.date={{ .CommitDate }} -X github.com/cosmos/cosmos-sdk/version.Name=onomy -X github.com/cosmos/cosmos-sdk/version.AppName=onomyd -X github.com/cosmos/cosmos-sdk/version.Version=v{{ .Version }} -X github.com/cosmos/cosmos-sdk/version.Commit={{ .Commit }} -X github.com/cosmos/cosmos-sdk/version.BuildTags=netgo,ledger -X github.com/tendermint/tendermint/version.TMCoreSemVer={{ .Env.TM_VERSION }} + goos: + - darwin + - linux + - windows + goarch: + - amd64 + - arm64 + +archives: + # disables archiving; to enable use commented lines below + - format: binary + name_template: "{{ .Binary }}-v{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + + # - format: tar.gz + # wrap_in_directory: "true" + # format_overrides: + # - goos: windows + # format: zip + # name_template: "{{ .Binary }}-v{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + # files: + # - LICENSE + # - README.md + # rlcp: true + +release: + prerelease: true + name_template: "v{{.Version}}" + +checksum: + name_template: SHA256SUMS-v{{.Version}}.txt + algorithm: sha256 + +snapshot: + name_template: SNAPSHOT-{{ .Commit }} + +changelog: + skip: false + +git: + # What should be used to sort tags when gathering the current and previous + # tags if there are more than one tag in the same commit. 
+ # + # source: https://goreleaser.com/customization/git/ + tag_sort: -version:refname + prerelease_suffix: "-rc" diff --git a/.mergify.yml b/.mergify.yml new file mode 100644 index 00000000..aac2b9a5 --- /dev/null +++ b/.mergify.yml @@ -0,0 +1,52 @@ +defaults: + actions: + backport: + assignees: + - "{{ author }}" + +queue_rules: + - name: default + conditions: + - "#approved-reviews-by>1" + +pull_request_rules: + - name: Automatic merge on approval to the main branch + conditions: + - "#approved-reviews-by>=1" + - base=main + - label=A:automerge + actions: + queue: + name: default + merge: + method: squash + commit_message_template: | + {{ title }} (#{{ number }}) + {{ body }} + + - name: Backport patches to the release/v12.x branch + conditions: + - base=main + - label=A:backport/v12.x + actions: + backport: + branches: + - release/v12.x + + - name: Backport patches to the release/v13.x branch + conditions: + - base=main + - label=A:backport/v13.x + actions: + backport: + branches: + - release/v13.x + + - name: Backport patches to the release/v14.1.x branch + conditions: + - base=main + - label=A:backport/v14.1.x + actions: + backport: + branches: + - release/v14.1.x \ No newline at end of file diff --git a/.rustfmt.toml b/.rustfmt.toml deleted file mode 100644 index 5420a403..00000000 --- a/.rustfmt.toml +++ /dev/null @@ -1,20 +0,0 @@ -# this was made for rustfmt 1.4.37-nightly (fdf65053 2021-09-07) - -condense_wildcard_suffixes = true # better -error_on_line_overflow = true # be more strict -error_on_unformatted = true # be more strict -format_code_in_doc_comments = true # needed -format_macro_bodies = true # changed -format_macro_matchers = true # changed -format_strings = true # better -ignore = [] -imports_granularity = "Crate" # changed -match_arm_blocks = true # needed -newline_style = "Unix" # prevents `\r\n` being accidentally introduced -overflow_delimited_expr = true # preferred -reorder_impl_items = true # better -group_imports = "StdExternalCrate" # promotes consistency -trailing_semicolon = false # the keyword is explicit enough -unstable_features = true -use_field_init_shorthand = true # better -wrap_comments = true # enforce the default comment_width diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..60460cbf --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,139 @@ +# CHANGELOG + +## v15.2.0 + +*March 29, 2024* + +### BUG FIXES + +- Increase x/gov metadata fields length to 10200 ([\#3025](https://github.com/onomyprotocol/onomy-rebuild/pull/3025)) +- Fix parsing of historic Txs with TxExtensionOptions ([\#3032](https://github.com/onomyprotocol/onomy-rebuild/pull/3032)) + +### STATE BREAKING + +- Increase x/gov metadata fields length to 10200 ([\#3025](https://github.com/onomyprotocol/onomy-rebuild/pull/3025)) +- Fix parsing of historic Txs with TxExtensionOptions ([\#3032](https://github.com/onomyprotocol/onomy-rebuild/pull/3032)) + +## v15.1.0 + +*March 15, 2024* + +### DEPENDENCIES + +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) to `v7.1.3-0.20240228213828-cce7f56d000b`. + ([\#2982](https://github.com/onomyprotocol/onomy-rebuild/pull/2982)) + +### FEATURES + +- Add onomyd snapshots command set ([\#2974](https://github.com/onomyprotocol/onomy-rebuild/pull/2974)) + +### STATE BREAKING + +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) to `v7.1.3-0.20240228213828-cce7f56d000b`. 
+ ([\#2982](https://github.com/onomyprotocol/onomy-rebuild/pull/2982)) +- Mint and transfer missing assets in escrow accounts + to reach parity with counterparty chain supply. + ([\#2993](https://github.com/onomyprotocol/onomy-rebuild/pull/2993)) + +## v15.0.0 + +*February 20, 2024* + +### API BREAKING + +- Reject `MsgVote` messages from accounts with less than 1 atom staked. + ([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + As compared to [v0.47.10](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10), + this special branch of cosmos-sdk has the following API-breaking changes: + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Limit the accepted deposit coins for a proposal to the minimum proposal deposit denoms (e.g., `uatom` for Cosmos Hub). ([sdk-#19302](https://github.com/cosmos/cosmos-sdk/pull/19302)) + - Add denom check to reject denoms outside of those listed in `MinDeposit`. A new `MinDepositRatio` param is added (with a default value of `0.01`) and now deposits are required to be at least `MinDepositRatio*MinDeposit` to be accepted. ([sdk-#19312](https://github.com/cosmos/cosmos-sdk/pull/19312)) + - Disable the `DenomOwners` query. ([sdk-#19266](https://github.com/cosmos/cosmos-sdk/pull/19266)) +- The consumer CCV genesis state obtained from the provider chain needs to be + transformed to be compatible with older versions of consumer chains + (see [ICS docs](https://cosmos.github.io/interchain-security/consumer-development/consumer-genesis-transformation)). + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + +### BUG FIXES + +- Add ante handler that only allows `MsgVote` messages from accounts with at least + 1 atom staked. ([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This special branch of cosmos-sdk backports a series of fixes for issues found + during the [Oak Security audit of SDK 0.47](https://github.com/oak-security/audit-reports/blob/master/Cosmos%20SDK/2024-01-23%20Audit%20Report%20-%20Cosmos%20SDK%20v1.0.pdf). + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Backport [sdk-#18146](https://github.com/cosmos/cosmos-sdk/pull/18146): Add denom check to reject denoms outside of those listed in `MinDeposit`. A new `MinDepositRatio` param is added (with a default value of `0.01`) and now deposits are required to be at least `MinDepositRatio*MinDeposit` to be accepted. ([sdk-#19312](https://github.com/cosmos/cosmos-sdk/pull/19312)) + - Partially backport [sdk-#18047](https://github.com/cosmos/cosmos-sdk/pull/18047): Add a limit of 200 grants pruned per `EndBlock` in the feegrant module. ([sdk-#19314](https://github.com/cosmos/cosmos-sdk/pull/19314)) + - Partially backport [skd-#18737](https://github.com/cosmos/cosmos-sdk/pull/18737): Add a limit of 200 grants pruned per `BeginBlock` in the authz module. ([sdk-#19315](https://github.com/cosmos/cosmos-sdk/pull/19315)) + - Backport [sdk-#18173](https://github.com/cosmos/cosmos-sdk/pull/18173): Gov Hooks now returns error and are "blocking" if they fail. Expect for `AfterProposalFailedMinDeposit` and `AfterProposalVotingPeriodEnded` that will log the error and continue. 
([sdk-#19305](https://github.com/cosmos/cosmos-sdk/pull/19305)) + - Backport [sdk-#18189](https://github.com/cosmos/cosmos-sdk/pull/18189): Limit the accepted deposit coins for a proposal to the minimum proposal deposit denoms. ([sdk-#19302](https://github.com/cosmos/cosmos-sdk/pull/19302)) + - Backport [sdk-#18214](https://github.com/cosmos/cosmos-sdk/pull/18214) and [sdk-#17352](https://github.com/cosmos/cosmos-sdk/pull/17352): Ensure that modifying the argument to `NewUIntFromBigInt` and `NewIntFromBigInt` doesn't mutate the returned value. ([sdk-#19293](https://github.com/cosmos/cosmos-sdk/pull/19293)) + + +### DEPENDENCIES + +- Bump [ibc-go](https://github.com/cosmos/ibc-go) to + [v7.3.1](https://github.com/cosmos/ibc-go/releases/tag/v7.3.1) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) + to [v7.1.2](https://github.com/cosmos/ibc-apps/releases/tag/middleware%2Fpacket-forward-middleware%2Fv7.1.2) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Bump [CometBFT](https://github.com/cometbft/cometbft) + to [v0.37.4](https://github.com/cometbft/cometbft/releases/tag/v0.37.4) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This is a special cosmos-sdk branch with support for both ICS and LSM. + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) +- Bump [ICS](https://github.com/cosmos/interchain-security) to + [v3.3.3-lsm](https://github.com/cosmos/interchain-security/releases/tag/v3.3.3-lsm) + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + +### FEATURES + +- Add support for metaprotocols using Tx extension options. + ([\#2960](https://github.com/onomyprotocol/onomy-rebuild/pull/2960)) + +### STATE BREAKING + +- Bump [ibc-go](https://github.com/cosmos/ibc-go) to + [v7.3.1](https://github.com/cosmos/ibc-go/releases/tag/v7.3.1) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Bump [PFM](https://github.com/cosmos/ibc-apps/tree/main/middleware) + to [v7.1.2](https://github.com/cosmos/ibc-apps/releases/tag/middleware%2Fpacket-forward-middleware%2Fv7.1.2) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Bump [CometBFT](https://github.com/cometbft/cometbft) + to [v0.37.4](https://github.com/cometbft/cometbft/releases/tag/v0.37.4) + ([\#2852](https://github.com/onomyprotocol/onomy-rebuild/pull/2852)) +- Set min commission rate staking parameter to `5%` + ([prop 826](https://www.mintscan.io/cosmos/proposals/826)) + and update the commission rate for all validators that have a commission + rate less than `5%`. ([\#2855](https://github.com/onomyprotocol/onomy-rebuild/pull/2855)) +- Migrate the signing infos of validators for which the consensus address is missing. +([\#2886](https://github.com/onomyprotocol/onomy-rebuild/pull/2886)) +- Migrate vesting funds from "cosmos145hytrc49m0hn6fphp8d5h4xspwkawcuzmx498" + to community pool according to signal prop [860](https://www.mintscan.io/cosmos/proposals/860). + ([\#2891](https://github.com/onomyprotocol/onomy-rebuild/pull/2891)) +- Add ante handler that only allows `MsgVote` messages from accounts with at least + 1 atom staked. 
([\#2912](https://github.com/onomyprotocol/onomy-rebuild/pull/2912)) +- Remove `GovPreventSpamDecorator` and initialize the `MinInitialDepositRatio` gov + param to `10%`. + ([\#2913](https://github.com/onomyprotocol/onomy-rebuild/pull/2913)) +- Add support for metaprotocols using Tx extension options. + ([\#2960](https://github.com/onomyprotocol/onomy-rebuild/pull/2960)) +- Bump [cosmos-sdk](https://github.com/cosmos/cosmos-sdk) to + [v0.47.10-ics-lsm](https://github.com/cosmos/cosmos-sdk/tree/v0.47.10-ics-lsm). + This is a special cosmos-sdk branch with support for both ICS and LSM. + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + - Skip running `addDenomReverseIndex` in `bank/v3` migration as it is prohibitively expensive to run on the Cosmos Hub. ([sdk-#19266](https://github.com/cosmos/cosmos-sdk/pull/19266)) +- Bump [ICS](https://github.com/cosmos/interchain-security) to + [v3.3.3-lsm](https://github.com/cosmos/interchain-security/releases/tag/v3.3.3-lsm) + ([\#2967](https://github.com/onomyprotocol/onomy-rebuild/pull/2967)) + +## Previous Versions + +[CHANGELOG of previous versions](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CHANGELOG.md) + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..55fdbffa --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,331 @@ +# Contributing + +- [Contributing](#contributing) + - [Overview](#overview) + - [Responsibilities of the stewarding team](#responsibilities-of-the-stewarding-team) + - [Ease of reviewing](#ease-of-reviewing) + - [Workflow](#workflow) + - [Project Board](#project-board) + - [Architecture Decision Records (ADR)](#architecture-decision-records-adr) + - [Development Procedure](#development-procedure) + - [Testing](#testing) + - [Pull Requests](#pull-requests) + - [Pull Request Templates](#pull-request-templates) + - [Requesting Reviews](#requesting-reviews) + - [Updating Documentation](#updating-documentation) + - [Changelog](#changelog) + - [Dependencies](#dependencies) + - [Protobuf](#protobuf) + - [Branching Model and Release](#branching-model-and-release) + - [PR Targeting](#pr-targeting) + +Thank you for considering making contributions to Onomy! 🎉👍 + +## Overview + +Contributing to this repo can mean many things such as participating in +discussion or proposing code changes. +Following the processes outlined in this document will lead to the best +chance of getting changes merged into the codebase. + +### Responsibilities of the stewarding team + +Onomy has many stakeholders contributing and shaping the project. +The _Onomy stewarding team_ is composed of Informal Systems developers and +is responsible for stewarding this project over time. +This means that the stewarding team needs to understand the nature of, +and agree to maintain, all of the changes that land on `main` or a backport branch. +It may cost a few days/weeks' worth of time to _submit_ a particular change, +but _maintaining_ that change over the years has a much higher cost that the stewarding team will bear. + +### Ease of reviewing + + The fact that the stewarding team needs to be able to deeply understand the short-, + medium- and long-term consequences of incoming changes means that changes need + to be **easy to review**. + + What makes a change easy to review, and more likely to land in an upcoming + release? + + 1. **Each pull request must do _one thing_**. It must be very clear what that + one thing is when looking at the pull request title, description, and linked + issues. 
It must also be very clear what value it ultimately aims to deliver, + and for which user(s). A single pull request that does multiple things, or + without a clear articulation of the problem it attempts to solve, may be + rejected immediately. + + 2. **Each pull request must be manageable in size**. + Self-contained pull requests that are manageable in size may target `main` directly. + Larger contributions though must be structured as a series of smaller pull requests + each building upon the previous one, all ideally tracked in a tracking issue + (i.e., [an EPIC](#project-board)). + These pull requests must target a long-lived feature branch. + For details, see the [development procedure guidelines](#development-procedure). + Poorly structured pull requests may be rejected immediately with a + request to restructure them. + + **Note**: This does not necessarily apply to documentation-related changes or + automatically generated code (e.g. generated from Protobuf definitions). But + automatically generated code changes should occur within separate commits, so + they are easily distinguishable from manual code changes. + +### Workflow + +To ensure a smooth workflow for all contributors, a general procedure for contributing has been established. + +1. Start by browsing [existing issues](https://github.com/onomyprotocol/onomy-rebuild/issues) and [discussions](https://github.com/onomyprotocol/onomy-rebuild/discussions). If you are looking for something interesting or if you have something in your mind, there is a chance it had been discussed. + * Looking for a good place to start contributing? How about checking out some [good first issues](https://github.com/onomyprotocol/onomy-rebuild/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) or [bugs](https://github.com/onomyprotocol/onomy-rebuild/issues?q=is%3Aopen+is%3Aissue+label%3Abug)? +2. Determine whether a GitHub issue or discussion is more appropriate for your needs: + 1. If you want to propose something new that requires specification or an additional design, or you would like to change a process, start with a [new discussion](https://github.com/onomyprotocol/onomy-rebuild/discussions/new/choose). With discussions, we can better handle the design process using discussion threads. A discussion usually leads to one or more issues. + 2. If the issue you want addressed is a specific proposal or a bug, then open a [new issue](https://github.com/onomyprotocol/onomy-rebuild/issues/new/choose). + 3. Review existing [issues](https://github.com/onomyprotocol/onomy-rebuild/issues) to find an issue you'd like to help with. +3. Participate in thoughtful discussion on that issue. +4. If you would like to contribute: + 1. Ensure that the proposal has been accepted. + 2. Ensure that nobody else has already begun working on this issue. If they have, + make sure to contact them to collaborate. + 3. If nobody has been assigned for the issue and you would like to work on it, + make a comment on the issue to inform the community of your intentions + to begin work and please wait for an acknowledgement from the stewarding team. +5. To submit your work as a contribution to the repository, follow standard GitHub best practices. + See [development procedure guidelines](#development-procedure) below. + +**Note:** For very small or trivial issues such as typos, you are not required to open an issue before submitting a PR. +For complex problems or features, please make sure to open an issue and provide context and problem description. 
+PRs opened before adequate design discussion has taken place in a GitHub issue have a high likelihood of being rejected without review.
+
+## Project Board
+
+We use self-organizing principles to coordinate and collaborate across organizations in structured "EPICs" that focus on specific problem domains or architectural components of Onomy. For details, see the [GitHub Project board](https://github.com/orgs/cosmos/projects/28/views/11).
+
+The developers work in sprints, which are available in a [GitHub Project](https://github.com/orgs/cosmos/projects/28/views/2).
+
+## Architecture Decision Records (ADR)
+
+When proposing an architecture decision for Onomy, please start by opening an [issue](https://github.com/onomyprotocol/onomy-rebuild/issues/new/choose) or a [discussion](https://github.com/onomyprotocol/onomy-rebuild/discussions/new) with a summary of the proposal. Once the proposal has been discussed and there is rough alignment on a high-level approach to the design, you may either start development, or write an ADR.
+
+If your architecture decision is a simple change, you may contribute directly without writing an ADR. However, if you are proposing a significant change, please include a corresponding ADR.
+
+To create an ADR, follow the [template](./docs/architecture/adr-template.md) and [doc](./docs/architecture/README.md). If you would like to see examples of how these are written, please refer to the current [ADRs](https://github.com/onomyprotocol/onomy-rebuild/tree/main/docs/architecture).
+
+## Development Procedure
+
+`main` must be stable, include only completed features, and never fail `make lint`, `make run-tests`, or `make build/install`.
+
+Depending on the scope of the work, we differentiate between self-contained pull requests and long-lived contributions (features).
+
+**Self-contained pull requests**:
+
+* Fork the repo (core developers must create a branch directly in the Onomy repo),
+branch from the HEAD of `main`, make some commits, and submit a PR to `main`.
+* For developers who are core contributors and are working within the `onomy` repo, follow branch name conventions to ensure clear
+ownership of branches: `{moniker}/{issue#}-branch-name`.
+* See [Branching Model](#branching-model-and-release) for more details.
+
+**Large contributions**:
+
+* Make sure that a feature branch is created in the repo.
+  This will be created by the stewarding team after design discussions.
+  The naming convention for the feature branch must be `feat/{issue#}-branch-name`.
+  Note that (similar to `main`) all feature branches have branch protection rules and they run the CI.
+  Unlike `main`, a feature branch may intermittently fail `make lint`, `make run-tests`, or `make build/install`.
+* Fork the repo (core developers must create a branch directly in the Onomy repo),
+  branch from the HEAD of the feature branch, make some commits, and submit a PR to the feature branch.
+  All PRs targeting a feature branch should follow the same guidelines as in this document.
+* Once the feature is completed, submit a PR from the feature branch targeting `main`.
+
+Be sure to run `make format` before every commit. The easiest way
+to do this is to have your editor run it for you upon saving a file (most editors
+can do this automatically when configured for the language).
+A convenience git `pre-commit` hook that runs the formatters automatically
+before each commit is available in the `contrib/githooks/` directory.
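+
+A minimal sketch of enabling that hook locally (an illustrative example, not a documented Make target; adjust the path if your checkout differs):
+
+```bash
+# Run from the repository root: copy the provided pre-commit hook into the
+# local .git/hooks directory and make it executable so git runs it on commit.
+cp contrib/githooks/pre-commit .git/hooks/pre-commit
+chmod +x .git/hooks/pre-commit
+
+# After this, `git commit` runs the formatters automatically; use
+# `git commit --no-verify` to bypass the hook in exceptional cases.
+```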
+
+**Note:** Exceptions to the above guidelines are possible, but only after prior discussions with the stewarding team.
+
+### Testing
+
+Tests can be executed by running `make run-tests` at the top level of the Onomy repository.
+For running the e2e tests, make sure to build the docker images by running `make docker-build-all`.
+
+When testing a function under a variety of different inputs, we prefer to use
+[table driven tests](https://github.com/golang/go/wiki/TableDrivenTests).
+Table driven test error messages should follow the format
+`<desc>, tc #{index}, i #{index}`.
+`<desc>` is an optional short description of what's failing, `tc` is the
+index within the table of the test case that is failing, and `i` is, when there
+is a loop, exactly which iteration of the loop failed.
+The idea is that you should be able to see the
+error message and figure out exactly what failed.
+Here is an example check:
+
+```go
+for tcIndex, tc := range cases {
+	for i := 0; i < tc.numTxsToTest; i++ {
+		require.Equal(t, expectedTx[:32], calculatedTx[:32],
+			"First 32 bytes of the txs differed. tc #%d, i #%d", tcIndex, i)
+	}
+}
+```
+
+### Pull Requests
+
+Before submitting a pull request:
+
+* synchronize your branch with the latest base branch (i.e., `main` or the feature branch) and resolve any arising conflicts, e.g.,
+  - either `git fetch origin && git merge origin/main`
+  - or `git fetch origin && git rebase -i origin/main`
+* run `make lint`, `make run-tests`, and `make build/install` to ensure that all checks and tests pass
+  (a combined command sketch is included after the Pull Request Templates subsection below).
+
+Then:
+
+1. If you have something to show, **start with a `Draft` PR**. It's good to have early validation of your work and we highly recommend this practice. A Draft PR also indicates to the community that the work is in progress.
+   Draft PRs also help the stewarding team provide early feedback and ensure the work is heading in the right direction.
+2. When the code is complete, change your PR from `Draft` to `Ready for Review`.
+3. Go through the actions for each checkbox present in the PR template description. The PR actions are automatically provided for each new PR.
+
+PRs must have a category prefix that is based on the type of changes being made (for example, `fix`, `feat`,
+`refactor`, `docs`, and so on). The [type](https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json)
+must be included in the PR title as a prefix (for example, `fix: `).
+This convention ensures that all changes committed to the base branch follow the
+[Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification.
+Additionally, **each PR should only address a single issue**.
+
+Pull requests are merged automatically using the [`A:automerge` action](https://mergify.io/features/auto-merge).
+
+**Note:** When merging, GitHub will squash commits and rebase on top of the base branch.
+
+### Pull Request Templates
+
+There are three PR templates. The [default template](./.github/PULL_REQUEST_TEMPLATE.md) contains links to the three templates. Please go to the `Preview` tab and select the appropriate sub-template:
+
+- The [production template](./.github/PULL_REQUEST_TEMPLATE/production.md) is for types `fix`, `feat`, `deps`, and `refactor`.
+- The [docs template](./.github/PULL_REQUEST_TEMPLATE/docs.md) is for documentation changes.
+- The [others template](./.github/PULL_REQUEST_TEMPLATE/others.md) is for changes that do not affect production code.
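+
+Putting the pre-submission steps from the [Pull Requests](#pull-requests) section together, a typical local sequence looks roughly like this (a sketch only; whether you merge or rebase, and which base branch you use, depends on your contribution):
+
+```bash
+# Sync your branch with the latest base branch (here: main) and resolve conflicts.
+git fetch origin
+git rebase -i origin/main   # or: git merge origin/main
+
+# Make sure all checks pass locally before marking the PR "Ready for Review".
+make lint
+make run-tests
+make build
+```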
+
+### Requesting Reviews
+
+In order to accommodate the review process, the author of the PR must complete the author checklist
+(from the pull request template)
+to the best of their abilities before marking the PR as "Ready for Review". If you would like to
+receive early feedback on the PR, open the PR as a "Draft" and leave a comment in the PR indicating
+that you would like early feedback and tagging whoever you would like to receive feedback from.
+
+Codeowners are marked automatically as the reviewers.
+
+All PRs require at least two review approvals before they can be merged (one review might be acceptable in
+the case of minor changes to [docs](./.github/PULL_REQUEST_TEMPLATE/docs.md) or [others](./.github/PULL_REQUEST_TEMPLATE/others.md) changes that do not affect production code). Each PR template has a reviewers checklist that must be completed before the PR can be merged. Each reviewer is responsible
+for all checked items unless they have indicated otherwise by leaving their handle next to specific
+items. In addition, use the following review explanations:
+
+* `LGTM` without an explicit approval means that the changes look good, but you haven't thoroughly reviewed the reviewer checklist items.
+* `Approval` means that you have completed some or all of the reviewer checklist items. If you only reviewed selected items, you must add your handle next to the items that you have reviewed. In addition, follow these guidelines:
+  * You must also think through anything which ought to be included but is not
+  * You must think through whether any added code could be partially combined (DRYed) with existing code
+  * You must think through any potential security issues or incentive-compatibility flaws introduced by the changes
+  * Naming must be consistent with conventions and the rest of the codebase
+  * Code must live in a reasonable location, considering dependency structures (for example, not importing testing modules in production code, or including example code modules in production code).
+  * If you approve the PR, you are responsible for any issues mentioned here and any issues that should have been addressed after thoroughly reviewing the reviewer checklist items in the pull request template.
+* If you sat down with the PR submitter and did a pairing review, add this information in your `Approval` review or your PR comments.
+* If you are only making "surface level" reviews, submit notes as a `comment` review.
+
+### Updating Documentation
+
+If you open a PR in Onomy, it is mandatory to update the relevant documentation in `/docs`.
+
+### Changelog
+
+To manage and generate our changelog, we currently use [unclog](https://github.com/informalsystems/unclog).
+
+Every PR with types `fix`, `feat`, `deps`, and `refactor` should include a file
+`.changelog/unreleased/${section}/[${component}/]${pr-number}-${short-description}.md`,
+where:
+
+- `section` is one of
+  `dependencies`, `improvements`, `features`, `bug-fixes`, `state-breaking`, `api-breaking`,
+  and _**if multiple apply, create multiple files**_,
+  not necessarily with the same `short-description` or content;
+- `pr-number` is the PR number;
+- `short-description` is a short (4 to 6 word), hyphen-separated description of the change;
+- `component` is used for changes that affect one of the components defined in the [config](.changelog/config.toml), e.g., `tests`, `globalfee`.
+
+For examples, see the [.changelog](.changelog) folder.
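+
+For illustration, a hypothetical bug-fix entry for PR 1234 (both the number and the description below are made up) would live in a single Markdown file such as:
+
+```bash
+# Hypothetical example: section=bug-fixes, pr-number=1234,
+# short-description=fix-vesting-migration, no component.
+cat .changelog/unreleased/bug-fixes/1234-fix-vesting-migration.md
+# - Fix the vesting funds migration
+#   ([\#1234](https://github.com/onomyprotocol/onomy-rebuild/pull/1234))
+```
+
+In practice you would normally let `unclog` create the file for you, as described next.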
+
+Use `unclog` to add a changelog entry in `.changelog` (check the [requirements](https://github.com/informalsystems/unclog#requirements) first):
+```bash
+# add a general entry
+unclog add \
+  -i "${pr-number}-${short-description}" \
+  -p "${pr-number}" \
+  -s "${section}" \
+  -m "${description}"
+
+# add an entry for a component
+unclog add \
+  -i "${pr-number}-${short-description}" \
+  -p "${pr-number}" \
+  -c "${component}" \
+  -s "${section}" \
+  -m "${description}"
+```
+where `${description}` is a detailed description of the changelog entry.
+
+For example,
+```bash
+# add an entry for bumping IBC to v4.4.2
+unclog add -i "2554-bump-ibc" -p 2554 -s dependencies -m "Bump [ibc-go](https://github.com/cosmos/ibc-go) to [v4.4.2](https://github.com/cosmos/ibc-go/releases/tag/v4.4.2)"
+
+# add an entry for changing the global fee module;
+# note that the entry is added to both state-breaking and api-breaking sections
+unclog add -i "2424-params" -p 2424 -c globalfee -s state-breaking -m "Add \`bypass-min-fee-msg-types\` and \`maxTotalBypassMinFeeMsgGasUsage\` to globalfee params"
+unclog add -i "2424-params" -p 2424 -c globalfee -s api-breaking -m "Add \`bypass-min-fee-msg-types\` and \`maxTotalBypassMinFeeMsgGasUsage\` to globalfee params"
+```
+
+**Note:** `unclog add` requires an editor. This can be set either by configuring
+an `$EDITOR` environment variable or by manually specifying an editor binary path
+via the `--editor` flag.
+
+**Note:** Changelog entries should answer the question: "what is important about this
+change for users to know?" or "what problem does this solve for users?". It
+should not simply be a reiteration of the title of the associated PR, unless the
+title of the PR _very_ clearly explains the benefit of a change to a user.
+
+## Dependencies
+
+We use [Go Modules](https://github.com/golang/go/wiki/Modules) to manage
+dependency versions.
+
+The main branch of every Cosmos repository should just build with `go get`,
+which means they should be kept up-to-date with their dependencies so we can
+get away with telling people they can just `go get` our software.
+
+When dependencies in Onomy's `go.mod` are changed, it is generally accepted practice
+to delete `go.sum` and then run `go mod tidy`.
+
+Since some dependencies are not under our control, a third party may break our
+build, in which case we can fall back on `go mod tidy -v`.
+
+## Protobuf
+
+We use [Protocol Buffers](https://developers.google.com/protocol-buffers) along with [gogoproto](https://github.com/cosmos/gogoproto) to generate code for use in Onomy.
+
+For deterministic behavior around Protobuf tooling, everything is containerized using Docker. Make sure to have Docker installed on your machine, or head to [Docker's website](https://docs.docker.com/get-docker/) to install it.
+
+To generate the protobuf stubs, you can run `make proto-gen`.
+
+## Branching Model and Release
+
+User-facing repos should adhere to the trunk-based development branching model: https://trunkbaseddevelopment.com. User branches should start with a user name, example: `{moniker}/{issue#}-branch-name`.
+
+Onomy follows [semantic versioning](https://semver.org), but with some deviations to account for state-machine and API breaking changes. See [RELEASE_PROCESS.md](./RELEASE_PROCESS.md) for details.
+
+### PR Targeting
+
+Ensure that you base and target your PRs on either `main` or a feature branch.
+
+All complete features and bug fixes must be targeted against `main`.
+The exception is bug fixes that relate only to a released version.
+In that case, the related bug fix PRs must target against the release branch. + +If needed, we will backport a commit from `main` to a release branch with appropriate consideration of versioning. + diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index 61866a80..00000000 --- a/Cargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[workspace] -resolver = "2" -members = [ - "tests", -] - -[patch.crates-io] diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..7bf03481 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,20 @@ +ARG IMG_TAG=latest + +# Compile the onomyd binary +FROM golang:1.21-alpine AS onomyd-builder +WORKDIR /src/app/ +COPY go.mod go.sum* ./ +RUN go mod download +COPY . . +ENV PACKAGES curl make git libc-dev bash gcc linux-headers eudev-dev python3 +RUN apk add --no-cache $PACKAGES +RUN CGO_ENABLED=0 make install + +# Add to a distroless container +FROM cgr.dev/chainguard/static:$IMG_TAG +ARG IMG_TAG +COPY --from=onomyd-builder /go/bin/onomyd /usr/local/bin/ +EXPOSE 26656 26657 1317 9090 +USER 0 + +ENTRYPOINT ["onomyd", "start"] \ No newline at end of file diff --git a/LICENSE b/LICENSE index 0ad25db4..293f9fe8 100644 --- a/LICENSE +++ b/LICENSE @@ -1,661 +1,201 @@ - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. 
Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. 
A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. 
- - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. 
- - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 the Onomy authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/Makefile b/Makefile index 1500727e..d51eb430 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,31 @@ -PACKAGES=$(shell go list ./... | grep -v '/simulation') -VERSION := $(shell git describe --abbrev=6 --dirty --always --tags) +#!/usr/bin/make -f + +BRANCH := $(shell git rev-parse --abbrev-ref HEAD) COMMIT := $(shell git log -1 --format='%H') -IMPORT_PREFIX=github.com/onomyprotocol -SCAN_FILES := $(shell find . -type f -name '*.go' -not -name '*mock.go' -not -name '*_gen.go' -not -path "*/vendor/*") + +# don't override user values +ifeq (,$(VERSION)) + VERSION := $(shell git describe --exact-match 2>/dev/null) + # if VERSION is empty, then populate it with branch's name and raw commit hash + ifeq (,$(VERSION)) + VERSION := $(BRANCH)-$(COMMIT) + endif +endif + +PACKAGES_SIMTEST=$(shell go list ./... | grep '/simulation') +LEDGER_ENABLED ?= true +SDK_PACK := $(shell go list -m github.com/cosmos/cosmos-sdk | sed 's/ /\@/g') +TM_VERSION := $(shell go list -m github.com/cometbft/cometbft | sed 's:.* ::') # grab everything after the space in "github.com/cometbft/cometbft v0.34.7" +DOCKER := $(shell which docker) +BUILDDIR ?= $(CURDIR)/build +TEST_DOCKER_REPO=cosmos/contrib-onomytest + +GO_SYSTEM_VERSION = $(shell go version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f1-2) +REQUIRE_GO_VERSION = 1.21 + +export GO111MODULE = on + +# process build tags build_tags = netgo ifeq ($(LEDGER_ENABLED),true) @@ -28,110 +51,266 @@ ifeq ($(LEDGER_ENABLED),true) endif endif -ifeq (cleveldb,$(findstring cleveldb,$(ONOMY_BUILD_OPTIONS))) - build_tags += gcc +ifeq (cleveldb,$(findstring cleveldb,$(GAIA_BUILD_OPTIONS))) + build_tags += gcc cleveldb endif build_tags += $(BUILD_TAGS) build_tags := $(strip $(build_tags)) whitespace := -whitespace += $(whitespace) +whitespace := $(whitespace) $(whitespace) comma := , build_tags_comma_sep := $(subst $(whitespace),$(comma),$(build_tags)) +# process linker flags + ldflags = -X github.com/cosmos/cosmos-sdk/version.Name=onomy \ - -X github.com/cosmos/cosmos-sdk/version.AppName=onomy \ - -X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \ - -X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) \ - -X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)" + -X github.com/cosmos/cosmos-sdk/version.AppName=onomyd \ + -X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \ + -X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) \ + -X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)" \ + -X github.com/cometbft/cometbft/version.TMCoreSemVer=$(TM_VERSION) + +ifeq (cleveldb,$(findstring cleveldb,$(GAIA_BUILD_OPTIONS))) + ldflags += -X github.com/cosmos/cosmos-sdk/types.DBBackend=cleveldb +endif +ifeq (,$(findstring nostrip,$(GAIA_BUILD_OPTIONS))) + ldflags += -w -s +endif ldflags += $(LDFLAGS) ldflags := $(strip $(ldflags)) -BUILD_FLAGS := -ldflags '$(ldflags)' -gcflags="all=-N -l" -.PHONY: all -all: lint proto-lint test install +BUILD_FLAGS := -tags "$(build_tags)" -ldflags '$(ldflags)' +# check for nostrip option +ifeq (,$(findstring nostrip,$(GAIA_BUILD_OPTIONS))) + BUILD_FLAGS += -trimpath +endif + +#$(info $$BUILD_FLAGS is [$(BUILD_FLAGS)]) -.PHONY: build -build: go.sum - go build $(BUILD_FLAGS) ./cmd/onomyd +# The below include contains the tools target. 
+include contrib/devtools/Makefile -.PHONY: install -install: go.sum - go install $(BUILD_FLAGS) ./cmd/onomyd +############################################################################### +### Build ### +############################################################################### + +check_version: +ifneq ($(GO_SYSTEM_VERSION), $(REQUIRE_GO_VERSION)) + @echo "ERROR: Go version 1.21 is required for $(VERSION) of Onomy." +endif + +all: install lint run-tests test-e2e vulncheck + +BUILD_TARGETS := build install + +build: BUILD_ARGS=-o $(BUILDDIR)/ + +$(BUILD_TARGETS): check_version go.sum $(BUILDDIR)/ + go $@ -mod=readonly $(BUILD_FLAGS) $(BUILD_ARGS) ./... + +$(BUILDDIR)/: + mkdir -p $(BUILDDIR)/ + +vulncheck: $(BUILDDIR)/ + GOBIN=$(BUILDDIR) go install golang.org/x/vuln/cmd/govulncheck@latest + $(BUILDDIR)/govulncheck ./... + +build-linux: go.sum + LEDGER_ENABLED=false GOOS=linux GOARCH=amd64 $(MAKE) build + +go-mod-cache: go.sum + @echo "--> Download go modules to local cache" + @go mod download -.PHONY: go.sum go.sum: go.mod - @echo "--> Ensure dependencies have not been modified" - GO111MODULE=on go mod verify + @echo "--> Ensure dependencies have not been modified" + @go mod verify -.PHONY: test -test: - @go test -mod=readonly $(PACKAGES) +draw-deps: + @# requires brew install graphviz or apt-get install graphviz + go install github.com/RobotsAndPencils/goviz + @goviz -i ./cmd/onomyd -d 2 | dot -Tpng -o dependency-graph.png -# ALCHEMY_KEY env variable is required for the tests execution -.PHONY: test-integration -test-integration: - @go test -v ./tests/... -tags=integration +clean: + rm -rf $(BUILDDIR)/ artifacts/ -.PHONY: build-load-test -build-load-test: - go build -tags tmload -o build/onomy-load-test ./tests/tm-load-test/onomy-load-test/ +distclean: clean + rm -rf vendor/ + +############################################################################### +### Release ### +############################################################################### + +# create tag and run goreleaser without publishing +create-release-dry-run: +ifneq ($(strip $(TAG)),) + @echo "--> Dry running release for tag: $(TAG)" + @echo "--> Create tag: $(TAG) dry run" + git tag -s $(TAG) -m $(TAG) + git push origin $(TAG) --dry-run + @echo "--> Delete local tag: $(TAG)" + @git tag -d $(TAG) + @echo "--> Running goreleaser" + @go install github.com/goreleaser/goreleaser@latest + TM_VERSION=$(TM_VERSION) goreleaser release --snapshot --clean + @rm -rf dist/ + @echo "--> Done create-release-dry-run for tag: $(TAG)" +else + @echo "--> No tag specified, skipping tag release" +endif + +# create tag and publish it +create-release: +ifneq ($(strip $(TAG)),) + @echo "--> Running release for tag: $(TAG)" + @echo "--> Create release tag: $(TAG)" + git tag -s $(TAG) -m $(TAG) + git push origin $(TAG) + @echo "--> Done creating release tag: $(TAG)" +else + @echo "--> No tag specified, skipping create-release" +endif + +############################################################################### +### Documentation ### +############################################################################### + +build-docs: + @cd docs && ./build.sh + +.PHONY: build-docs + + +############################################################################### +### Tests & Simulation ### +############################################################################### + +include sims.mk + +PACKAGES_UNIT=$(shell go list ./... | grep -v -e '/tests/e2e') +PACKAGES_E2E=$(shell cd tests/e2e && go list ./... 
| grep '/e2e') +TEST_PACKAGES=./... +TEST_TARGETS := test-unit test-unit-cover test-race test-e2e + +test-unit: ARGS=-timeout=5m -tags='norace' +test-unit: TEST_PACKAGES=$(PACKAGES_UNIT) +test-unit-cover: ARGS=-timeout=5m -tags='norace' -coverprofile=coverage.txt -covermode=atomic +test-unit-cover: TEST_PACKAGES=$(PACKAGES_UNIT) +test-race: ARGS=-timeout=5m -race +test-race: TEST_PACKAGES=$(PACKAGES_UNIT) +test-e2e: ARGS=-timeout=25m -v +test-e2e: TEST_PACKAGES=$(PACKAGES_E2E) +$(TEST_TARGETS): run-tests + +run-tests: +ifneq (,$(shell which tparse 2>/dev/null)) + @echo "--> Running tests" + @go test -mod=readonly -json $(ARGS) $(TEST_PACKAGES) | tparse +else + @echo "--> Running tests" + @go test -mod=readonly $(ARGS) $(TEST_PACKAGES) +endif + +.PHONY: run-tests $(TEST_TARGETS) + +docker-build-debug: + @docker build -t onomyprotocol/onomy-rebuildd-e2e -f e2e.Dockerfile . + +# TODO: Push this to the Cosmos Dockerhub so we don't have to keep building it +# in CI. +docker-build-hermes: + @cd tests/e2e/docker; docker build -t ghcr.io/cosmos/hermes-e2e:1.0.0 -f hermes.Dockerfile . + +docker-build-all: docker-build-debug docker-build-hermes + +############################################################################### +### Linting ### +############################################################################### +golangci_lint_cmd=golangci-lint +golangci_version=v1.53.3 -.PHONY: lint lint: - golangci-lint -c .golangci.yml run - gofmt -d -s $(SCAN_FILES) + @echo "--> Running linter" + @go install github.com/golangci/golangci-lint/cmd/golangci-lint@$(golangci_version) + @$(golangci_lint_cmd) run --timeout=10m + +lint-fix: + @echo "--> Running linter" + @go install github.com/golangci/golangci-lint/cmd/golangci-lint@$(golangci_version) + @$(golangci_lint_cmd) run --fix --out-format=tab --issues-exit-code=0 -.PHONY: format format: - gofumpt -lang=1.6 -extra -s -w $(SCAN_FILES) - gogroup -order std,other,prefix=$(IMPORT_PREFIX) -rewrite $(SCAN_FILES) + @go install mvdan.cc/gofumpt@latest + @go install github.com/golangci/golangci-lint/cmd/golangci-lint@$(golangci_version) + find . 
-name '*.go' -type f -not -path "./vendor*" -not -path "*.git*" -not -path "./client/docs/statik/statik.go" -not -path "./tests/mocks/*" -not -name "*.pb.go" -not -name "*.pb.gw.go" -not -name "*.pulsar.go" -not -path "./crypto/keys/secp256k1/*" | xargs gofumpt -w -l + $(golangci_lint_cmd) run --fix +.PHONY: format ############################################################################### -### Protobuf ### +### Localnet ### ############################################################################### -.PHONY: proto-gen-all -proto-gen-all: proto-gen-go proto-gen-openapi +start-localnet-ci: build + rm -rf ~/.onomyd-liveness + ./build/onomyd init liveness --chain-id liveness --home ~/.onomyd-liveness + ./build/onomyd config chain-id liveness --home ~/.onomyd-liveness + ./build/onomyd config keyring-backend test --home ~/.onomyd-liveness + ./build/onomyd keys add val --home ~/.onomyd-liveness + ./build/onomyd genesis add-genesis-account val 10000000000000000000000000stake --home ~/.onomyd-liveness --keyring-backend test + ./build/onomyd genesis gentx val 1000000000stake --home ~/.onomyd-liveness --chain-id liveness + ./build/onomyd genesis collect-gentxs --home ~/.onomyd-liveness + sed -i.bak'' 's/minimum-gas-prices = ""/minimum-gas-prices = "0uatom"/' ~/.onomyd-liveness/config/app.toml + ./build/onomyd start --home ~/.onomyd-liveness --x-crisis-skip-assert-invariants -.PHONY: proto-gen-openapi -proto-gen-openapi: - bash ./dev/scripts/protoc-swagger-gen.sh +.PHONY: start-localnet-ci -.PHONY: proto-gen-go -proto-gen-go: - bash ./dev/scripts/protocgen.sh - go mod tidy - make format +############################################################################### +### Docker ### +############################################################################### + +test-docker: + @docker build -f contrib/Dockerfile.test -t ${TEST_DOCKER_REPO}:$(shell git rev-parse --short HEAD) . 
+ @docker tag ${TEST_DOCKER_REPO}:$(shell git rev-parse --short HEAD) ${TEST_DOCKER_REPO}:$(shell git rev-parse --abbrev-ref HEAD | sed 's#/#_#g') + @docker tag ${TEST_DOCKER_REPO}:$(shell git rev-parse --short HEAD) ${TEST_DOCKER_REPO}:latest + +test-docker-push: test-docker + @docker push ${TEST_DOCKER_REPO}:$(shell git rev-parse --short HEAD) + @docker push ${TEST_DOCKER_REPO}:$(shell git rev-parse --abbrev-ref HEAD | sed 's#/#_#g') + @docker push ${TEST_DOCKER_REPO}:latest + +.PHONY: all build-linux install format lint go-mod-cache draw-deps clean build \ + docker-build-debug docker-build-hermes docker-build-all -.PHONY: proto-lint -proto-lint: - buf lint proto --config buf.yaml ############################################################################### -### Docker wrapped commands ### +### Protobuf ### ############################################################################### +protoVer=0.13.0 +protoImageName=ghcr.io/cosmos/proto-builder:$(protoVer) +protoImage=$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace $(protoImageName) -.PHONY: in-docker -in-docker: - docker build -t onomy-dev-utils ./dev/tools -f dev/tools/devtools.Dockerfile - docker run -i --rm \ - -v ${PWD}:/go/src/github.com/onomyprotocol/onomy:delegated \ - --mount source=dev-tools-cache,destination=/cache/,consistency=delegated onomy-dev-utils bash -x -c "\ - $(ARGS)" +proto-all: proto-format proto-lint proto-gen -.PHONY: lint-in-docker -lint-in-docker: - make in-docker ARGS="make lint" +proto-gen: + @echo "Generating Protobuf files" + @$(protoImage) sh ./proto/scripts/protocgen.sh + +proto-swagger-gen: + @echo "Generating Protobuf Swagger" + @$(protoImage) sh ./proto/scripts/protoc-swagger-gen.sh + +proto-format: + @$(protoImage) find ./ -name "*.proto" -exec clang-format -i {} \; + +proto-lint: + @$(protoImage) buf lint --error-format=json -.PHONY: format-in-docker -format-in-docker: - make in-docker ARGS="make format" +proto-check-breaking: + @$(protoImage) buf breaking --against $(HTTPS_GIT)#branch=main -.PHONY: all-in-docker -all-in-docker: - make in-docker ARGS="make all" +proto-update-deps: + @echo "Updating Protobuf dependencies" + $(DOCKER) run --rm -v $(CURDIR)/proto:/workspace --workdir /workspace $(protoImageName) buf mod update -.PHONY: proto-gen-all-in-docker -proto-gen-all-in-docker: - make in-docker ARGS="make proto-gen-all" \ No newline at end of file +.PHONY: proto-all proto-gen proto-swagger-gen proto-format proto-lint proto-check-breaking proto-update-deps diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md new file mode 100644 index 00000000..ebaa4603 --- /dev/null +++ b/RELEASE_NOTES.md @@ -0,0 +1,40 @@ +# Onomy v15.2.0 Release Notes + +***This is a special point release in the v15 release series.*** + +## 🕐 Timeline + +**This is a mandatory upgrade for all validators and full node operators.** +The upgrade height is [19939000](https://www.mintscan.io/cosmos/block/19939000), which is approx. April 10th 2024, 15:00 CET. + +## 📝 Changelog + +Check out the [changelog](https://github.com/onomyprotocol/onomy-rebuild/blob/v15.2.0/CHANGELOG.md) for a list of relevant changes or [compare all changes](https://github.com/onomyprotocol/onomy-rebuild/compare/v15.1.0...v15.2.0) from last release. + + +Refer to the [upgrading guide](https://github.com/onomyprotocol/onomy-rebuild/blob/release/v15.2.x/UPGRADING.md) when migrating from `v15.1.x` to `v15.2.x`. 
+ +## 🚀 Highlights + + + +This release fixes two issues identified after the v15 upgrade: + +- Increases x/gov metadata fields length to 10200. +- Fixes parsing of historic Txs with TxExtensionOptions. + +As both fixes are state breaking, a coordinated upgrade is necessary. + +## 🔨 Build from source + +You must use Golang `v1.21` if building from source. + +```bash +git clone https://github.com/onomyprotocol/onomy-rebuild +cd onomy-rebuild && git checkout v15.2.0 +make install +``` + +## ⚡️ Download binaries + +Binaries for linux, darwin, and windows are available below. \ No newline at end of file diff --git a/RELEASE_PROCESS.md b/RELEASE_PROCESS.md new file mode 100644 index 00000000..f0228aaa --- /dev/null +++ b/RELEASE_PROCESS.md @@ -0,0 +1,251 @@ +# Release Process + +- [Release Process](#release-process) + - [Breaking Changes](#breaking-changes) + - [Major Release Procedure](#major-release-procedure) + - [Changelog](#changelog) + - [Creating a new release branch](#creating-a-new-release-branch) + - [Cutting a new release](#cutting-a-new-release) + - [Update the changelog on main](#update-the-changelog-on-main) + - [Release Notes](#release-notes) + - [Tagging Procedure](#tagging-procedure) + - [Test building artifacts](#test-building-artifacts) + - [Installing goreleaser](#installing-goreleaser) + - [Non-major Release Procedure](#non-major-release-procedure) + - [Major Release Maintenance](#major-release-maintenance) + - [Stable Release Policy](#stable-release-policy) + + +This document outlines the release process for Onomy. + +Onomy follows [semantic versioning](https://semver.org), but with the following deviations to account for state-machine and API breaking changes: + +- State-machine breaking changes will result in an increase of the major version X (X.y.z). +- Emergency releases & API breaking changes will result in an increase of the minor version Y (x.Y.z | x > 0). +- All other changes will result in an increase of the patch version Z (x.y.Z | x > 0). + +**Note:** In case a major release is deprecated before ending up on the network (due to potential bugs), +it is replaced by a minor release (eg: `v14.0.0` → `v14.1.0`). +As a result, this minor release is considered state-machine breaking. + +### Breaking Changes + +A change is considered to be ***state-machine breaking*** if it requires a coordinated upgrade for the network to preserve [state compatibility](./STATE-COMPATIBILITY.md). +Note that when bumping the dependencies of [Cosmos SDK](https://github.com/cosmos/cosmos-sdk), [IBC](https://github.com/cosmos/ibc-go), and [ICS](https://github.com/cosmos/interchain-security) we will only treat patch releases as non state-machine breaking. + +A change is considered to be ***API breaking*** if it modifies the provided API. This includes events, queries, and CLI interfaces. + +## Major Release Procedure + +A _major release_ is an increment of the first number (eg: `v9.1.0` → `v10.0.0`). Each major release opens a _stable release series_ and receives updates outlined in the [Major Release Maintenance](#major-release-maintenance) section. + +**Note**: Generally, PRs should target either `main` or a long-lived feature branch (see [CONTRIBUTING.md](./CONTRIBUTING.md#pull-requests)). +An exception is PRs opened via the GitHub mergify integration (i.e., backported PRs).
+ +* Once the team feels that `main` is _**feature complete**_, we create a `release/vY` branch (going forward known as release branch), + where `Y` is the version number, with the minor and patch part substituted to `x` (eg: 11.x). + * Update the [GitHub mergify integration](./.mergify.yml) by adding instructions for automatically backporting commits from `main` to the `release/vY` using the `A:backport/vY` label. + * **PRs targeting directly a release branch can be merged _only_ when exceptional circumstances arise**. +* In the release branch + * Create a new version section in the `CHANGELOG.md` (follow the procedure described [below](#changelog)) + * Create release notes, in `RELEASE_NOTES.md`, highlighting the new features and changes in the version. + This is needed so the bot knows which entries to add to the release page on GitHub. + * (To be added in the future) ~~Additionally verify that the `UPGRADING.md` file is up to date and contains all the necessary information for upgrading to the new version.~~ +* We freeze the release branch from receiving any new features and focus on releasing a release candidate. + * Finish audits and reviews. + * Add more tests. + * Fix bugs as they are discovered. +* After the team feels that the release branch works fine (i.e., has `~90%` chance of reaching mainnet), we cut a release candidate. + * Create a new annotated git tag for a release candidate in the release branch (follow the [Tagging Procedure](#tagging-procedure)). + * The release verification on public testnets must pass. + * When bugs are found, create a PR for `main`, and backport fixes to the release branch. + * Create new release candidate tags after bugs are fixed. +* After the team feels the release candidate is mainnet ready, create a full release: + * **Note:** The final release MUST have the same commit hash as the latest corresponding release candidate. + * Create a new annotated git tag in the release branch (follow the [Tagging Procedure](#tagging-procedure)). This will trigger the automated release process (which will also create the release artifacts). + * Once the release process completes, modify release notes if needed. + +### Changelog + +For PRs that are changing production code, please add a changelog entry in `.changelog` (for details, see [contributing guidelines](./CONTRIBUTING.md#changelog)). + +To manage and generate the changelog on Onomy, we currently use [unclog](https://github.com/informalsystems/unclog). + +#### Creating a new release branch + +Unreleased changes are collected on `main` in `.changelog/unreleased/`. +However, `.changelog/` on `main` contains also existing releases (e.g., `v10.0.0`). 
+Thus, when creating a new release branch (e.g., `release/v11.x`), the following steps are necessary: + +- create a new release branch, e.g., `release/v11.x` + ```bash + git checkout main + git pull + git checkout -b release/v11.x + ``` +- delete all the sub-folders in `.changelog/` except `unreleased/` + ```bash + find ./.changelog -mindepth 1 -maxdepth 1 -type d -not -name unreleased | xargs rm -r + ``` +- replace the content of `.changelog/epilogue.md` with the following text + ```md + ## Previous Versions + + [CHANGELOG of previous versions](https://github.com/onomyprotocol/onomy-rebuild/blob/main/CHANGELOG.md) + ``` +- push the release branch upstream + ```bash + git push + ``` + +#### Cutting a new release + +Before cutting a _**release candidate**_ (e.g., `v11.0.0-rc0`), the following steps are necessary: + +- move to the release branch, e.g., `release/v11.x` + ```bash + git checkout release/v11.x + ``` +- move all entries in ".changelog/unreleased" to the release version, e.g., `v11.0.0`, i.e., + ```bash + unclog release v11.0.0 + ``` +- update `CHANGELOG.md`, i.e., + ```bash + unclog build > CHANGELOG.md + ``` +- open a PR (from this new created branch) against the release branch, e.g., `release/v11.x` + +Now you can cut the release candidate, e.g., v11.0.0-rc0 (follow the [Tagging Procedure](#tagging-procedure)). + +#### Update the changelog on main + +Once the **final release** is cut, the new changelog section must be added to main: + +- checkout a new branch from the `main` branch, i.e., + ```bash + git checkout main + git pull + git checkout -b /backport_changelog + ``` +- bring the new changelog section from the release branch into this branch, e.g., + ```bash + git checkout release/v11.x .changelog/v11.0.0 + ``` +- remove duplicate entries that are both in `.changelog/unreleased/` and the new changelog section, e.g., `.changelog/v11.0.0` +- update `CHANGELOG.md`, i.e., + ```bash + unclog build > CHANGELOG.md + ``` +- open a PR (from this new created branch) against `main` + +### Release Notes + +Release notes will be created using the `RELEASE_NOTES.md` from the release branch. +Once the automated releases process is completed, please add any missing information the release notes using Github UI. + +With every release, the `goreleaser` tool will create a file with all the build artifact checksums and upload it alongside the artifacts. +The file is called `SHA256SUMS-{{.version}}.txt` and contains the following: +``` +098b00ed78ca01456c388d7f1f22d09a93927d7a234429681071b45d94730a05 onomyd_0.0.4_windows_arm64.exe +15b2b9146d99426a64c19d219234cd0fa725589c7dc84e9d4dc4d531ccc58bec onomyd_0.0.4_darwin_amd64 +604912ee7800055b0a1ac36ed31021d2161d7404cea8db8776287eb512cd67a9 onomyd_0.0.4_darwin_arm64 +76e5ff7751d66807ee85bc5301484d0f0bcc5c90582d4ba1692acefc189392be onomyd_0.0.4_linux_arm64 +bcbca82da2cb2387ad6d24c1f6401b229a9b4752156573327250d37e5cc9bb1c onomyd_0.0.4_windows_amd64.exe +f39552cbfcfb2b06f1bd66fd324af54ac9ee06625cfa652b71eba1869efe8670 onomyd_0.0.4_linux_amd64 +``` + +### Tagging Procedure + +**Important**: _**Always create tags from your local machine**_ since all release tags should be signed and annotated. +Using Github UI will create a `lightweight` tag, so it's possible that `onomyd version` returns a commit hash, instead of a tag. +This is important because most operators build from source, and having incorrect information when you run `make install && onomyd version` raises confusion. 
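+ +As an optional sanity check (not part of the release tooling described above), you can verify that an existing tag is annotated and signed before pushing it; the commands below are plain git and reuse the example tag `v11.0.0` from this section: + +```bash +# an annotated tag is its own git object (prints "tag"); a lightweight tag resolves to "commit" +git cat-file -t v11.0.0 + +# verify the GPG signature on the annotated tag +git tag -v v11.0.0 +```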
+ +The following steps are the default for tagging a specific branch commit using git on your local machine. Usually, release branches are labeled `release/v*`: + +Ensure you have checked out the commit you wish to tag and then do: +```bash +git pull --tags + +# test tag creation and releasing using goreleaser +make create-release-dry-run TAG=v11.0.0 + +# after successful test push the tag +make create-release TAG=v11.0.0 +``` + +To re-create a tag: +```bash +# delete a tag locally +git tag -d v11.0.0 + +# push the deletion to the remote +git push --delete origin v11.0.0 + +# redo create-release +make create-release-dry-run TAG=v11.0.0 +make create-release TAG=v11.0.0 +``` + +#### Test building artifacts + +Before tagging a new version, please test building the release artifacts by running: + +```bash +TM_VERSION=$(go list -m github.com/tendermint/tendermint | sed 's:.* ::') goreleaser release --snapshot --clean --debug +``` + +#### Installing goreleaser +Check the instructions for installing goreleaser locally for your platform: +* https://goreleaser.com/install/ + + +## Non-major Release Procedure + +A _minor release_ is an increment of the _point number_ (eg: `v9.0.0 → v9.1.0`, also called _point release_). +A _patch release_ is an increment of the patch number (eg: `v10.0.0` → `v10.0.1`). + +**Important**: _**Non-major releases must not break consensus.**_ + +Updates to the release branch should come from `main` by backporting PRs +(usually done by an automatic cherry-pick followed by a PR to the release branch). +The backports must be marked using the `backport/Y` label in the PR for `main`. +It is the PR author's responsibility to fix merge conflicts, update changelog entries, and +ensure CI passes. If a PR originates from an external contributor, a member of the stewarding team assumes +responsibility to perform this process instead of the original author. +Lastly, it is the stewarding team's responsibility to ensure that the PR meets all the Stable Release Update (SRU) criteria. + +Non-major releases must follow the [Stable Release Policy](#stable-release-policy). + +After the release branch has all commits required for the next patch release: + +* Update the [changelog](#changelog) and the [release notes](#release-notes). +* Create a new annotated git tag in the release branch (follow the [Tagging Procedure](#tagging-procedure)). This will trigger the automated release process (which will also create the release artifacts). +* Once the release process completes, modify release notes if needed. + +## Major Release Maintenance + +Major release series continue to receive bug fixes (released as either a minor or a patch release) until they reach **End Of Life**. +Each major release series is maintained in compliance with the **Stable Release Policy** as described in this document. + +**Note**: Not every major release is denoted as a stable release. + +After two major releases, a supported major release will be transitioned to unsupported and will be deemed EOL with no further updates. +For example, `release/v10.x` is deemed EOL once the network upgrades to `release/v12.x`. + +## Stable Release Policy + +Once an Onomy release has been completed and published, updates for it are released under certain circumstances +and must follow the [Non-major Release Procedure](#non-major-release-procedure).
+ +The intention of the Stable Release Policy is to ensure that all major release series that are not EOL, +are maintained with the following categories of fixes: + +- Tooling improvements (including code formatting, linting, static analysis and updates to testing frameworks) +- Performance enhancements for running archival and synching nodes +- Test and benchmarking suites, ensuring that fixes are sound and there are no performance regressions +- Library updates including point releases for core libraries such as IBC-Go, Cosmos SDK, Tendermint and other dependencies +- General maintenance improvements, that are deemed necessary by the stewarding team, that help align different releases and reduce the workload on the stewarding team +- Security fixes + +Issues that are likely excluded, are any issues that impact operating a block producing network. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..850e6206 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,30 @@ +## How to Report a Security Bug + +If you believe you have found a security vulnerability in Onomy, +you can report it to our primary vulnerability disclosure channel, the +[Cosmos HackerOne Bug Bounty program](https://hackerone.com/cosmos?type=team). + +If you prefer to report an issue via email, you may send a bug report to +security@interchain.io with the issue details, reproduction, impact, and other +information. Please submit only one unique email thread per vulnerability. +Any issues reported via email are ineligible for bounty rewards. + +Artifacts from an email report are saved at the time the email is triaged. +Please note: our team is not able to monitor dynamic content (e.g. a Google +Docs link that is edited after receipt) throughout the lifecycle of a report. +If you would like to share additional information or modify previous +information, please include it in an additional reply as an additional attachment. + +***Please DO NOT file a public issue in this repository to report a security vulnerability.*** + + +## Coordinated Vulnerability Disclosure Policy and Safe Harbor + +For the most up-to-date version of the policies that govern vulnerability +disclosure, please consult the [HackerOne program page](https://hackerone.com/cosmos?type=team&view_policy=true). + +The policy hosted on HackerOne is the official Coordinated Vulnerability +Disclosure policy and Safe Harbor for the Interchain Stack, and the teams and +infrastructure it supports, and it supersedes previous security policies that +have been used in the past by individual teams and projects with targets in +scope of the program. 
diff --git a/STATE-COMPATIBILITY.md b/STATE-COMPATIBILITY.md new file mode 100644 index 00000000..262444f4 --- /dev/null +++ b/STATE-COMPATIBILITY.md @@ -0,0 +1,211 @@ +# State-Compatibility + +- [State-Compatibility](#state-compatibility) + - [Scope](#scope) + - [Validating State-Compatibility](#validating-state-compatibility) + - [AppHash](#apphash) + - [LastResultsHash](#lastresultshash) + - [Major Sources of State-incompatibility](#major-sources-of-state-incompatibility) + - [Creating Additional State](#creating-additional-state) + - [Changing Proto Field Definitions](#changing-proto-field-definitions) + - [Returning Different Errors Given Same Input](#returning-different-errors-given-same-input) + - [Variability in Gas Usage](#variability-in-gas-usage) + - [Secondary Limitations To Keep In Mind](#secondary-limitations-to-keep-in-mind) + - [Network Requests to External Services](#network-requests-to-external-services) + - [Randomness](#randomness) + - [Parallelism and Shared State](#parallelism-and-shared-state) + - [Hardware Errors](#hardware-errors) + + +It is critical for the patch and minor releases to be state-machine compatible with prior releases in the same minor version. +For example, v13.0.2 must be state-machine compatible with v13.0.1. +_An exception are minor releases that are either emergency releases or replacements of deprecated major releases_. + +This is to ensure **determinism**, i.e., given the same input, the nodes will always produce the same output. + +State-incompatibility is allowed for major upgrades because all nodes in the network perform it at the same time. Therefore, after the upgrade, the nodes continue functioning in a deterministic way. + +## Scope + +The state-machine scope includes the following areas: + +- All ICS messages including: + - Every msg's ValidateBasic method + - Every msg's MsgServer method + - Net gas usage, in all execution paths + - Error result returned + - State changes (namely every store write) +- AnteHandlers in "DeliverTx" mode +- All `BeginBlock`/`EndBlock` logic + +The following are **NOT** in the state-machine scope: + +- Events +- Queries that are not whitelisted +- CLI interfaces + +## Validating State-Compatibility + +CometBFT ensures state compatibility by validating a number of hashes that can be found [here](https://github.com/cometbft/cometbft/blob/v0.38.2/proto/tendermint/types/types.proto#L59-L66). + +`AppHash` and `LastResultsHash` are the common sources of problems stemming from our work. +To avoid these problems, let's now examine how these hashes work. + +### AppHash + +**Note:** The following explanation is simplified for clarity. + +An app hash is a hash of hashes of every store's Merkle root that is returned by ABCI's `Commit()` from Cosmos-SDK to CometBFT. +Cosmos-SDK [takes an app hash of the application state](https://github.com/cosmos/cosmos-sdk/blob/v0.47.6/store/rootmulti/store.go#L468), and propagates it to CometBFT which, in turn, compares it to the app hash of the rest of the network. +Then, CometBFT ensures that the app hash of the local node matches the app hash of the network. + +### LastResultsHash + +`LastResultsHash` is the root hash of all results from the transactions in the block returned by the ABCI's `DeliverTx`. + +The [`LastResultsHash`](https://github.com/cometbft/cometbft/blob/v0.34.29/types/results.go#L47-L54) +in CometBFT [v0.34.29](https://github.com/cometbft/cometbft/releases/tag/v0.34.29) contains: + +1. Tx `GasWanted` + +2. 
Tx `GasUsed` + > `GasUsed` being Merkelized means that we cannot freely reorder methods that consume gas. + > We should also be careful of modifying any validation logic since changing the + > locations where we error or pass might affect transaction gas usage. + > + > There are plans to remove this field from being Merkelized in a subsequent CometBFT release, + > at which point we will have more flexibility in reordering operations / erroring. + +3. Tx response `Data` + + > The `Data` field includes the proto marshalled Tx response. Therefore, we cannot + > change these in patch releases. + +4. Tx response `Code` + + > This is an error code that is returned by the transaction flow. In the case of + > success, it is `0`. On a general error, it is `1`. Additionally, each module + > defines its custom error codes. + > + > As a result, it is important to avoid changing custom error codes or change + > the semantics of what is valid logic in transaction flows. + +Note that all of the above stem from `DeliverTx` execution path, which handles: + +- `AnteHandler`'s marked as deliver tx +- `msg.ValidateBasic` +- execution of a message from the message server + +The `DeliverTx` return back to the CometBFT is defined [here](https://github.com/cosmos/cosmos-sdk/blob/d11196aad04e57812dbc5ac6248d35375e6603af/baseapp/abci.go#L293-L303). + +## Major Sources of State-incompatibility + +### Creating Additional State + +By erroneously creating database entries that exist in Version A but not in +Version B, we can cause the app hash to differ across nodes running +these versions in the network. Therefore, this must be avoided. + +### Changing Proto Field Definitions + +For example, if we change a field that gets persisted to the database, +the app hash will differ across nodes running these versions in the network. + +Additionally, this affects `LastResultsHash` because it contains a `Data` field that is a marshaled proto message. + +### Returning Different Errors Given Same Input + +```go +// Version A +func (sk Keeper) validateAmount(ctx context.Context, amount math.Int) error { + if amount.IsNegative() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount must be positive or zero") + } + return nil +} +``` + +```go +// Version B +func (sk Keeper) validateAmount(ctx context.Context, amount math.Int) error { + if amount.IsNegative() || amount.IsZero() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "amount must be positive") + } + return nil +} +``` + +Note that now an amount of 0 can be valid in "Version A", but invalid in "Version B". +Therefore, if some nodes are running "Version A" and others are running "Version B", +the final app hash might not be deterministic. + +Additionally, a different error message does not matter because it +is not included in any hash. However, an error code `sdkerrors.ErrInvalidRequest` does. +It translates to the `Code` field in the `LastResultsHash` and participates in +its validation. + +### Variability in Gas Usage + +For transaction flows (or any other flow that consumes gas), it is important +that the gas usage is deterministic. + +Currently, gas usage is being Merklized in the state. As a result, reordering functions +becomes risky. + +Suppose my gas limit is 2000 and 1600 is used up before entering +`someInternalMethod`. 
Consider the following: + +```go +func someInternalMethod(ctx sdk.Context) { + object1 := readOnlyFunction1(ctx) // consumes 1000 gas + object2 := readOnlyFunction2(ctx) // consumes 500 gas + doStuff(ctx, object1, object2) +} +``` + +- It will run out of gas with `gasUsed = 2600`, where 2600 gets merkleized +into the tx results. + +```go +func someInternalMethod(ctx sdk.Context) { + object2 := readOnlyFunction2(ctx) // consumes 500 gas + object1 := readOnlyFunction1(ctx) // consumes 1000 gas + doStuff(ctx, object1, object2) +} +``` + +- It will run out of gas with `gasUsed = 2100`, where 2100 gets merkleized +into the tx results. + +Therefore, we would introduce a state-incompatibility by merkleizing diverging gas +usage. + +## Secondary Limitations To Keep In Mind + +### Network Requests to External Services + +It is critical to avoid performing network requests to external services +since it is common for services to be unavailable or to rate-limit requests. + +Imagine a service that returns exchange rates when clients query its HTTP endpoint. +This service might experience downtime or be restricted in some geographical areas. + +As a result, nodes may get diverging responses where some +get successful responses while others get errors, leading to state breakage. + +### Randomness + +Randomness cannot be used in the state machine, as the state machine must be deterministic. + +**Note:** Iteration order over `map`s is non-deterministic, so to be deterministic +you must gather the keys and sort them before iterating over the values. + +### Parallelism and Shared State + +Threads and Goroutines might preempt differently on different hardware. Therefore, +they should be avoided for the sake of determinism. Additionally, it is hard +to predict when the multi-threaded state can be updated. + +### Hardware Errors + +This is out of the developer's control but is mentioned for completeness. \ No newline at end of file diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 00000000..c7ba9bac --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,160 @@ +# Upgrade Onomy from v15.1.0 to v15.2.0 + +## This is a coordinated upgrade. IT IS CONSENSUS BREAKING, so please apply the fix only on height 19939000. + +### Release Details +* https://github.com/onomyprotocol/onomy-rebuild/releases/tag/v15.2.0 +* Chain upgrade height: `19939000`. Exact upgrade time can be checked [here](https://www.mintscan.io/cosmos/block/19939000). +* Go version has been frozen at `1.21`. If you are going to build the `onomyd` binary from source, make sure you are using the right Go version! + +# Performing the coordinated upgrade + +This coordinated upgrade requires validators to stop their nodes at `halt-height`, switch the binary to `v15.2.0`, and restart their nodes with the new version. + +The exact sequence of steps depends on your configuration. Please take care to modify your configuration appropriately if your setup is not included in the instructions. + +# Manual steps + +## Step 1: Configure `halt-height` using v15.1.0 and restart the node. + +This upgrade requires `onomyd` to halt execution at a pre-selected `halt-height`. Failing to stop at `halt-height` may cause a consensus failure during chain execution at a later time. + +There are two mutually exclusive options for this stage: + +### Option 1: Set the halt height by modifying `app.toml` + +* Stop the onomyd process.
+ +* Edit the application configuration file at `~/.onomy/config/app.toml` so that `halt-height` reflects the upgrade plan: + +```toml +# Note: Commitment of state will be attempted on the corresponding block. +halt-height = 19939000 +``` +* Restart the onomyd process + +* Wait for the upgrade height and confirm that the node has halted + +### Option 2: Restart the `onomyd` binary with command line flags + +* Stop the onomyd process. + +* Do not modify `app.toml`. Restart the `onomyd` process with the flag `--halt-height`: +```shell +onomyd start --halt-height 19939000 +``` + +* Wait for the upgrade height and confirm that the node has halted + +Upon reaching the `halt-height`, you need to replace the `v15.1.0` onomyd binary with the new `onomyd v15.2.0` binary and remove the `halt-height` constraint. +Depending on your setup, you may need to set `halt-height = 0` in your `app.toml` before resuming operations. +If you have not cloned the repository yet, do so now: +```shell + git clone https://github.com/onomyprotocol/onomy-rebuild.git +``` + +## Step 2: Build and start the v15.2.0 binary + +### Remember to revert `onomyd` configurations +* Reset `halt-height = 0` option in the `app.toml`, or +* Remove it from start parameters of the onomyd binary before restarting the node + +We recommend you perform a backup of your data directory before switching to `v15.2.0`. + +```shell +cd $HOME/onomy-rebuild +git pull +git fetch --tags +git checkout v15.2.0 +make install + +# verify install +onomyd version +# v15.2.0 +``` + +```shell +onomyd start # starts the v15.2.0 node +``` + +# Cosmovisor steps + +## Prerequisite: Alter systemd service configuration + +Disable automatic restart of the node service. To do so, please alter your `onomyd.service` file configuration and set the appropriate lines to the following values. + +``` +Restart=no + +Environment="DAEMON_ALLOW_DOWNLOAD_BINARIES=false" +Environment="DAEMON_RESTART_AFTER_UPGRADE=false" +``` + +After that, you will need to run `sudo systemctl daemon-reload` to apply the changes to the service configuration. + +There is no need to restart the node yet; these changes will get applied during the node restart in the next step. + +## Setup Cosmovisor +### Create the updated onomyd v15.2.0 binary + +### Remember to revert `onomyd` configurations +* Reset `halt-height = 0` option in the `app.toml`, or +* Remove it from start parameters of the onomyd binary before starting the node + +#### Go to the onomyd repository directory if present, otherwise clone it + +```shell + git clone https://github.com/onomyprotocol/onomy-rebuild.git +``` + +#### Follow these steps if the onomyd repo is already present + +```shell + cd $HOME/onomy-rebuild + git pull + git fetch --tags + git checkout v15.2.0 + make install +``` + +#### Check the new onomyd version, verify the latest commit hash +```shell + $ onomyd version --long + name: onomyd + server_name: onomyd + version: 15.2.0 + commit: + ... +``` + +#### Or check the checksum of the binary if you decided to download it + +Checksums can be found on the official release page: +* https://github.com/onomyprotocol/onomy-rebuild/releases/tag/v15.2.0 + +The checksums file is located in the `Assets` section: +* e.g.
[SHA256SUMS-v15.2.0.txt](https://github.com/onomyprotocol/onomy-rebuild/releases/download/v15.2.0/SHA256SUMS-v15.2.0.txt) + +```shell +$ shasum -a 256 onomyd-v15.2.0-linux-amd64 + onomyd-v15.2.0-linux-amd64 +``` + +### Copy the new onomyd (v15.2.0) binary to cosmovisor current directory +```shell + cp $GOPATH/bin/onomyd ~/.onomyd/cosmovisor/current/bin +``` + +### Restore service file settings + +If you are using a service file, restore the previous `Restart` settings in your service file: +``` +Restart=On-failure +``` +Reload the service control `sudo systemctl daemon-reload`. + +# Revert `onomyd` configurations + +Depending on which path you chose for Step 1, either: + +* Reset `halt-height = 0` option in the `app.toml` or +* Remove it from start parameters of the onomyd binary and start node again \ No newline at end of file diff --git a/ante/ante.go b/ante/ante.go new file mode 100644 index 00000000..79ea8873 --- /dev/null +++ b/ante/ante.go @@ -0,0 +1,76 @@ +package ante + +import ( + ibcante "github.com/cosmos/ibc-go/v7/modules/core/ante" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + + errorsmod "cosmossdk.io/errors" + + "github.com/cosmos/cosmos-sdk/codec" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/x/auth/ante" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + + onomyerrors "github.com/onomyprotocol/onomy-rebuild/types/errors" + onomyfeeante "github.com/onomyprotocol/onomy-rebuild/x/globalfee/ante" +) + +// HandlerOptions extend the SDK's AnteHandler options by requiring the IBC +// channel keeper. +type HandlerOptions struct { + ante.HandlerOptions + Codec codec.BinaryCodec + IBCkeeper *ibckeeper.Keeper + GlobalFeeSubspace paramtypes.Subspace + StakingKeeper *stakingkeeper.Keeper + TxFeeChecker ante.TxFeeChecker +} + +func NewAnteHandler(opts HandlerOptions) (sdk.AnteHandler, error) { + if opts.AccountKeeper == nil { + return nil, errorsmod.Wrap(onomyerrors.ErrLogic, "account keeper is required for AnteHandler") + } + if opts.BankKeeper == nil { + return nil, errorsmod.Wrap(onomyerrors.ErrLogic, "bank keeper is required for AnteHandler") + } + if opts.SignModeHandler == nil { + return nil, errorsmod.Wrap(onomyerrors.ErrLogic, "sign mode handler is required for AnteHandler") + } + if opts.IBCkeeper == nil { + return nil, errorsmod.Wrap(onomyerrors.ErrLogic, "IBC keeper is required for AnteHandler") + } + + if opts.GlobalFeeSubspace.Name() == "" { + return nil, errorsmod.Wrap(onomyerrors.ErrNotFound, "globalfee param store is required for AnteHandler") + } + + if opts.StakingKeeper == nil { + return nil, errorsmod.Wrap(onomyerrors.ErrNotFound, "staking param store is required for AnteHandler") + } + + sigGasConsumer := opts.SigGasConsumer + if sigGasConsumer == nil { + sigGasConsumer = ante.DefaultSigVerificationGasConsumer + } + + anteDecorators := []sdk.AnteDecorator{ + ante.NewSetUpContextDecorator(), // outermost AnteDecorator. 
SetUpContext must be called first + ante.NewExtensionOptionsDecorator(opts.ExtensionOptionChecker), + ante.NewValidateBasicDecorator(), + ante.NewTxTimeoutHeightDecorator(), + ante.NewValidateMemoDecorator(opts.AccountKeeper), + ante.NewConsumeGasForTxSizeDecorator(opts.AccountKeeper), + NewGovVoteDecorator(opts.Codec, opts.StakingKeeper), + onomyfeeante.NewFeeDecorator(opts.GlobalFeeSubspace, opts.StakingKeeper), + ante.NewDeductFeeDecorator(opts.AccountKeeper, opts.BankKeeper, opts.FeegrantKeeper, opts.TxFeeChecker), + ante.NewSetPubKeyDecorator(opts.AccountKeeper), // SetPubKeyDecorator must be called before all signature verification decorators + ante.NewValidateSigCountDecorator(opts.AccountKeeper), + ante.NewSigGasConsumeDecorator(opts.AccountKeeper, sigGasConsumer), + ante.NewSigVerificationDecorator(opts.AccountKeeper, opts.SignModeHandler), + ante.NewIncrementSequenceDecorator(opts.AccountKeeper), + ibcante.NewRedundantRelayDecorator(opts.IBCkeeper), + } + + return sdk.ChainAnteDecorators(anteDecorators...), nil +} diff --git a/ante/gov_vote_ante.go b/ante/gov_vote_ante.go new file mode 100644 index 00000000..727347a8 --- /dev/null +++ b/ante/gov_vote_ante.go @@ -0,0 +1,141 @@ +package ante + +import ( + errorsmod "cosmossdk.io/errors" + + "github.com/cosmos/cosmos-sdk/codec" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/x/authz" + govv1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1" + govv1beta1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + onomyerrors "github.com/onomyprotocol/onomy-rebuild/types/errors" +) + +var ( + minStakedTokens = sdk.NewDec(1000000) // 1_000_000 uatom (or 1 atom) + maxDelegationsChecked = 100 // number of delegation to check for the minStakedTokens +) + +// SetMinStakedTokens sets the minimum amount of staked tokens required to vote +// Should only be used in testing +func SetMinStakedTokens(tokens sdk.Dec) { + minStakedTokens = tokens +} + +type GovVoteDecorator struct { + stakingKeeper *stakingkeeper.Keeper + cdc codec.BinaryCodec +} + +func NewGovVoteDecorator(cdc codec.BinaryCodec, stakingKeeper *stakingkeeper.Keeper) GovVoteDecorator { + return GovVoteDecorator{ + stakingKeeper: stakingKeeper, + cdc: cdc, + } +} + +func (g GovVoteDecorator) AnteHandle( + ctx sdk.Context, tx sdk.Tx, + simulate bool, next sdk.AnteHandler, +) (newCtx sdk.Context, err error) { + // do not run check during simulations + if simulate { + return next(ctx, tx, simulate) + } + + msgs := tx.GetMsgs() + if err = g.ValidateVoteMsgs(ctx, msgs); err != nil { + return ctx, err + } + + return next(ctx, tx, simulate) +} + +// ValidateVoteMsgs checks if a voter has enough stake to vote +func (g GovVoteDecorator) ValidateVoteMsgs(ctx sdk.Context, msgs []sdk.Msg) error { + validMsg := func(m sdk.Msg) error { + var accAddr sdk.AccAddress + var err error + + switch msg := m.(type) { + case *govv1beta1.MsgVote: + accAddr, err = sdk.AccAddressFromBech32(msg.Voter) + if err != nil { + return err + } + case *govv1.MsgVote: + accAddr, err = sdk.AccAddressFromBech32(msg.Voter) + if err != nil { + return err + } + default: + // not a vote message - nothing to validate + return nil + } + + if minStakedTokens.IsZero() { + return nil + } + + enoughStake := false + delegationCount := 0 + stakedTokens := sdk.NewDec(0) + g.stakingKeeper.IterateDelegatorDelegations(ctx, accAddr, func(delegation stakingtypes.Delegation) bool { + validatorAddr, 
err := sdk.ValAddressFromBech32(delegation.ValidatorAddress) + if err != nil { + panic(err) // shouldn't happen + } + validator, found := g.stakingKeeper.GetValidator(ctx, validatorAddr) + if found { + shares := delegation.Shares + tokens := validator.TokensFromSharesTruncated(shares) + stakedTokens = stakedTokens.Add(tokens) + if stakedTokens.GTE(minStakedTokens) { + enoughStake = true + return true // break the iteration + } + } + delegationCount++ + // break the iteration if maxDelegationsChecked were already checked + return delegationCount >= maxDelegationsChecked + }) + + if !enoughStake { + return errorsmod.Wrapf(onomyerrors.ErrInsufficientStake, "insufficient stake for voting - min required %v", minStakedTokens) + } + + return nil + } + + validAuthz := func(execMsg *authz.MsgExec) error { + for _, v := range execMsg.Msgs { + var innerMsg sdk.Msg + if err := g.cdc.UnpackAny(v, &innerMsg); err != nil { + return errorsmod.Wrap(onomyerrors.ErrUnauthorized, "cannot unmarshal authz exec msgs") + } + if err := validMsg(innerMsg); err != nil { + return err + } + } + + return nil + } + + for _, m := range msgs { + if msg, ok := m.(*authz.MsgExec); ok { + if err := validAuthz(msg); err != nil { + return err + } + continue + } + + // validate normal msgs + if err := validMsg(m); err != nil { + return err + } + } + return nil +} diff --git a/ante/gov_vote_ante_test.go b/ante/gov_vote_ante_test.go new file mode 100644 index 00000000..42519b62 --- /dev/null +++ b/ante/gov_vote_ante_test.go @@ -0,0 +1,249 @@ +package ante_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" + + "cosmossdk.io/math" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + govv1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1" + govv1beta1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + "github.com/onomyprotocol/onomy-rebuild/ante" + "github.com/onomyprotocol/onomy-rebuild/app/helpers" +) + +// Test that the GovVoteDecorator rejects v1beta1 vote messages from accounts with less than 1 atom staked +// Submitting v1beta1.VoteMsg should not be possible through the CLI, but it's still possible to craft a transaction +func TestVoteSpamDecoratorGovV1Beta1(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + decorator := ante.NewGovVoteDecorator(onomyApp.AppCodec(), onomyApp.StakingKeeper) + stakingKeeper := onomyApp.StakingKeeper + + // Get validator + valAddr1 := stakingKeeper.GetAllValidators(ctx)[0].GetOperator() + + // Create one more validator + pk := ed25519.GenPrivKeyFromSecret([]byte{uint8(13)}).PubKey() + validator2, err := stakingtypes.NewValidator( + sdk.ValAddress(pk.Address()), + pk, + stakingtypes.Description{}, + ) + valAddr2 := validator2.GetOperator() + require.NoError(t, err) + // Make sure the validator is bonded so it's not removed on Undelegate + validator2.Status = stakingtypes.Bonded + stakingKeeper.SetValidator(ctx, validator2) + err = stakingKeeper.SetValidatorByConsAddr(ctx, validator2) + require.NoError(t, err) + stakingKeeper.SetNewValidatorByPowerIndex(ctx, validator2) + err = stakingKeeper.Hooks().AfterValidatorCreated(ctx, validator2.GetOperator()) + require.NoError(t, err) + + // Get delegator (this account was created during setup) + addr := onomyApp.AccountKeeper.GetAccountAddressByID(ctx, 0) + delegator, err := sdk.AccAddressFromBech32(addr) 
+ require.NoError(t, err) + + tests := []struct { + name string + bondAmt math.Int + validators []sdk.ValAddress + expectPass bool + }{ + { + name: "delegate 0 atom", + bondAmt: sdk.ZeroInt(), + validators: []sdk.ValAddress{valAddr1}, + expectPass: false, + }, + { + name: "delegate 0.1 atom", + bondAmt: sdk.NewInt(100000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: false, + }, + { + name: "delegate 1 atom", + bondAmt: sdk.NewInt(1000000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: true, + }, + { + name: "delegate 1 atom to two validators", + bondAmt: sdk.NewInt(1000000), + validators: []sdk.ValAddress{valAddr1, valAddr2}, + expectPass: true, + }, + { + name: "delegate 0.9 atom to two validators", + bondAmt: sdk.NewInt(900000), + validators: []sdk.ValAddress{valAddr1, valAddr2}, + expectPass: false, + }, + { + name: "delegate 10 atom", + bondAmt: sdk.NewInt(10000000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: true, + }, + } + + for _, tc := range tests { + // Unbond all tokens for this delegator + delegations := stakingKeeper.GetAllDelegatorDelegations(ctx, delegator) + for _, del := range delegations { + _, err := stakingKeeper.Undelegate(ctx, delegator, del.GetValidatorAddr(), del.GetShares()) + require.NoError(t, err) + } + + // Delegate tokens + if !tc.bondAmt.IsZero() { + amt := tc.bondAmt.Quo(sdk.NewInt(int64(len(tc.validators)))) + for _, valAddr := range tc.validators { + val, found := stakingKeeper.GetValidator(ctx, valAddr) + require.True(t, found) + _, err := stakingKeeper.Delegate(ctx, delegator, amt, stakingtypes.Unbonded, val, true) + require.NoError(t, err) + } + } + + // Create vote message + msg := govv1beta1.NewMsgVote( + delegator, + 0, + govv1beta1.OptionYes, + ) + + // Validate vote message + err := decorator.ValidateVoteMsgs(ctx, []sdk.Msg{msg}) + if tc.expectPass { + require.NoError(t, err, "expected %v to pass", tc.name) + } else { + require.Error(t, err, "expected %v to fail", tc.name) + } + } +} + +// Test that the GovVoteDecorator rejects v1 vote messages from accounts with less than 1 atom staked +// Usually, only v1.VoteMsg can be submitted using the CLI. 
+func TestVoteSpamDecoratorGovV1(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + decorator := ante.NewGovVoteDecorator(onomyApp.AppCodec(), onomyApp.StakingKeeper) + stakingKeeper := onomyApp.StakingKeeper + + // Get validator + valAddr1 := stakingKeeper.GetAllValidators(ctx)[0].GetOperator() + + // Create one more validator + pk := ed25519.GenPrivKeyFromSecret([]byte{uint8(13)}).PubKey() + validator2, err := stakingtypes.NewValidator( + sdk.ValAddress(pk.Address()), + pk, + stakingtypes.Description{}, + ) + valAddr2 := validator2.GetOperator() + require.NoError(t, err) + // Make sure the validator is bonded so it's not removed on Undelegate + validator2.Status = stakingtypes.Bonded + stakingKeeper.SetValidator(ctx, validator2) + err = stakingKeeper.SetValidatorByConsAddr(ctx, validator2) + require.NoError(t, err) + stakingKeeper.SetNewValidatorByPowerIndex(ctx, validator2) + err = stakingKeeper.Hooks().AfterValidatorCreated(ctx, validator2.GetOperator()) + require.NoError(t, err) + + // Get delegator (this account was created during setup) + addr := onomyApp.AccountKeeper.GetAccountAddressByID(ctx, 0) + delegator, err := sdk.AccAddressFromBech32(addr) + require.NoError(t, err) + + tests := []struct { + name string + bondAmt math.Int + validators []sdk.ValAddress + expectPass bool + }{ + { + name: "delegate 0 atom", + bondAmt: sdk.ZeroInt(), + validators: []sdk.ValAddress{valAddr1}, + expectPass: false, + }, + { + name: "delegate 0.1 atom", + bondAmt: sdk.NewInt(100000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: false, + }, + { + name: "delegate 1 atom", + bondAmt: sdk.NewInt(1000000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: true, + }, + { + name: "delegate 1 atom to two validators", + bondAmt: sdk.NewInt(1000000), + validators: []sdk.ValAddress{valAddr1, valAddr2}, + expectPass: true, + }, + { + name: "delegate 0.9 atom to two validators", + bondAmt: sdk.NewInt(900000), + validators: []sdk.ValAddress{valAddr1, valAddr2}, + expectPass: false, + }, + { + name: "delegate 10 atom", + bondAmt: sdk.NewInt(10000000), + validators: []sdk.ValAddress{valAddr1}, + expectPass: true, + }, + } + + for _, tc := range tests { + // Unbond all tokens for this delegator + delegations := stakingKeeper.GetAllDelegatorDelegations(ctx, delegator) + for _, del := range delegations { + _, err := stakingKeeper.Undelegate(ctx, delegator, del.GetValidatorAddr(), del.GetShares()) + require.NoError(t, err) + } + + // Delegate tokens + if !tc.bondAmt.IsZero() { + amt := tc.bondAmt.Quo(sdk.NewInt(int64(len(tc.validators)))) + for _, valAddr := range tc.validators { + val, found := stakingKeeper.GetValidator(ctx, valAddr) + require.True(t, found) + _, err := stakingKeeper.Delegate(ctx, delegator, amt, stakingtypes.Unbonded, val, true) + require.NoError(t, err) + } + } + + // Create vote message + msg := govv1.NewMsgVote( + delegator, + 0, + govv1.VoteOption_VOTE_OPTION_YES, + "new-v1-vote-message-test", + ) + + // Validate vote message + err := decorator.ValidateVoteMsgs(ctx, []sdk.Msg{msg}) + if tc.expectPass { + require.NoError(t, err, "expected %v to pass", tc.name) + } else { + require.Error(t, err, "expected %v to fail", tc.name) + } + } +} diff --git a/app/app.go b/app/app.go index abb7b5ec..4bb80c71 100644 --- a/app/app.go +++ b/app/app.go @@ -1,4 +1,4 @@ -package app +package onomy import ( "fmt" @@ -7,658 +7,278 @@ import ( "os" "path/filepath" + "github.com/gorilla/mux" + "github.com/rakyll/statik/fs" + 
"github.com/spf13/cast" + + // unnamed import of statik for swagger UI support + _ "github.com/cosmos/cosmos-sdk/client/docs/statik" + + dbm "github.com/cometbft/cometbft-db" + abci "github.com/cometbft/cometbft/abci/types" + tmjson "github.com/cometbft/cometbft/libs/json" + "github.com/cometbft/cometbft/libs/log" + tmos "github.com/cometbft/cometbft/libs/os" + + ibctesting "github.com/cosmos/ibc-go/v7/testing" + providertypes "github.com/cosmos/interchain-security/v3/x/ccv/provider/types" + + autocliv1 "cosmossdk.io/api/cosmos/autocli/v1" + reflectionv1 "cosmossdk.io/api/cosmos/reflection/v1" + errorsmod "cosmossdk.io/errors" + "github.com/cosmos/cosmos-sdk/baseapp" "github.com/cosmos/cosmos-sdk/client" + nodeservice "github.com/cosmos/cosmos-sdk/client/grpc/node" "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" - "github.com/cosmos/cosmos-sdk/client/rpc" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + runtimeservices "github.com/cosmos/cosmos-sdk/runtime/services" + "github.com/cosmos/cosmos-sdk/server" "github.com/cosmos/cosmos-sdk/server/api" "github.com/cosmos/cosmos-sdk/server/config" servertypes "github.com/cosmos/cosmos-sdk/server/types" - "github.com/cosmos/cosmos-sdk/simapp" - storetypes "github.com/cosmos/cosmos-sdk/store/types" + "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/cosmos/cosmos-sdk/types/module" "github.com/cosmos/cosmos-sdk/version" - "github.com/cosmos/cosmos-sdk/x/auth" "github.com/cosmos/cosmos-sdk/x/auth/ante" - authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" - authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" - authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" - "github.com/cosmos/cosmos-sdk/x/auth/vesting" - vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" - "github.com/cosmos/cosmos-sdk/x/authz" - authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" - authzmodule "github.com/cosmos/cosmos-sdk/x/authz/module" - "github.com/cosmos/cosmos-sdk/x/bank" - bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" - banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" - "github.com/cosmos/cosmos-sdk/x/capability" - capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" - capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" "github.com/cosmos/cosmos-sdk/x/crisis" - crisiskeeper "github.com/cosmos/cosmos-sdk/x/crisis/keeper" - crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" - distr "github.com/cosmos/cosmos-sdk/x/distribution" - distrclient "github.com/cosmos/cosmos-sdk/x/distribution/client" - distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" - distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" - "github.com/cosmos/cosmos-sdk/x/evidence" - evidencekeeper "github.com/cosmos/cosmos-sdk/x/evidence/keeper" - evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" - "github.com/cosmos/cosmos-sdk/x/feegrant" - feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" - feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" - "github.com/cosmos/cosmos-sdk/x/genutil" - genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" - "github.com/cosmos/cosmos-sdk/x/gov" - govclient "github.com/cosmos/cosmos-sdk/x/gov/client" - govkeeper 
"github.com/cosmos/cosmos-sdk/x/gov/keeper" govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" - "github.com/cosmos/cosmos-sdk/x/mint" - mintkeeper "github.com/cosmos/cosmos-sdk/x/mint/keeper" - minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" - "github.com/cosmos/cosmos-sdk/x/params" - paramsclient "github.com/cosmos/cosmos-sdk/x/params/client" - paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" - paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" - paramproposal "github.com/cosmos/cosmos-sdk/x/params/types/proposal" - "github.com/cosmos/cosmos-sdk/x/slashing" - slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" - slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" - "github.com/cosmos/cosmos-sdk/x/staking" - stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" - stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - "github.com/cosmos/cosmos-sdk/x/upgrade" - upgradeclient "github.com/cosmos/cosmos-sdk/x/upgrade/client" - upgradekeeper "github.com/cosmos/cosmos-sdk/x/upgrade/keeper" upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" - "github.com/cosmos/ibc-go/v4/modules/apps/transfer" - ibctransferkeeper "github.com/cosmos/ibc-go/v4/modules/apps/transfer/keeper" - ibctransfertypes "github.com/cosmos/ibc-go/v4/modules/apps/transfer/types" - ibc "github.com/cosmos/ibc-go/v4/modules/core" - ibcclient "github.com/cosmos/ibc-go/v4/modules/core/02-client" - ibcclientclient "github.com/cosmos/ibc-go/v4/modules/core/02-client/client" - ibcclienttypes "github.com/cosmos/ibc-go/v4/modules/core/02-client/types" - ibcporttypes "github.com/cosmos/ibc-go/v4/modules/core/05-port/types" - ibchost "github.com/cosmos/ibc-go/v4/modules/core/24-host" - ibckeeper "github.com/cosmos/ibc-go/v4/modules/core/keeper" - "github.com/spf13/cast" - "github.com/tendermint/starport/starport/pkg/cosmoscmd" - "github.com/tendermint/starport/starport/pkg/openapiconsole" - abci "github.com/tendermint/tendermint/abci/types" - tmjson "github.com/tendermint/tendermint/libs/json" - "github.com/tendermint/tendermint/libs/log" - tmos "github.com/tendermint/tendermint/libs/os" - dbm "github.com/tendermint/tm-db" - - ibcprovider "github.com/cosmos/interchain-security/x/ccv/provider" - ibcproviderclient "github.com/cosmos/interchain-security/x/ccv/provider/client" - ibcproviderkeeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" - providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" - - v1_0_1 "github.com/onomyprotocol/onomy/app/upgrades/v1.0.1" - v1_0_3 "github.com/onomyprotocol/onomy/app/upgrades/v1.0.3" - v1_0_3_4 "github.com/onomyprotocol/onomy/app/upgrades/v1.0.3.4" - v1_0_3_5 "github.com/onomyprotocol/onomy/app/upgrades/v1.0.3.5" - v1_1_1 "github.com/onomyprotocol/onomy/app/upgrades/v1.1.1" - v1_1_2 "github.com/onomyprotocol/onomy/app/upgrades/v1.1.2" - v1_1_4 "github.com/onomyprotocol/onomy/app/upgrades/v1.1.4" - "github.com/onomyprotocol/onomy/docs" - "github.com/onomyprotocol/onomy/x/dao" - daoclient "github.com/onomyprotocol/onomy/x/dao/client" - daokeeper "github.com/onomyprotocol/onomy/x/dao/keeper" - daotypes "github.com/onomyprotocol/onomy/x/dao/types" -) -const ( - // AccountAddressPrefix is cosmos-sdk accounts prefixes. - AccountAddressPrefix = "onomy" - // Name is the name of the onomy chain. 
- Name = "onomy" + onomyante "github.com/onomyprotocol/onomy-rebuild/ante" + "github.com/onomyprotocol/onomy-rebuild/app/keepers" + "github.com/onomyprotocol/onomy-rebuild/app/params" + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" + v15 "github.com/onomyprotocol/onomy-rebuild/app/upgrades/v15" + "github.com/onomyprotocol/onomy-rebuild/x/globalfee" ) -func getGovProposalHandlers() []govclient.ProposalHandler { - var govProposalHandlers []govclient.ProposalHandler - - govProposalHandlers = append(govProposalHandlers, - paramsclient.ProposalHandler, - distrclient.ProposalHandler, - upgradeclient.ProposalHandler, - upgradeclient.CancelProposalHandler, - ibcclientclient.UpdateClientProposalHandler, - ibcclientclient.UpgradeProposalHandler, - daoclient.FundTreasuryProposalHandler, - daoclient.ExchangeWithTreasuryProposalProposalHandler, - ibcproviderclient.ConsumerAdditionProposalHandler, - ibcproviderclient.ConsumerRemovalProposalHandler, - ibcproviderclient.EquivocationProposalHandler, - ) - - return govProposalHandlers -} - var ( - // DefaultNodeHome default home directories for the application daemon. - DefaultNodeHome string // nolint:gochecknoglobals // cosmos-sdk application style - - // ModuleBasics defines the module BasicManager is in charge of setting up basic, - // non-dependant module elements, such as codec registration - // and genesis verification. - ModuleBasics = module.NewBasicManager( // nolint:gochecknoglobals // cosmos-sdk application style - auth.AppModuleBasic{}, - genutil.AppModuleBasic{}, - bank.AppModuleBasic{}, - capability.AppModuleBasic{}, - staking.AppModuleBasic{}, - mint.AppModuleBasic{}, - distr.AppModuleBasic{}, - gov.NewAppModuleBasic(getGovProposalHandlers()...), - params.AppModuleBasic{}, - crisis.AppModuleBasic{}, - slashing.AppModuleBasic{}, - feegrantmodule.AppModuleBasic{}, - authzmodule.AppModuleBasic{}, - ibc.AppModuleBasic{}, - upgrade.AppModuleBasic{}, - evidence.AppModuleBasic{}, - transfer.AppModuleBasic{}, - vesting.AppModuleBasic{}, - dao.AppModuleBasic{}, - ibcprovider.AppModuleBasic{}, - ) - - // module account permissions. - maccPerms = map[string][]string{ // nolint:gochecknoglobals // cosmos-sdk application style - authtypes.FeeCollectorName: nil, - daotypes.ModuleName: {authtypes.Minter}, - distrtypes.ModuleName: nil, - minttypes.ModuleName: {authtypes.Minter}, - stakingtypes.BondedPoolName: {authtypes.Burner, authtypes.Staking}, - stakingtypes.NotBondedPoolName: {authtypes.Burner, authtypes.Staking}, - govtypes.ModuleName: {authtypes.Burner}, - ibctransfertypes.ModuleName: {authtypes.Minter, authtypes.Burner}, - providertypes.ConsumerRewardsPool: nil, - } + // DefaultNodeHome default home directories for the application daemon + DefaultNodeHome string - // module accounts that are allowed to receive tokens. 
- allowedReceivingModAcc = map[string]bool{ // nolint:gochecknoglobals // cosmos-sdk application style - distrtypes.ModuleName: true, - daotypes.ModuleName: true, - // provider chain note: the fee-pool is allowed to receive tokens - authtypes.FeeCollectorName: true, - } + Upgrades = []upgrades.Upgrade{v15.Upgrade} ) var ( - _ cosmoscmd.CosmosApp = (*OnomyApp)(nil) + _ runtime.AppI = (*OnomyApp)(nil) _ servertypes.Application = (*OnomyApp)(nil) - _ simapp.App = (*OnomyApp)(nil) + _ ibctesting.TestingApp = (*OnomyApp)(nil) ) -func init() { // nolint: gochecknoinits // init funcs is are commonly used in cosmos - userHomeDir, err := os.UserHomeDir() - if err != nil { - panic(err) - } - - DefaultNodeHome = filepath.Join(userHomeDir, "."+Name) - - // change default power reduction to 18 digits, since the onomy anom is 18 digits based. - sdk.DefaultPowerReduction = sdk.NewIntWithDecimal(1, 18) // nolint: gomnd - // change default min deposit token to 18 digits. - govtypes.DefaultMinDepositTokens = sdk.NewIntWithDecimal(1, 18) // nolint: gomnd -} - // OnomyApp extends an ABCI application, but with most of its parameters exported. // They are exported for convenience in creating helper functions, as object // capabilities aren't needed for testing. -type OnomyApp struct { +type OnomyApp struct { //nolint: revive *baseapp.BaseApp + keepers.AppKeepers - cdc *codec.LegacyAmino + legacyAmino *codec.LegacyAmino appCodec codec.Codec + txConfig client.TxConfig interfaceRegistry types.InterfaceRegistry invCheckPeriod uint - // keys to access the substores - keys map[string]*sdk.KVStoreKey - tkeys map[string]*sdk.TransientStoreKey - memKeys map[string]*sdk.MemoryStoreKey - - // keepers - AccountKeeper authkeeper.AccountKeeper - BankKeeper bankkeeper.BaseKeeper - CapabilityKeeper *capabilitykeeper.Keeper - StakingKeeper stakingkeeper.Keeper - SlashingKeeper slashingkeeper.Keeper - MintKeeper mintkeeper.Keeper - DistrKeeper distrkeeper.Keeper - GovKeeper govkeeper.Keeper - CrisisKeeper crisiskeeper.Keeper - UpgradeKeeper upgradekeeper.Keeper - ParamsKeeper paramskeeper.Keeper - IBCKeeper *ibckeeper.Keeper // IBC Keeper must be a pointer in the app, so we can SetRouter on it correctly - EvidenceKeeper evidencekeeper.Keeper - TransferKeeper ibctransferkeeper.Keeper - FeeGrantKeeper feegrantkeeper.Keeper - AuthzKeeper authzkeeper.Keeper - ProviderKeeper ibcproviderkeeper.Keeper - - // make scoped keepers public for test purposes - ScopedIBCKeeper capabilitykeeper.ScopedKeeper - ScopedTransferKeeper capabilitykeeper.ScopedKeeper - ScopedIBCProviderKeeper capabilitykeeper.ScopedKeeper - - DaoKeeper daokeeper.Keeper - - // mm is the module manager - mm *module.Manager + // the module manager + mm *module.Manager + // simulation manager + sm *module.SimulationManager configurator module.Configurator +} - // sm is the simulation manager - sm *module.SimulationManager +func init() { + userHomeDir, err := os.UserHomeDir() + if err != nil { + panic(err) + } + + DefaultNodeHome = filepath.Join(userHomeDir, ".onomy") } -// New returns a reference to an initialized blockchain app. -func New( // nolint:funlen // app new cosmos func +// NewOnomyApp returns a reference to an initialized Onomy. 
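+// It wires together the BaseApp, the keepers, the module manager, the ante
+// handler and the upgrade handlers. A minimal construction, mirroring how the
+// tests in this patch build the app, looks roughly like:
+//
+//	app := NewOnomyApp(
+//		log.NewNopLogger(), dbm.NewMemDB(), nil, true,
+//		map[int64]bool{}, DefaultNodeHome,
+//		RegisterEncodingConfig(), EmptyAppOptions{},
+//	)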
+func NewOnomyApp( logger log.Logger, db dbm.DB, traceStore io.Writer, loadLatest bool, skipUpgradeHeights map[int64]bool, homePath string, - invCheckPeriod uint, - encodingConfig cosmoscmd.EncodingConfig, + encodingConfig params.EncodingConfig, appOpts servertypes.AppOptions, baseAppOptions ...func(*baseapp.BaseApp), -) cosmoscmd.App { +) *OnomyApp { appCodec := encodingConfig.Marshaler - cdc := encodingConfig.Amino + legacyAmino := encodingConfig.Amino interfaceRegistry := encodingConfig.InterfaceRegistry + txConfig := encodingConfig.TxConfig + + // App Opts + skipGenesisInvariants := cast.ToBool(appOpts.Get(crisis.FlagSkipGenesisInvariants)) + invCheckPeriod := cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)) + + bApp := baseapp.NewBaseApp( + appName, + logger, + db, + txConfig.TxDecoder(), + baseAppOptions...) - bApp := baseapp.NewBaseApp(Name, logger, db, encodingConfig.TxConfig.TxDecoder(), baseAppOptions...) bApp.SetCommitMultiStoreTracer(traceStore) bApp.SetVersion(version.Version) bApp.SetInterfaceRegistry(interfaceRegistry) - - keys := sdk.NewKVStoreKeys( - authtypes.StoreKey, banktypes.StoreKey, stakingtypes.StoreKey, - minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey, - govtypes.StoreKey, paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, feegrant.StoreKey, - evidencetypes.StoreKey, ibctransfertypes.StoreKey, capabilitytypes.StoreKey, authzkeeper.StoreKey, - providertypes.StoreKey, - ) - tkeys := sdk.NewTransientStoreKeys(paramstypes.TStoreKey) - memKeys := sdk.NewMemoryStoreKeys(capabilitytypes.MemStoreKey) + bApp.SetTxEncoder(txConfig.TxEncoder()) app := &OnomyApp{ BaseApp: bApp, - cdc: cdc, + legacyAmino: legacyAmino, + txConfig: txConfig, appCodec: appCodec, interfaceRegistry: interfaceRegistry, invCheckPeriod: invCheckPeriod, - keys: keys, - tkeys: tkeys, - memKeys: memKeys, } - app.ParamsKeeper = initParamsKeeper(appCodec, cdc, keys[paramstypes.StoreKey], tkeys[paramstypes.TStoreKey]) - - // set the BaseApp's parameter store - bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramskeeper.ConsensusParamsKeyTable())) - - // add capability keeper and ScopeToModule for ibc module - app.CapabilityKeeper = capabilitykeeper.NewKeeper(appCodec, keys[capabilitytypes.StoreKey], memKeys[capabilitytypes.MemStoreKey]) - - // grant capabilities for the ibc and ibc-transfer modules - scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName) - scopedTransferKeeper := app.CapabilityKeeper.ScopeToModule(ibctransfertypes.ModuleName) - scopedIBCProviderKeeper := app.CapabilityKeeper.ScopeToModule(providertypes.ModuleName) + moduleAccountAddresses := app.ModuleAccountAddrs() - // add keepers - app.AccountKeeper = authkeeper.NewAccountKeeper( - appCodec, keys[authtypes.StoreKey], app.GetSubspace(authtypes.ModuleName), authtypes.ProtoBaseAccount, maccPerms, - ) - // provider note: `allowedReceivingModAcc` has been modified to allow the provider chain's - // fee-pool to receive tokens from the consumer chain - app.BankKeeper = bankkeeper.NewBaseKeeper( - appCodec, keys[banktypes.StoreKey], app.AccountKeeper, app.GetSubspace(banktypes.ModuleName), app.BlockedAddrs(), - ) - app.AuthzKeeper = authzkeeper.NewKeeper( - keys[authzkeeper.StoreKey], + // Setup keepers + app.AppKeepers = keepers.NewAppKeeper( appCodec, - app.BaseApp.MsgServiceRouter(), - ) - stakingKeeper := stakingkeeper.NewKeeper( - appCodec, keys[stakingtypes.StoreKey], app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName), - ) - 
app.MintKeeper = mintkeeper.NewKeeper( - appCodec, keys[minttypes.StoreKey], app.GetSubspace(minttypes.ModuleName), &stakingKeeper, - app.AccountKeeper, app.BankKeeper, authtypes.FeeCollectorName, - ) - app.DistrKeeper = distrkeeper.NewKeeper( - appCodec, keys[distrtypes.StoreKey], app.GetSubspace(distrtypes.ModuleName), app.AccountKeeper, app.BankKeeper, - &stakingKeeper, authtypes.FeeCollectorName, app.ModuleAccountAddrs(), - ) - app.SlashingKeeper = slashingkeeper.NewKeeper( - appCodec, keys[slashingtypes.StoreKey], &stakingKeeper, app.GetSubspace(slashingtypes.ModuleName), - ) - app.CrisisKeeper = crisiskeeper.NewKeeper( - app.GetSubspace(crisistypes.ModuleName), invCheckPeriod, app.BankKeeper, authtypes.FeeCollectorName, - ) - - app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) - app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) - - // Create IBC Keeper - app.IBCKeeper = ibckeeper.NewKeeper( - appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), &app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper, + bApp, + legacyAmino, + maccPerms, + moduleAccountAddresses, + app.BlockedModuleAccountAddrs(moduleAccountAddresses), + skipUpgradeHeights, + homePath, + invCheckPeriod, + logger, + appOpts, ) - // Create Transfer Keepers - app.TransferKeeper = ibctransferkeeper.NewKeeper( - appCodec, - keys[ibctransfertypes.StoreKey], - app.GetSubspace(ibctransfertypes.ModuleName), - app.IBCKeeper.ChannelKeeper, - app.IBCKeeper.ChannelKeeper, - &app.IBCKeeper.PortKeeper, - app.AccountKeeper, - app.BankKeeper, - scopedTransferKeeper, - ) - transferModule := transfer.NewAppModule(app.TransferKeeper) - ibcmodule := transfer.NewIBCModule(app.TransferKeeper) - - // Create evidence Keeper for to register the IBC light client misbehavior evidence route - evidenceKeeper := evidencekeeper.NewKeeper( - appCodec, keys[evidencetypes.StoreKey], &app.StakingKeeper, app.SlashingKeeper, - ) - // If evidence needs to be handled for the app, set routes in router here and seal - app.EvidenceKeeper = *evidenceKeeper - - app.DaoKeeper = *daokeeper.NewKeeper( - appCodec, - keys[daotypes.StoreKey], - keys[daotypes.MemStoreKey], - app.GetSubspace(daotypes.ModuleName), - &app.BankKeeper, - &app.AccountKeeper, - &app.DistrKeeper, - &app.GovKeeper, - &app.MintKeeper, - &app.StakingKeeper, - ) - - // register the staking hooks - // NOTE: stakingKeeper above is passed by reference, so that it will contain these hooks - app.StakingKeeper = *stakingKeeper.SetHooks( - stakingtypes.NewMultiStakingHooks( - app.DistrKeeper.Hooks(), - app.SlashingKeeper.Hooks(), - app.ProviderKeeper.Hooks(), - ), - ) - - // protect the dao module form the slashing - app.StakingKeeper = *stakingKeeper.SetSlashingProtestedModules(func() map[string]struct{} { - return map[string]struct{}{ - daotypes.ModuleName: {}, - } - }) - - app.ProviderKeeper = ibcproviderkeeper.NewKeeper( - appCodec, - keys[providertypes.StoreKey], - app.GetSubspace(providertypes.ModuleName), - scopedIBCProviderKeeper, - app.IBCKeeper.ChannelKeeper, - &app.IBCKeeper.PortKeeper, - app.IBCKeeper.ConnectionKeeper, - app.IBCKeeper.ClientKeeper, - app.StakingKeeper, - app.SlashingKeeper, - app.AccountKeeper, - app.EvidenceKeeper, - app.DistrKeeper, - app.BankKeeper, - authtypes.FeeCollectorName, - ) - providerModule := ibcprovider.NewAppModule(&app.ProviderKeeper) - - // Create static IBC router, add transfer route, then set and seal it - ibcRouter 
:= ibcporttypes.NewRouter() - ibcRouter.AddRoute(ibctransfertypes.ModuleName, ibcmodule) - ibcRouter.AddRoute(providertypes.ModuleName, providerModule) - app.IBCKeeper.SetRouter(ibcRouter) - - // register the proposal types - govRouter := govtypes.NewRouter() - govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler). - AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)). - AddRoute(distrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)). - AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)). - AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper)). - AddRoute(daotypes.RouterKey, dao.NewProposalHandler(app.DaoKeeper)). - AddRoute(providertypes.RouterKey, ibcprovider.NewProviderProposalHandler(app.ProviderKeeper)) - - app.GovKeeper = govkeeper.NewKeeper( - appCodec, keys[govtypes.StoreKey], app.GetSubspace(govtypes.ModuleName), app.AccountKeeper, app.BankKeeper, - &stakingKeeper, govRouter, - ) - - /**** Module Options ****/ - - // NOTE: we may consider parsing `appOpts` inside module constructors. For the moment - // we prefer to be more strict in what arguments the modules expect. - skipGenesisInvariants := cast.ToBool(appOpts.Get(crisis.FlagSkipGenesisInvariants)) - // NOTE: Any module instantiated in the module manager that is later modified // must be passed by reference here. - - app.mm = module.NewManager( - genutil.NewAppModule( - app.AccountKeeper, app.StakingKeeper, app.BaseApp.DeliverTx, - encodingConfig.TxConfig, - ), - auth.NewAppModule(appCodec, app.AccountKeeper, nil), - vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), - bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), - capability.NewAppModule(appCodec, *app.CapabilityKeeper), - feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), - crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants), - gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), - mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper), - slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), - distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), - staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), - upgrade.NewAppModule(app.UpgradeKeeper), - evidence.NewAppModule(app.EvidenceKeeper), - ibc.NewAppModule(app.IBCKeeper), - params.NewAppModule(app.ParamsKeeper), - authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), - transferModule, - dao.NewAppModule(appCodec, app.DaoKeeper), - providerModule, - ) + app.mm = module.NewManager(appModules(app, encodingConfig, skipGenesisInvariants)...) // During begin block slashing happens after distr.BeginBlocker so that // there is nothing left over in the validator fee pool, so as to keep the // CanWithdrawInvariant invariant. 
// NOTE: staking module is required if HistoricalEntries param > 0 - app.mm.SetOrderBeginBlockers( - upgradetypes.ModuleName, - capabilitytypes.ModuleName, - authtypes.ModuleName, - banktypes.ModuleName, - distrtypes.ModuleName, - stakingtypes.ModuleName, - slashingtypes.ModuleName, - govtypes.ModuleName, - minttypes.ModuleName, - ibchost.ModuleName, - genutiltypes.ModuleName, - evidencetypes.ModuleName, - ibctransfertypes.ModuleName, - crisistypes.ModuleName, - authz.ModuleName, - paramstypes.ModuleName, - vestingtypes.ModuleName, - feegrant.ModuleName, - daotypes.ModuleName, - providertypes.ModuleName, - ) - app.mm.SetOrderEndBlockers( - upgradetypes.ModuleName, - capabilitytypes.ModuleName, - authtypes.ModuleName, - banktypes.ModuleName, - distrtypes.ModuleName, - stakingtypes.ModuleName, - slashingtypes.ModuleName, - govtypes.ModuleName, - minttypes.ModuleName, - ibchost.ModuleName, - genutiltypes.ModuleName, - evidencetypes.ModuleName, - ibctransfertypes.ModuleName, - crisistypes.ModuleName, - authz.ModuleName, - paramstypes.ModuleName, - vestingtypes.ModuleName, - feegrant.ModuleName, - daotypes.ModuleName, - providertypes.ModuleName, - ) + // NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. IBC) + // Tell the app's module manager how to set the order of BeginBlockers, which are run at the beginning of every block. + app.mm.SetOrderBeginBlockers(orderBeginBlockers()...) + + app.mm.SetOrderEndBlockers(orderEndBlockers()...) // NOTE: The genutils module must occur after staking so that pools are // properly initialized with tokens from genesis accounts. + // NOTE: The genutils module must also occur after auth so that it can access the params from auth. // NOTE: Capability module must occur first so that it can initialize any capabilities // so that other modules that want to create or claim capabilities afterwards in InitChain // can do so safely. - app.mm.SetOrderInitGenesis( - upgradetypes.ModuleName, - capabilitytypes.ModuleName, - authtypes.ModuleName, - banktypes.ModuleName, - distrtypes.ModuleName, - stakingtypes.ModuleName, - slashingtypes.ModuleName, - govtypes.ModuleName, - minttypes.ModuleName, - ibchost.ModuleName, - genutiltypes.ModuleName, - evidencetypes.ModuleName, - authz.ModuleName, - ibctransfertypes.ModuleName, - crisistypes.ModuleName, - paramstypes.ModuleName, - vestingtypes.ModuleName, - feegrant.ModuleName, - daotypes.ModuleName, - providertypes.ModuleName, - ) + app.mm.SetOrderInitGenesis(orderInitBlockers()...) - app.mm.RegisterInvariants(&app.CrisisKeeper) - app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + // Uncomment if you want to set a custom migration order here. 
+ // app.mm.SetOrderMigrations(custom order) + + app.mm.RegisterInvariants(app.CrisisKeeper) app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) app.mm.RegisterServices(app.configurator) + autocliv1.RegisterQueryServer(app.GRPCQueryRouter(), runtimeservices.NewAutoCLIQueryService(app.mm.Modules)) + + reflectionSvc, err := runtimeservices.NewReflectionService() + if err != nil { + panic(err) + } + reflectionv1.RegisterReflectionServiceServer(app.GRPCQueryRouter(), reflectionSvc) + + // add test gRPC service for testing gRPC queries in isolation + testdata.RegisterQueryServer(app.GRPCQueryRouter(), testdata.QueryImpl{}) + // create the simulation manager and define the order of the modules for deterministic simulations // // NOTE: this is not required apps that don't use the simulator for fuzz testing // transactions - app.sm = module.NewSimulationManager( - auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts), - bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), - capability.NewAppModule(appCodec, *app.CapabilityKeeper), - feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), - gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), - mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper), - staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), - distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), - slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), - params.NewAppModule(app.ParamsKeeper), - evidence.NewAppModule(app.EvidenceKeeper), - authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), - ibc.NewAppModule(app.IBCKeeper), - transferModule, - dao.NewAppModule(appCodec, app.DaoKeeper), - ) + app.sm = module.NewSimulationManager(simulationModules(app, encodingConfig, skipGenesisInvariants)...) 
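+ // register the store decoders for simulation testing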
+ app.sm.RegisterStoreDecoders() // initialize stores - app.MountKVStores(keys) - app.MountTransientStores(tkeys) - app.MountMemoryStores(memKeys) - - // initialize BaseApp - app.SetInitChainer(app.InitChainer) - app.SetBeginBlocker(app.BeginBlocker) - - anteHandler, err := ante.NewAnteHandler( - ante.HandlerOptions{ - AccountKeeper: app.AccountKeeper, - BankKeeper: app.BankKeeper, - SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), - FeegrantKeeper: app.FeeGrantKeeper, - SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + app.MountKVStores(app.GetKVStoreKey()) + app.MountTransientStores(app.GetTransientStoreKey()) + app.MountMemoryStores(app.GetMemoryStoreKey()) + + anteHandler, err := onomyante.NewAnteHandler( + onomyante.HandlerOptions{ + HandlerOptions: ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + FeegrantKeeper: app.FeeGrantKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + Codec: appCodec, + IBCkeeper: app.IBCKeeper, + GlobalFeeSubspace: app.GetSubspace(globalfee.ModuleName), + StakingKeeper: app.StakingKeeper, + // If TxFeeChecker is nil the default ante TxFeeChecker is used + // so we use this no-op to keep the global fee module behaviour unchanged + TxFeeChecker: noOpTxFeeChecker, }, ) if err != nil { - panic(err) + panic(fmt.Errorf("failed to create AnteHandler: %s", err)) } app.SetAnteHandler(anteHandler) + app.SetInitChainer(app.InitChainer) + app.SetBeginBlocker(app.BeginBlocker) app.SetEndBlocker(app.EndBlocker) app.setupUpgradeHandlers() + app.setupUpgradeStoreLoaders() if loadLatest { if err := app.LoadLatestVersion(); err != nil { - tmos.Exit(err.Error()) + tmos.Exit(fmt.Sprintf("failed to load latest version: %s", err)) } } - app.ScopedIBCKeeper = scopedIBCKeeper - app.ScopedTransferKeeper = scopedTransferKeeper - app.ScopedIBCProviderKeeper = scopedIBCProviderKeeper - return app } -// Name returns the name of the OnomyApp. +// Name returns the name of the App func (app *OnomyApp) Name() string { return app.BaseApp.Name() } -// GetBaseApp returns the base app of the application. -func (app OnomyApp) GetBaseApp() *baseapp.BaseApp { return app.BaseApp } - -// BeginBlocker application updates every begin block. +// BeginBlocker application updates every begin block func (app *OnomyApp) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { return app.mm.BeginBlock(ctx, req) } -// EndBlocker application updates every end block. +// EndBlocker application updates every end block func (app *OnomyApp) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { return app.mm.EndBlock(ctx, req) } -// InitChainer application update at chain initialization. +// InitChainer application update at chain initialization func (app *OnomyApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { var genesisState GenesisState if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { panic(err) } + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) + return app.mm.InitGenesis(ctx, app.appCodec, genesisState) } -// LoadHeight loads a particular height. 
+// LoadHeight loads a particular height func (app *OnomyApp) LoadHeight(height int64) error { return app.LoadVersion(height) } @@ -673,29 +293,27 @@ func (app *OnomyApp) ModuleAccountAddrs() map[string]bool { return modAccAddrs } -// BlockedAddrs returns all the app's module account addresses that are not -// allowed to receive external tokens. -func (app *OnomyApp) BlockedAddrs() map[string]bool { - blockedAddrs := make(map[string]bool) - for acc := range maccPerms { - blockedAddrs[authtypes.NewModuleAddress(acc).String()] = !allowedReceivingModAcc[acc] - } +// BlockedModuleAccountAddrs returns all the app's blocked module account +// addresses. +func (app *OnomyApp) BlockedModuleAccountAddrs(modAccAddrs map[string]bool) map[string]bool { + // remove module accounts that are ALLOWED to received funds + delete(modAccAddrs, authtypes.NewModuleAddress(govtypes.ModuleName).String()) - // For ICS multiden fix - delete(blockedAddrs, authtypes.NewModuleAddress(providertypes.ConsumerRewardsPool).String()) + // Remove the ConsumerRewardsPool from the group of blocked recipient addresses in bank + delete(modAccAddrs, authtypes.NewModuleAddress(providertypes.ConsumerRewardsPool).String()) - return blockedAddrs + return modAccAddrs } -// LegacyAmino returns SimApp's amino codec. +// LegacyAmino returns OnomyApp's amino codec. // // NOTE: This is solely to be used for testing purposes as it may be desirable // for modules to register their own custom testing types. func (app *OnomyApp) LegacyAmino() *codec.LegacyAmino { - return app.cdc + return app.legacyAmino } -// AppCodec returns an app codec. +// AppCodec returns Onomy's app codec. // // NOTE: This is solely to be used for testing purposes as it may be desirable // for modules to register their own custom testing types. @@ -703,59 +321,40 @@ func (app *OnomyApp) AppCodec() codec.Codec { return app.appCodec } -// InterfaceRegistry returns an InterfaceRegistry. +// InterfaceRegistry returns Onomy's InterfaceRegistry func (app *OnomyApp) InterfaceRegistry() types.InterfaceRegistry { return app.interfaceRegistry } -// GetKey returns the KVStoreKey for the provided store key. -// -// NOTE: This is solely to be used for testing purposes. -func (app *OnomyApp) GetKey(storeKey string) *sdk.KVStoreKey { - return app.keys[storeKey] -} - -// GetTKey returns the TransientStoreKey for the provided store key. -// -// NOTE: This is solely to be used for testing purposes. -func (app *OnomyApp) GetTKey(storeKey string) *sdk.TransientStoreKey { - return app.tkeys[storeKey] -} - -// GetMemKey returns the MemStoreKey for the provided mem key. -// -// NOTE: This is solely used for testing purposes. -func (app *OnomyApp) GetMemKey(storeKey string) *sdk.MemoryStoreKey { - return app.memKeys[storeKey] -} - -// GetSubspace returns a param subspace for a given module name. -// -// NOTE: This is solely to be used for testing purposes. -func (app *OnomyApp) GetSubspace(moduleName string) paramstypes.Subspace { - subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) - return subspace +// SimulationManager implements the SimulationApp interface +func (app *OnomyApp) SimulationManager() *module.SimulationManager { + return app.sm } // RegisterAPIRoutes registers all application module routes with the provided // API server. func (app *OnomyApp) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { clientCtx := apiSvr.ClientCtx - rpc.RegisterRoutes(clientCtx, apiSvr.Router) - // Register legacy tx routes. 
- authrest.RegisterTxRoutes(clientCtx, apiSvr.Router) // Register new tx routes from grpc-gateway. authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // Register new tendermint queries routes from grpc-gateway. tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // Register legacy and grpc-gateway routes for all modules. - ModuleBasics.RegisterRESTRoutes(clientCtx, apiSvr.Router) ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) - // register app's OpenAPI routes. - apiSvr.Router.Handle("/openapi/openapi.yml", http.FileServer(http.FS(docs.Docs))) - apiSvr.Router.HandleFunc("/", openapiconsole.Handler(Name, "/openapi/openapi.yml")) + // Register nodeservice grpc-gateway routes. + nodeservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) + + // register swagger API from root so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } +} + +// RegisterTxService allows query minimum-gas-prices in app.toml +func (app *OnomyApp) RegisterNodeService(clientCtx client.Context) { + nodeservice.RegisterNodeService(clientCtx, app.GRPCQueryRouter()) } // RegisterTxService implements the Application.RegisterTxService method. @@ -765,87 +364,91 @@ func (app *OnomyApp) RegisterTxService(clientCtx client.Context) { // RegisterTendermintService implements the Application.RegisterTendermintService method. func (app *OnomyApp) RegisterTendermintService(clientCtx client.Context) { - tmservice.RegisterTendermintService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.interfaceRegistry) + tmservice.RegisterTendermintService( + clientCtx, + app.BaseApp.GRPCQueryRouter(), + app.interfaceRegistry, + app.Query, + ) } -// SetOrderEndBlockers sets the order of set end-blocker calls. -func (app *OnomyApp) SetOrderEndBlockers(moduleNames ...string) { - app.mm.SetOrderEndBlockers(moduleNames...) -} +// configure store loader that checks if version == upgradeHeight and applies store upgrades +func (app *OnomyApp) setupUpgradeStoreLoaders() { + upgradeInfo, err := app.UpgradeKeeper.ReadUpgradeInfoFromDisk() + if err != nil { + panic(fmt.Sprintf("failed to read upgrade info from disk %s", err)) + } -// initParamsKeeper init params keeper and its subspaces. -func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { - paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) - - paramsKeeper.Subspace(authtypes.ModuleName) - paramsKeeper.Subspace(banktypes.ModuleName) - paramsKeeper.Subspace(stakingtypes.ModuleName) - paramsKeeper.Subspace(minttypes.ModuleName) - paramsKeeper.Subspace(distrtypes.ModuleName) - paramsKeeper.Subspace(slashingtypes.ModuleName) - paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govtypes.ParamKeyTable()) - paramsKeeper.Subspace(crisistypes.ModuleName) - paramsKeeper.Subspace(ibctransfertypes.ModuleName) - paramsKeeper.Subspace(ibchost.ModuleName) - paramsKeeper.Subspace(daotypes.ModuleName) - paramsKeeper.Subspace(providertypes.ModuleName) - - return paramsKeeper -} + if app.UpgradeKeeper.IsSkipHeight(upgradeInfo.Height) { + return + } -// SimulationManager implements the SimulationApp interface. 
-func (app *OnomyApp) SimulationManager() *module.SimulationManager { - return app.sm + for _, upgrade := range Upgrades { + upgrade := upgrade + if upgradeInfo.Name == upgrade.UpgradeName { + storeUpgrades := upgrade.StoreUpgrades + app.SetStoreLoader(upgradetypes.UpgradeStoreLoader(upgradeInfo.Height, &storeUpgrades)) + } + } } func (app *OnomyApp) setupUpgradeHandlers() { - app.UpgradeKeeper.SetUpgradeHandler(v1_0_1.Name, v1_0_1.UpgradeHandler) - app.UpgradeKeeper.SetUpgradeHandler(v1_0_3.Name, v1_0_3.UpgradeHandler) - app.UpgradeKeeper.SetUpgradeHandler(v1_0_3_4.Name, v1_0_3_4.UpgradeHandler) - app.UpgradeKeeper.SetUpgradeHandler(v1_0_3_5.Name, v1_0_3_5.UpgradeHandler) - // we need to have the reference to `app` which is why we need this `func` here - app.UpgradeKeeper.SetUpgradeHandler( - v1_1_1.Name, - func(ctx sdk.Context, _ upgradetypes.Plan, fromVM module.VersionMap) (module.VersionMap, error) { - for moduleName, eachModule := range app.mm.Modules { - fromVM[moduleName] = eachModule.ConsensusVersion() - } - - // This is critical for the chain upgrade to work - app.ProviderKeeper.InitGenesis(ctx, providertypes.DefaultGenesisState()) - - return app.mm.RunMigrations(ctx, app.configurator, fromVM) - }, - ) - app.UpgradeKeeper.SetUpgradeHandler(v1_1_2.Name, v1_1_2.UpgradeHandler) - app.UpgradeKeeper.SetUpgradeHandler(v1_1_4.Name, v1_1_4.UpgradeHandler) + for _, upgrade := range Upgrades { + app.UpgradeKeeper.SetUpgradeHandler( + upgrade.UpgradeName, + upgrade.CreateUpgradeHandler( + app.mm, + app.configurator, + &app.AppKeepers, + ), + ) + } +} - upgradeInfo, err := app.UpgradeKeeper.ReadUpgradeInfoFromDisk() +// RegisterSwaggerAPI registers swagger route with API Server +func RegisterSwaggerAPI(rtr *mux.Router) { + statikFS, err := fs.New() if err != nil { - panic(fmt.Errorf("failed to read upgrade info from disk: %w", err)) + panic(err) } - // configure store loader that checks if version == upgradeHeight and applies store upgrades - if app.UpgradeKeeper.IsSkipHeight(upgradeInfo.Height) { - return - } + staticServer := http.FileServer(statikFS) + rtr.PathPrefix("/swagger/").Handler(http.StripPrefix("/swagger/", staticServer)) +} - var storeUpgrades *storetypes.StoreUpgrades +func (app *OnomyApp) OnTxSucceeded(_ sdk.Context, _, _ string, _ []byte, _ []byte) { +} - switch upgradeInfo.Name { - case v1_1_1.Name: - storeUpgrades = &storetypes.StoreUpgrades{ - Added: []string{providertypes.ModuleName, providertypes.StoreKey}, - } - case v1_1_4.Name: - storeUpgrades = &storetypes.StoreUpgrades{ - Added: []string{authz.ModuleName, authzkeeper.StoreKey}, - } - default: - // no store upgrades - } +func (app *OnomyApp) OnTxFailed(_ sdk.Context, _, _ string, _ []byte, _ []byte) { +} - if storeUpgrades != nil { - app.SetStoreLoader(upgradetypes.UpgradeStoreLoader(upgradeInfo.Height, storeUpgrades)) +// TestingApp functions + +// GetBaseApp implements the TestingApp interface. +func (app *OnomyApp) GetBaseApp() *baseapp.BaseApp { + return app.BaseApp +} + +// GetTxConfig implements the TestingApp interface. 
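+// The returned TxConfig is used by the IBC testing framework to encode and
+// sign transactions when driving this app in integration tests.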
+func (app *OnomyApp) GetTxConfig() client.TxConfig { + return app.txConfig +} + +// EmptyAppOptions is a stub implementing AppOptions +type EmptyAppOptions struct{} + +// Get implements AppOptions +func (ao EmptyAppOptions) Get(_ string) interface{} { + return nil +} + +// noOpTxFeeChecker is an ante TxFeeChecker for the DeductFeeDecorator, see x/auth/ante/fee.go, +// it performs a no-op by not checking tx fees and always returns a zero tx priority +func noOpTxFeeChecker(_ sdk.Context, tx sdk.Tx) (sdk.Coins, int64, error) { + feeTx, ok := tx.(sdk.FeeTx) + if !ok { + return nil, 0, errorsmod.Wrap(sdkerrors.ErrTxDecode, "Tx must be a FeeTx") } + + return feeTx.GetFee(), 0, nil } diff --git a/app/app_helpers.go b/app/app_helpers.go new file mode 100644 index 00000000..d93ebcb2 --- /dev/null +++ b/app/app_helpers.go @@ -0,0 +1,56 @@ +package onomy + +import ( + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + ibctestingtypes "github.com/cosmos/ibc-go/v7/testing/types" + icstest "github.com/cosmos/interchain-security/v3/testutil/integration" + ibcproviderkeeper "github.com/cosmos/interchain-security/v3/x/ccv/provider/keeper" + + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" +) + +// ProviderApp interface implementations for icstest tests + +// GetProviderKeeper implements the ProviderApp interface. +func (app *OnomyApp) GetProviderKeeper() ibcproviderkeeper.Keeper { //nolint:nolintlint + return app.ProviderKeeper +} + +// GetStakingKeeper implements the TestingApp interface. Needed for ICS. +func (app *OnomyApp) GetStakingKeeper() ibctestingtypes.StakingKeeper { //nolint:nolintlint + return app.StakingKeeper +} + +// GetIBCKeeper implements the TestingApp interface. +func (app *OnomyApp) GetIBCKeeper() *ibckeeper.Keeper { //nolint:nolintlint + return app.IBCKeeper +} + +// GetScopedIBCKeeper implements the TestingApp interface. +func (app *OnomyApp) GetScopedIBCKeeper() capabilitykeeper.ScopedKeeper { //nolint:nolintlint + return app.ScopedIBCKeeper +} + +// GetTestStakingKeeper implements the ProviderApp interface. +func (app *OnomyApp) GetTestStakingKeeper() icstest.TestStakingKeeper { //nolint:nolintlint + return app.StakingKeeper +} + +// GetTestBankKeeper implements the ProviderApp interface. +func (app *OnomyApp) GetTestBankKeeper() icstest.TestBankKeeper { //nolint:nolintlint + return app.BankKeeper +} + +// GetTestSlashingKeeper implements the ProviderApp interface. +func (app *OnomyApp) GetTestSlashingKeeper() icstest.TestSlashingKeeper { //nolint:nolintlint + return app.SlashingKeeper +} + +// GetTestDistributionKeeper implements the ProviderApp interface. 
+func (app *OnomyApp) GetTestDistributionKeeper() icstest.TestDistributionKeeper { //nolint:nolintlint + return app.DistrKeeper +} + +func (app *OnomyApp) GetTestAccountKeeper() icstest.TestAccountKeeper { //nolint:nolintlint + return app.AccountKeeper +} diff --git a/app/app_test.go b/app/app_test.go new file mode 100644 index 00000000..2276891d --- /dev/null +++ b/app/app_test.go @@ -0,0 +1,47 @@ +package onomy_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + db "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + + onomy "github.com/onomyprotocol/onomy-rebuild/app" + onomyhelpers "github.com/onomyprotocol/onomy-rebuild/app/helpers" +) + +type EmptyAppOptions struct{} + +func (ao EmptyAppOptions) Get(_ string) interface{} { + return nil +} + +func TestOnomyApp_BlockedModuleAccountAddrs(t *testing.T) { + encConfig := onomy.RegisterEncodingConfig() + app := onomy.NewOnomyApp( + log.NewNopLogger(), + db.NewMemDB(), + nil, + true, + map[int64]bool{}, + onomy.DefaultNodeHome, + encConfig, + EmptyAppOptions{}, + ) + + moduleAccountAddresses := app.ModuleAccountAddrs() + blockedAddrs := app.BlockedModuleAccountAddrs(moduleAccountAddresses) + + require.NotContains(t, blockedAddrs, authtypes.NewModuleAddress(govtypes.ModuleName).String()) +} + +func TestOnomyApp_Export(t *testing.T) { + app := onomyhelpers.Setup(t) + _, err := app.ExportAppStateAndValidators(true, []string{}, []string{}) + require.NoError(t, err, "ExportAppStateAndValidators should not have an error") +} diff --git a/app/const.go b/app/const.go new file mode 100644 index 00000000..1c739253 --- /dev/null +++ b/app/const.go @@ -0,0 +1,5 @@ +package onomy + +const ( + appName = "OnomyApp" +) diff --git a/app/encoding.go b/app/encoding.go new file mode 100644 index 00000000..eb4b6130 --- /dev/null +++ b/app/encoding.go @@ -0,0 +1,18 @@ +package onomy + +import ( + "github.com/cosmos/cosmos-sdk/std" + + "github.com/onomyprotocol/onomy-rebuild/app/params" +) + +func RegisterEncodingConfig() params.EncodingConfig { + encConfig := params.MakeEncodingConfig() + + std.RegisterLegacyAminoCodec(encConfig.Amino) + std.RegisterInterfaces(encConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encConfig.Amino) + ModuleBasics.RegisterInterfaces(encConfig.InterfaceRegistry) + + return encConfig +} diff --git a/app/export.go b/app/export.go index aed40f09..787fa114 100644 --- a/app/export.go +++ b/app/export.go @@ -1,22 +1,23 @@ -// Package app contains configuration of the network. -package app +package onomy import ( "encoding/json" - "log" + + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" servertypes "github.com/cosmos/cosmos-sdk/server/types" sdk "github.com/cosmos/cosmos-sdk/types" slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" "github.com/cosmos/cosmos-sdk/x/staking" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) // ExportAppStateAndValidators exports the state of the application for a genesis // file. 
func (app *OnomyApp) ExportAppStateAndValidators( - forZeroHeight bool, jailAllowedAddrs []string, + forZeroHeight bool, + jailAllowedAddrs []string, + modulesToExport []string, ) (servertypes.ExportedApp, error) { // as if they could withdraw from the start of the next block ctx := app.NewContext(true, tmproto.Header{Height: app.LastBlockHeight()}) @@ -29,28 +30,25 @@ func (app *OnomyApp) ExportAppStateAndValidators( app.prepForZeroHeightGenesis(ctx, jailAllowedAddrs) } - genState := app.mm.ExportGenesis(ctx, app.appCodec) + genState := app.mm.ExportGenesisForModules(ctx, app.appCodec, modulesToExport) appState, err := json.MarshalIndent(genState, "", " ") if err != nil { return servertypes.ExportedApp{}, err } validators, err := staking.WriteValidators(ctx, app.StakingKeeper) - if err != nil { - return servertypes.ExportedApp{}, err - } return servertypes.ExportedApp{ AppState: appState, Validators: validators, Height: height, ConsensusParams: app.BaseApp.GetConsensusParams(ctx), - }, nil + }, err } // prepare for fresh start at zero height // NOTE zero height genesis is a temporary feature which will be deprecated -// in favor of export at a block height -func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) { // nolint:cyclop,gocyclo,gocognit // generated by cosmos +// in favour of export at a block height +func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) { applyAllowedAddrs := false // check if there is a allowed address list @@ -63,7 +61,7 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs for _, addr := range jailAllowedAddrs { _, err := sdk.ValAddressFromBech32(addr) if err != nil { - log.Fatal(err) + panic(err) } allowedAddrsMap[addr] = true } @@ -77,7 +75,7 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { _, err := app.DistrKeeper.WithdrawValidatorCommission(ctx, val.GetOperator()) if err != nil { - panic(err) + app.Logger().Error(err.Error(), "ValOperatorAddress", val.GetOperator()) } return false }) @@ -85,7 +83,17 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs // withdraw all delegator rewards dels := app.StakingKeeper.GetAllDelegations(ctx) for _, delegation := range dels { - _, err := app.DistrKeeper.WithdrawDelegationRewards(ctx, delegation.GetDelegatorAddr(), delegation.GetValidatorAddr()) + valAddr, err := sdk.ValAddressFromBech32(delegation.ValidatorAddress) + if err != nil { + panic(err) + } + + delAddr, err := sdk.AccAddressFromBech32(delegation.DelegatorAddress) + if err != nil { + panic(err) + } + + _, err = app.DistrKeeper.WithdrawDelegationRewards(ctx, delAddr, valAddr) if err != nil { panic(err) } @@ -101,6 +109,21 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs height := ctx.BlockHeight() ctx = ctx.WithBlockHeight(0) + // reinitialize all validators (v0.46 version) + // app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { + // // donate any unwithdrawn outstanding reward fraction tokens to the community pool + // scraps := app.DistrKeeper.GetValidatorOutstandingRewardsCoins(ctx, val.GetOperator()) + // feePool := app.DistrKeeper.GetFeePool(ctx) + // feePool.CommunityPool = feePool.CommunityPool.Add(scraps...) 
+ // app.DistrKeeper.SetFeePool(ctx, feePool) + + // err := app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, val.GetOperator()) + // if err != nil { + // panic(err) + // } + // return false + // }) + // reinitialize all validators app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { // donate any unwithdrawn outstanding reward fraction tokens to the community pool @@ -109,14 +132,28 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs feePool.CommunityPool = feePool.CommunityPool.Add(scraps...) app.DistrKeeper.SetFeePool(ctx, feePool) - app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, val.GetOperator()) + if err := app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, val.GetOperator()); err != nil { + panic(err) + } return false }) // reinitialize all delegations for _, del := range dels { - app.DistrKeeper.Hooks().BeforeDelegationCreated(ctx, del.GetDelegatorAddr(), del.GetValidatorAddr()) - app.DistrKeeper.Hooks().AfterDelegationModified(ctx, del.GetDelegatorAddr(), del.GetValidatorAddr()) + valAddr, err := sdk.ValAddressFromBech32(del.ValidatorAddress) + if err != nil { + panic(err) + } + delAddr, err := sdk.AccAddressFromBech32(del.DelegatorAddress) + if err != nil { + panic(err) + } + if err := app.DistrKeeper.Hooks().BeforeDelegationCreated(ctx, delAddr, valAddr); err != nil { + panic(err) + } + if err := app.DistrKeeper.Hooks().AfterDelegationModified(ctx, delAddr, valAddr); err != nil { + panic(err) + } } // reset context height @@ -144,31 +181,33 @@ func (app *OnomyApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs // Iterate through validators by power descending, reset bond heights, and // update bond intra-tx counters. - store := ctx.KVStore(app.keys[stakingtypes.StoreKey]) + store := ctx.KVStore(app.GetKey(stakingtypes.StoreKey)) iter := sdk.KVStoreReversePrefixIterator(store, stakingtypes.ValidatorsKey) - counter := int16(0) - for ; iter.Valid(); iter.Next() { - addr := sdk.ValAddress(iter.Key()[1:]) - validator, found := app.StakingKeeper.GetValidator(ctx, addr) - if !found { - panic("expected validator, not found") - } + counter := int16(0) - validator.UnbondingHeight = 0 - if applyAllowedAddrs && !allowedAddrsMap[addr.String()] { - validator.Jailed = true + // Closure to ensure iterator doesn't leak. 
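+ // (the deferred Close runs when the anonymous function returns, so the
+ // iterator is released before the validator set updates below are applied)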
+ func() { + defer iter.Close() + for ; iter.Valid(); iter.Next() { + addr := sdk.ValAddress(stakingtypes.AddressFromValidatorsKey(iter.Key())) + validator, found := app.StakingKeeper.GetValidator(ctx, addr) + if !found { + panic("expected validator, not found") + } + + validator.UnbondingHeight = 0 + if applyAllowedAddrs && !allowedAddrsMap[addr.String()] { + validator.Jailed = true + } + + app.StakingKeeper.SetValidator(ctx, validator) + counter++ } + }() - app.StakingKeeper.SetValidator(ctx, validator) - counter++ - } - - if err := iter.Close(); err != nil { - panic(err) - } - - if _, err := app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx); err != nil { + _, err := app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx) + if err != nil { panic(err) } diff --git a/app/genesis.go b/app/genesis.go index 2900679c..4d5b7151 100644 --- a/app/genesis.go +++ b/app/genesis.go @@ -1,12 +1,12 @@ -package app +package onomy import ( "encoding/json" - "github.com/cosmos/cosmos-sdk/codec" + "github.com/onomyprotocol/onomy-rebuild/app/params" ) -// GenesisState of the blockchain is represented here as a map of raw json +// The genesis state of the blockchain is represented here as a map of raw json // messages key'd by a identifier string. // The identifier is used to determine which module genesis information belongs // to so it may be appropriately routed during init chain. @@ -16,6 +16,6 @@ import ( type GenesisState map[string]json.RawMessage // NewDefaultGenesisState generates the default state for the application. -func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { - return ModuleBasics.DefaultGenesis(cdc) +func NewDefaultGenesisState(encConfig params.EncodingConfig) GenesisState { + return ModuleBasics.DefaultGenesis(encConfig.Marshaler) } diff --git a/app/genesis_account.go b/app/genesis_account.go new file mode 100644 index 00000000..079e1c76 --- /dev/null +++ b/app/genesis_account.go @@ -0,0 +1,56 @@ +package onomy + +import ( + "errors" + + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" +) + +var _ authtypes.GenesisAccount = (*SimGenesisAccount)(nil) + +// SimGenesisAccount defines a type that implements the GenesisAccount interface +// to be used for simulation accounts in the genesis state. 
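+// It embeds a BaseAccount and adds optional vesting and module-account fields,
+// so a single genesis type can describe any account shape the simulator may
+// produce; Validate enforces consistency between those fields.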
+type SimGenesisAccount struct { + *authtypes.BaseAccount + + // vesting account fields + OriginalVesting sdk.Coins `json:"original_vesting" yaml:"original_vesting"` // total vesting coins upon initialization + DelegatedFree sdk.Coins `json:"delegated_free" yaml:"delegated_free"` // delegated vested coins at time of delegation + DelegatedVesting sdk.Coins `json:"delegated_vesting" yaml:"delegated_vesting"` // delegated vesting coins at time of delegation + StartTime int64 `json:"start_time" yaml:"start_time"` // vesting start time (UNIX Epoch time) + EndTime int64 `json:"end_time" yaml:"end_time"` // vesting end time (UNIX Epoch time) + + // module account fields + ModuleName string `json:"module_name" yaml:"module_name"` // name of the module account + ModulePermissions []string `json:"module_permissions" yaml:"module_permissions"` // permissions of module account +} + +// Validate checks for errors on the vesting and module account parameters +func (sga SimGenesisAccount) Validate() error { + if sga.OriginalVesting.IsAnyNil() { + return errors.New("OriginalVesting amount must not be nil") + } + + if !sga.OriginalVesting.IsZero() { + if sga.StartTime >= sga.EndTime { + return errors.New("vesting start-time cannot be before end-time") + } + } + + if sga.BaseAccount == nil { + return errors.New("BaseAccount must not be nil") + } + + if sga.ModuleName != "" { + ma := authtypes.ModuleAccount{ + BaseAccount: sga.BaseAccount, Name: sga.ModuleName, Permissions: sga.ModulePermissions, + } + + if err := ma.Validate(); err != nil { + return err + } + } + + return sga.BaseAccount.Validate() +} diff --git a/app/genesis_account_fuzz_test.go b/app/genesis_account_fuzz_test.go new file mode 100644 index 00000000..6b6c5a2b --- /dev/null +++ b/app/genesis_account_fuzz_test.go @@ -0,0 +1,35 @@ +package onomy + +import ( + "runtime/debug" + "testing" + + "github.com/google/gofuzz" +) + +func TestFuzzGenesisAccountValidate(t *testing.T) { + if testing.Short() { + t.Skip("running in -short mode") + } + + t.Parallel() + + acct := new(SimGenesisAccount) + i := 0 + defer func() { + r := recover() + if r == nil { + return + } + + // Otherwise report on the configuration and iteration. 
+ t.Fatalf("Failed SimGenesisAccount on iteration #%d: %#v\n\n%s\n\n%s", i, acct, r, debug.Stack()) + }() + + f := fuzz.New() + for i = 0; i < 1e5; i++ { + acct = new(SimGenesisAccount) + f.Fuzz(acct) + acct.Validate() //nolint:errcheck + } +} diff --git a/app/helpers/test_helpers.go b/app/helpers/test_helpers.go new file mode 100644 index 00000000..206ce4ca --- /dev/null +++ b/app/helpers/test_helpers.go @@ -0,0 +1,204 @@ +package helpers + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + dbm "github.com/cometbft/cometbft-db" + abci "github.com/cometbft/cometbft/abci/types" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" + tmtypes "github.com/cometbft/cometbft/types" + + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/testutil/mock" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + onomyapp "github.com/onomyprotocol/onomy-rebuild/app" +) + +// SimAppChainID hardcoded chainID for simulation +const ( + SimAppChainID = "onomy-app" +) + +// DefaultConsensusParams defines the default Tendermint consensus params used +// in OnomyApp testing. +var DefaultConsensusParams = &tmproto.ConsensusParams{ + Block: &tmproto.BlockParams{ + MaxBytes: 200000, + MaxGas: 2000000, + }, + Evidence: &tmproto.EvidenceParams{ + MaxAgeNumBlocks: 302400, + MaxAgeDuration: 504 * time.Hour, // 3 weeks is the max duration + MaxBytes: 10000, + }, + Validator: &tmproto.ValidatorParams{ + PubKeyTypes: []string{ + tmtypes.ABCIPubKeyTypeEd25519, + }, + }, +} + +type PV struct { + PrivKey cryptotypes.PrivKey +} + +type EmptyAppOptions struct{} + +func (EmptyAppOptions) Get(_ string) interface{} { return nil } + +func Setup(t *testing.T) *onomyapp.OnomyApp { + t.Helper() + + privVal := mock.NewPV() + pubKey, err := privVal.GetPubKey() + require.NoError(t, err) + // create validator set with single validator + validator := tmtypes.NewValidator(pubKey, 1) + valSet := tmtypes.NewValidatorSet([]*tmtypes.Validator{validator}) + + // generate genesis account + senderPrivKey := mock.NewPV() + senderPubKey := senderPrivKey.PrivKey.PubKey() + + acc := authtypes.NewBaseAccount(senderPubKey.Address().Bytes(), senderPubKey, 0, 0) + balance := banktypes.Balance{ + Address: acc.GetAddress().String(), + Coins: sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdk.NewInt(100000000000000))), + } + genesisAccounts := []authtypes.GenesisAccount{acc} + app := SetupWithGenesisValSet(t, valSet, genesisAccounts, balance) + + return app +} + +// SetupWithGenesisValSet initializes a new OnomyApp with a validator set and genesis accounts +// that also act as delegators. For simplicity, each validator is bonded with a delegation +// of one consensus engine unit in the default token of the OnomyApp from first genesis +// account. A Nop logger is set in OnomyApp. 
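A rough usage sketch (editorial, not part of the patch): a test in another package can bootstrap a single-validator chain with the helpers above along the following lines; the package and test names are assumptions.

package helpers_test

import (
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/onomyprotocol/onomy-rebuild/app/helpers"
)

func TestSetupBootstrapsChain(t *testing.T) {
    // Setup builds an in-memory OnomyApp with one bonded validator and a
    // funded genesis account, runs InitChain, commits the genesis block and
    // opens the next block, so the app is ready to process messages.
    app := helpers.Setup(t)
    require.NotNil(t, app)
    require.GreaterOrEqual(t, app.LastBlockHeight(), int64(1))
}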
+func SetupWithGenesisValSet(t *testing.T, valSet *tmtypes.ValidatorSet, genAccs []authtypes.GenesisAccount, balances ...banktypes.Balance) *onomyapp.OnomyApp { + t.Helper() + + onomyApp, genesisState := setup() + genesisState = genesisStateWithValSet(t, onomyApp, genesisState, valSet, genAccs, balances...) + + stateBytes, err := json.MarshalIndent(genesisState, "", " ") + require.NoError(t, err) + + // init chain will set the validator set and initialize the genesis accounts + onomyApp.InitChain( + abci.RequestInitChain{ + Validators: []abci.ValidatorUpdate{}, + ConsensusParams: DefaultConsensusParams, + AppStateBytes: stateBytes, + }, + ) + + // commit genesis changes + onomyApp.Commit() + onomyApp.BeginBlock(abci.RequestBeginBlock{Header: tmproto.Header{ + Height: onomyApp.LastBlockHeight() + 1, + AppHash: onomyApp.LastCommitID().Hash, + ValidatorsHash: valSet.Hash(), + NextValidatorsHash: valSet.Hash(), + }}) + + return onomyApp +} + +func setup() (*onomyapp.OnomyApp, onomyapp.GenesisState) { + db := dbm.NewMemDB() + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[server.FlagInvCheckPeriod] = 5 + appOptions[server.FlagMinGasPrices] = "0uatom" + + encConfig := onomyapp.RegisterEncodingConfig() + + onomyApp := onomyapp.NewOnomyApp( + log.NewNopLogger(), + db, + nil, + true, + map[int64]bool{}, + onomyapp.DefaultNodeHome, + encConfig, + appOptions, + ) + return onomyApp, onomyapp.NewDefaultGenesisState(encConfig) +} + +func genesisStateWithValSet(t *testing.T, + app *onomyapp.OnomyApp, genesisState onomyapp.GenesisState, + valSet *tmtypes.ValidatorSet, genAccs []authtypes.GenesisAccount, + balances ...banktypes.Balance, +) onomyapp.GenesisState { + t.Helper() + // set genesis accounts + authGenesis := authtypes.NewGenesisState(authtypes.DefaultParams(), genAccs) + genesisState[authtypes.ModuleName] = app.AppCodec().MustMarshalJSON(authGenesis) + + validators := make([]stakingtypes.Validator, 0, len(valSet.Validators)) + delegations := make([]stakingtypes.Delegation, 0, len(valSet.Validators)) + + bondAmt := sdk.DefaultPowerReduction + + for _, val := range valSet.Validators { + pk, err := cryptocodec.FromTmPubKeyInterface(val.PubKey) + require.NoError(t, err) + pkAny, err := codectypes.NewAnyWithValue(pk) + require.NoError(t, err) + validator := stakingtypes.Validator{ + OperatorAddress: sdk.ValAddress(val.Address).String(), + ConsensusPubkey: pkAny, + Jailed: false, + Status: stakingtypes.Bonded, + Tokens: bondAmt, + DelegatorShares: sdk.OneDec(), + Description: stakingtypes.Description{}, + UnbondingHeight: int64(0), + UnbondingTime: time.Unix(0, 0).UTC(), + Commission: stakingtypes.NewCommission(sdk.ZeroDec(), sdk.ZeroDec(), sdk.ZeroDec()), + } + validators = append(validators, validator) + delegations = append(delegations, stakingtypes.NewDelegation(genAccs[0].GetAddress(), val.Address.Bytes(), sdk.OneDec())) + + } + // set validators and delegations + stakingGenesis := stakingtypes.NewGenesisState(stakingtypes.DefaultParams(), validators, delegations) + genesisState[stakingtypes.ModuleName] = app.AppCodec().MustMarshalJSON(stakingGenesis) + + totalSupply := sdk.NewCoins() + for _, b := range balances { + // add genesis acc tokens to total supply + totalSupply = totalSupply.Add(b.Coins...) 
+ } + + for range delegations { + // add delegated tokens to total supply + totalSupply = totalSupply.Add(sdk.NewCoin(sdk.DefaultBondDenom, bondAmt)) + } + + // add bonded amount to bonded pool module account + balances = append(balances, banktypes.Balance{ + Address: authtypes.NewModuleAddress(stakingtypes.BondedPoolName).String(), + Coins: sdk.Coins{sdk.NewCoin(sdk.DefaultBondDenom, bondAmt)}, + }) + + // update total supply + bankGenesis := banktypes.NewGenesisState(banktypes.DefaultGenesisState().Params, balances, totalSupply, []banktypes.Metadata{}, []banktypes.SendEnabled{}) + genesisState[banktypes.ModuleName] = app.AppCodec().MustMarshalJSON(bankGenesis) + + return genesisState +} diff --git a/app/keepers/keepers.go b/app/keepers/keepers.go new file mode 100644 index 00000000..a2b39cf7 --- /dev/null +++ b/app/keepers/keepers.go @@ -0,0 +1,451 @@ +package keepers + +import ( + "os" + + // unnamed import of statik for swagger UI support + _ "github.com/cosmos/cosmos-sdk/client/docs/statik" + + "github.com/cometbft/cometbft/libs/log" + + pfmrouter "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward" + pfmrouterkeeper "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward/keeper" + pfmroutertypes "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward/types" + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icahost "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host" + icahostkeeper "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/keeper" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + "github.com/cosmos/ibc-go/v7/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v7/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibcclient "github.com/cosmos/ibc-go/v7/modules/core/02-client" + ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" + porttypes "github.com/cosmos/ibc-go/v7/modules/core/05-port/types" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + ibckeeper "github.com/cosmos/ibc-go/v7/modules/core/keeper" + ibcprovider "github.com/cosmos/interchain-security/v3/x/ccv/provider" + ibcproviderkeeper "github.com/cosmos/interchain-security/v3/x/ccv/provider/keeper" + providertypes "github.com/cosmos/interchain-security/v3/x/ccv/provider/types" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/codec" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/store/streaming" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + crisiskeeper "github.com/cosmos/cosmos-sdk/x/crisis/keeper" + crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" + distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + distrtypes 
"github.com/cosmos/cosmos-sdk/x/distribution/types" + evidencekeeper "github.com/cosmos/cosmos-sdk/x/evidence/keeper" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + govv1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1" + govv1beta1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1" + mintkeeper "github.com/cosmos/cosmos-sdk/x/mint/keeper" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + "github.com/cosmos/cosmos-sdk/x/params" + paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + paramproposal "github.com/cosmos/cosmos-sdk/x/params/types/proposal" + slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + "github.com/cosmos/cosmos-sdk/x/upgrade" + upgradekeeper "github.com/cosmos/cosmos-sdk/x/upgrade/keeper" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/x/globalfee" +) + +type AppKeepers struct { + // keys to access the substores + keys map[string]*storetypes.KVStoreKey + tkeys map[string]*storetypes.TransientStoreKey + memKeys map[string]*storetypes.MemoryStoreKey + + // keepers + AccountKeeper authkeeper.AccountKeeper + BankKeeper bankkeeper.Keeper + CapabilityKeeper *capabilitykeeper.Keeper + StakingKeeper *stakingkeeper.Keeper + SlashingKeeper slashingkeeper.Keeper + MintKeeper mintkeeper.Keeper + DistrKeeper distrkeeper.Keeper + GovKeeper *govkeeper.Keeper + CrisisKeeper *crisiskeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper + ParamsKeeper paramskeeper.Keeper + // IBC Keeper must be a pointer in the app, so we can SetRouter on it correctly + IBCKeeper *ibckeeper.Keeper + ICAHostKeeper icahostkeeper.Keeper + EvidenceKeeper evidencekeeper.Keeper + TransferKeeper ibctransferkeeper.Keeper + FeeGrantKeeper feegrantkeeper.Keeper + AuthzKeeper authzkeeper.Keeper + ConsensusParamsKeeper consensusparamkeeper.Keeper + + // ICS + ProviderKeeper ibcproviderkeeper.Keeper + + PFMRouterKeeper *pfmrouterkeeper.Keeper + + // Modules + ICAModule ica.AppModule + TransferModule transfer.AppModule + PFMRouterModule pfmrouter.AppModule + ProviderModule ibcprovider.AppModule + + // make scoped keepers public for test purposes + ScopedIBCKeeper capabilitykeeper.ScopedKeeper + ScopedTransferKeeper capabilitykeeper.ScopedKeeper + ScopedICAHostKeeper capabilitykeeper.ScopedKeeper + ScopedIBCProviderKeeper capabilitykeeper.ScopedKeeper +} + +func NewAppKeeper( + appCodec codec.Codec, + bApp *baseapp.BaseApp, + legacyAmino *codec.LegacyAmino, + maccPerms map[string][]string, + modAccAddrs map[string]bool, + blockedAddress map[string]bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + logger log.Logger, + appOpts servertypes.AppOptions, +) AppKeepers { + appKeepers := AppKeepers{} + + // Set keys KVStoreKey, TransientStoreKey, MemoryStoreKey + appKeepers.GenerateKeys() + + /* + configure state listening capabilities using AppOptions + we are doing nothing with the returned streamingServices and waitGroup in this case + */ + // load state streaming if enabled + + if _, _, err := 
streaming.LoadStreamingServices(bApp, appOpts, appCodec, logger, appKeepers.keys); err != nil { + logger.Error("failed to load state streaming", "err", err) + os.Exit(1) + } + + appKeepers.ParamsKeeper = initParamsKeeper( + appCodec, + legacyAmino, + appKeepers.keys[paramstypes.StoreKey], + appKeepers.tkeys[paramstypes.TStoreKey], + ) + + // set the BaseApp's parameter store + appKeepers.ConsensusParamsKeeper = consensusparamkeeper.NewKeeper( + appCodec, + appKeepers.keys[consensusparamtypes.StoreKey], + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + bApp.SetParamStore(&appKeepers.ConsensusParamsKeeper) + + // add capability keeper and ScopeToModule for ibc module + appKeepers.CapabilityKeeper = capabilitykeeper.NewKeeper( + appCodec, + appKeepers.keys[capabilitytypes.StoreKey], + appKeepers.memKeys[capabilitytypes.MemStoreKey], + ) + + appKeepers.ScopedIBCKeeper = appKeepers.CapabilityKeeper.ScopeToModule(ibcexported.ModuleName) + appKeepers.ScopedICAHostKeeper = appKeepers.CapabilityKeeper.ScopeToModule(icahosttypes.SubModuleName) + appKeepers.ScopedTransferKeeper = appKeepers.CapabilityKeeper.ScopeToModule(ibctransfertypes.ModuleName) + appKeepers.ScopedIBCProviderKeeper = appKeepers.CapabilityKeeper.ScopeToModule(providertypes.ModuleName) + + // Applications that wish to enforce statically created ScopedKeepers should call `Seal` after creating + // their scoped modules in `NewApp` with `ScopeToModule` + appKeepers.CapabilityKeeper.Seal() + + appKeepers.CrisisKeeper = crisiskeeper.NewKeeper( + appCodec, + appKeepers.keys[crisistypes.StoreKey], + invCheckPeriod, + appKeepers.BankKeeper, + authtypes.FeeCollectorName, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // Add normal keepers + appKeepers.AccountKeeper = authkeeper.NewAccountKeeper( + appCodec, + appKeepers.keys[authtypes.StoreKey], + authtypes.ProtoBaseAccount, + maccPerms, + sdk.GetConfig().GetBech32AccountAddrPrefix(), + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + appKeepers.BankKeeper = bankkeeper.NewBaseKeeper( + appCodec, + appKeepers.keys[banktypes.StoreKey], + appKeepers.AccountKeeper, + blockedAddress, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + appKeepers.AuthzKeeper = authzkeeper.NewKeeper( + appKeepers.keys[authzkeeper.StoreKey], + appCodec, + bApp.MsgServiceRouter(), + appKeepers.AccountKeeper, + ) + + appKeepers.FeeGrantKeeper = feegrantkeeper.NewKeeper( + appCodec, + appKeepers.keys[feegrant.StoreKey], + appKeepers.AccountKeeper, + ) + + appKeepers.StakingKeeper = stakingkeeper.NewKeeper( + appCodec, + appKeepers.keys[stakingtypes.StoreKey], + appKeepers.AccountKeeper, + appKeepers.BankKeeper, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + appKeepers.MintKeeper = mintkeeper.NewKeeper( + appCodec, + appKeepers.keys[minttypes.StoreKey], + appKeepers.StakingKeeper, + appKeepers.AccountKeeper, + appKeepers.BankKeeper, + authtypes.FeeCollectorName, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + appKeepers.DistrKeeper = distrkeeper.NewKeeper( + appCodec, + appKeepers.keys[distrtypes.StoreKey], + appKeepers.AccountKeeper, + appKeepers.BankKeeper, + appKeepers.StakingKeeper, + authtypes.FeeCollectorName, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + appKeepers.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + legacyAmino, + appKeepers.keys[slashingtypes.StoreKey], + appKeepers.StakingKeeper, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // register 
the staking hooks + // NOTE: stakingKeeper above is passed by reference, so that it will contain these hooks + appKeepers.StakingKeeper.SetHooks( + stakingtypes.NewMultiStakingHooks( + appKeepers.DistrKeeper.Hooks(), + appKeepers.SlashingKeeper.Hooks(), + appKeepers.ProviderKeeper.Hooks(), + ), + ) + + // UpgradeKeeper must be created before IBCKeeper + appKeepers.UpgradeKeeper = upgradekeeper.NewKeeper( + skipUpgradeHeights, + appKeepers.keys[upgradetypes.StoreKey], + appCodec, + homePath, + bApp, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + + // UpgradeKeeper must be created before IBCKeeper + appKeepers.IBCKeeper = ibckeeper.NewKeeper( + appCodec, + appKeepers.keys[ibcexported.StoreKey], + appKeepers.GetSubspace(ibcexported.ModuleName), + appKeepers.StakingKeeper, + appKeepers.UpgradeKeeper, + appKeepers.ScopedIBCKeeper, + ) + + // provider depends on gov, so gov must be registered first + govConfig := govtypes.DefaultConfig() + // set the MaxMetadataLen for proposals to the same value as it was pre-sdk v0.47.x + govConfig.MaxMetadataLen = 10200 + appKeepers.GovKeeper = govkeeper.NewKeeper( + appCodec, + appKeepers.keys[govtypes.StoreKey], + appKeepers.AccountKeeper, + appKeepers.BankKeeper, + appKeepers.StakingKeeper, + bApp.MsgServiceRouter(), + govConfig, + authtypes.NewModuleAddress(govtypes.ModuleName).String(), + ) + appKeepers.ProviderKeeper = ibcproviderkeeper.NewKeeper( + appCodec, + appKeepers.keys[providertypes.StoreKey], + appKeepers.GetSubspace(providertypes.ModuleName), + appKeepers.ScopedIBCProviderKeeper, + appKeepers.IBCKeeper.ChannelKeeper, + &appKeepers.IBCKeeper.PortKeeper, + appKeepers.IBCKeeper.ConnectionKeeper, + appKeepers.IBCKeeper.ClientKeeper, + appKeepers.StakingKeeper, + appKeepers.SlashingKeeper, + appKeepers.AccountKeeper, + appKeepers.DistrKeeper, + appKeepers.BankKeeper, + appKeepers.GovKeeper, + authtypes.FeeCollectorName, + ) + + appKeepers.ProviderModule = ibcprovider.NewAppModule(&appKeepers.ProviderKeeper, appKeepers.GetSubspace(providertypes.ModuleName)) + + // Register the proposal types + // Deprecated: Avoid adding new handlers, instead use the new proposal flow + // by granting the governance module the right to execute the message. + // See: https://docs.cosmos.network/main/modules/gov#proposal-messages + govRouter := govv1beta1.NewRouter() + govRouter. + AddRoute(govtypes.RouterKey, govv1beta1.ProposalHandler). + AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(appKeepers.ParamsKeeper)). + AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(appKeepers.UpgradeKeeper)). + AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(appKeepers.IBCKeeper.ClientKeeper)). 
+ AddRoute(providertypes.RouterKey, ibcprovider.NewProviderProposalHandler(appKeepers.ProviderKeeper)) + + // Set legacy router for backwards compatibility with gov v1beta1 + appKeepers.GovKeeper.SetLegacyRouter(govRouter) + + appKeepers.GovKeeper = appKeepers.GovKeeper.SetHooks( + govtypes.NewMultiGovHooks( + appKeepers.ProviderKeeper.Hooks(), + ), + ) + + evidenceKeeper := evidencekeeper.NewKeeper( + appCodec, + appKeepers.keys[evidencetypes.StoreKey], + appKeepers.StakingKeeper, + appKeepers.SlashingKeeper, + ) + // If evidence needs to be handled for the app, set routes in router here and seal + appKeepers.EvidenceKeeper = *evidenceKeeper + + // ICA Host keeper + appKeepers.ICAHostKeeper = icahostkeeper.NewKeeper( + appCodec, + appKeepers.keys[icahosttypes.StoreKey], + appKeepers.GetSubspace(icahosttypes.SubModuleName), + appKeepers.IBCKeeper.ChannelKeeper, // ICS4Wrapper + appKeepers.IBCKeeper.ChannelKeeper, + &appKeepers.IBCKeeper.PortKeeper, + appKeepers.AccountKeeper, + appKeepers.ScopedICAHostKeeper, + bApp.MsgServiceRouter(), + ) + + // PFMRouterKeeper must be created before TransferKeeper + authority := authtypes.NewModuleAddress(govtypes.ModuleName).String() + appKeepers.PFMRouterKeeper = pfmrouterkeeper.NewKeeper( + appCodec, + appKeepers.keys[pfmroutertypes.StoreKey], + nil, // Will be zero-value here. Reference is set later on with SetTransferKeeper. + appKeepers.IBCKeeper.ChannelKeeper, + appKeepers.DistrKeeper, + appKeepers.BankKeeper, + appKeepers.IBCKeeper.ChannelKeeper, + authority, + ) + + appKeepers.TransferKeeper = ibctransferkeeper.NewKeeper( + appCodec, + appKeepers.keys[ibctransfertypes.StoreKey], + appKeepers.GetSubspace(ibctransfertypes.ModuleName), + appKeepers.PFMRouterKeeper, // ISC4 Wrapper: PFM Router middleware + appKeepers.IBCKeeper.ChannelKeeper, + &appKeepers.IBCKeeper.PortKeeper, + appKeepers.AccountKeeper, + appKeepers.BankKeeper, + appKeepers.ScopedTransferKeeper, + ) + // Must be called on PFMRouter AFTER TransferKeeper initialized + appKeepers.PFMRouterKeeper.SetTransferKeeper(appKeepers.TransferKeeper) + + // Middleware Stacks + appKeepers.ICAModule = ica.NewAppModule(nil, &appKeepers.ICAHostKeeper) + appKeepers.TransferModule = transfer.NewAppModule(appKeepers.TransferKeeper) + appKeepers.PFMRouterModule = pfmrouter.NewAppModule(appKeepers.PFMRouterKeeper, appKeepers.GetSubspace(pfmroutertypes.ModuleName)) + + // create IBC module from bottom to top of stack + var transferStack porttypes.IBCModule + transferStack = transfer.NewIBCModule(appKeepers.TransferKeeper) + transferStack = pfmrouter.NewIBCMiddleware( + transferStack, + appKeepers.PFMRouterKeeper, + 0, + pfmrouterkeeper.DefaultForwardTransferPacketTimeoutTimestamp, + pfmrouterkeeper.DefaultRefundTransferPacketTimeoutTimestamp, + ) + + // Add transfer stack to IBC Router + + // Create Interchain Accounts Stack + var icaHostStack porttypes.IBCModule = icahost.NewIBCModule(appKeepers.ICAHostKeeper) + + // Create IBC Router & seal + ibcRouter := porttypes.NewRouter(). + AddRoute(icahosttypes.SubModuleName, icaHostStack). + AddRoute(ibctransfertypes.ModuleName, transferStack). + AddRoute(providertypes.ModuleName, appKeepers.ProviderModule) + + appKeepers.IBCKeeper.SetRouter(ibcRouter) + + return appKeepers +} + +// GetSubspace returns a param subspace for a given module name. 
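A brief illustrative aside (not part of the patch): the accessor defined just below is what NewAppKeeper uses above to hand legacy param subspaces to the IBC, ICA, provider and transfer keepers, and it panics for module names that were never registered in initParamsKeeper. A hedged sketch with a hypothetical helper name:

package keepers_test

import (
    ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types"

    paramstypes "github.com/cosmos/cosmos-sdk/x/params/types"

    "github.com/onomyprotocol/onomy-rebuild/app/keepers"
)

// transferSubspace only works because ibctransfertypes.ModuleName is
// registered in initParamsKeeper; an unknown module name would panic.
func transferSubspace(k keepers.AppKeepers) paramstypes.Subspace {
    return k.GetSubspace(ibctransfertypes.ModuleName)
}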
+func (appKeepers *AppKeepers) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, ok := appKeepers.ParamsKeeper.GetSubspace(moduleName) + if !ok { + panic("couldn't load subspace for module: " + moduleName) + } + return subspace +} + +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + //nolint: staticcheck // SA1019: moduletypes.ParamKeyTable is deprecated + paramsKeeper.Subspace(authtypes.ModuleName).WithKeyTable(authtypes.ParamKeyTable()) + paramsKeeper.Subspace(stakingtypes.ModuleName).WithKeyTable(stakingtypes.ParamKeyTable()) + paramsKeeper.Subspace(banktypes.ModuleName).WithKeyTable(banktypes.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(minttypes.ModuleName).WithKeyTable(minttypes.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(distrtypes.ModuleName).WithKeyTable(distrtypes.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(slashingtypes.ModuleName).WithKeyTable(slashingtypes.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govv1.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(crisistypes.ModuleName).WithKeyTable(crisistypes.ParamKeyTable()) //nolint: staticcheck // SA1019 + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibcexported.ModuleName) + paramsKeeper.Subspace(icahosttypes.SubModuleName) + paramsKeeper.Subspace(pfmroutertypes.ModuleName).WithKeyTable(pfmroutertypes.ParamKeyTable()) + paramsKeeper.Subspace(globalfee.ModuleName) + paramsKeeper.Subspace(providertypes.ModuleName) + + return paramsKeeper +} diff --git a/app/keepers/keys.go b/app/keepers/keys.go new file mode 100644 index 00000000..5996f243 --- /dev/null +++ b/app/keepers/keys.go @@ -0,0 +1,93 @@ +package keepers + +import ( + routertypes "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward/types" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + providertypes "github.com/cosmos/interchain-security/v3/x/ccv/provider/types" + + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + "github.com/cosmos/cosmos-sdk/x/feegrant" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" +) + +func (appKeepers *AppKeepers) GenerateKeys() { + // Define what keys will be 
used in the cosmos-sdk key/value store. + // Cosmos-SDK modules each have a "key" that allows the application to reference what they've stored on the chain. + appKeepers.keys = sdk.NewKVStoreKeys( + authtypes.StoreKey, + banktypes.StoreKey, + stakingtypes.StoreKey, + crisistypes.StoreKey, + minttypes.StoreKey, + distrtypes.StoreKey, + slashingtypes.StoreKey, + govtypes.StoreKey, + paramstypes.StoreKey, + ibcexported.StoreKey, + upgradetypes.StoreKey, + evidencetypes.StoreKey, + ibctransfertypes.StoreKey, + icahosttypes.StoreKey, + capabilitytypes.StoreKey, + feegrant.StoreKey, + authzkeeper.StoreKey, + routertypes.StoreKey, + providertypes.StoreKey, + consensusparamtypes.StoreKey, + ) + + // Define transient store keys + appKeepers.tkeys = sdk.NewTransientStoreKeys(paramstypes.TStoreKey) + + // MemKeys are for information that is stored only in RAM. + appKeepers.memKeys = sdk.NewMemoryStoreKeys(capabilitytypes.MemStoreKey) +} + +func (appKeepers *AppKeepers) GetKVStoreKey() map[string]*storetypes.KVStoreKey { + return appKeepers.keys +} + +func (appKeepers *AppKeepers) GetTransientStoreKey() map[string]*storetypes.TransientStoreKey { + return appKeepers.tkeys +} + +func (appKeepers *AppKeepers) GetMemoryStoreKey() map[string]*storetypes.MemoryStoreKey { + return appKeepers.memKeys +} + +// GetKey returns the KVStoreKey for the provided store key. +// +// NOTE: This is solely to be used for testing purposes. +func (appKeepers *AppKeepers) GetKey(storeKey string) *storetypes.KVStoreKey { + return appKeepers.keys[storeKey] +} + +// GetTKey returns the TransientStoreKey for the provided store key. +// +// NOTE: This is solely to be used for testing purposes. +func (appKeepers *AppKeepers) GetTKey(storeKey string) *storetypes.TransientStoreKey { + return appKeepers.tkeys[storeKey] +} + +// GetMemKey returns the MemStoreKey for the provided mem key. +// +// NOTE: This is solely used for testing purposes. 
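One more small sketch (editorial, not part of the patch), showing the three kinds of keys created by GenerateKeys through the accessors in this file; the helper name is hypothetical.

package keepers_test

import (
    capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types"
    paramstypes "github.com/cosmos/cosmos-sdk/x/params/types"
    stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"

    "github.com/onomyprotocol/onomy-rebuild/app/keepers"
)

// storeKindsExample touches one key of each kind wired up by GenerateKeys.
func storeKindsExample(k keepers.AppKeepers) {
    _ = k.GetKey(stakingtypes.StoreKey)          // persistent KV store key
    _ = k.GetTKey(paramstypes.TStoreKey)         // transient key, reset every block
    _ = k.GetMemKey(capabilitytypes.MemStoreKey) // in-memory key, never persisted
}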
+func (appKeepers *AppKeepers) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { + return appKeepers.memKeys[storeKey] +} diff --git a/app/modules.go b/app/modules.go new file mode 100644 index 00000000..e3b670ab --- /dev/null +++ b/app/modules.go @@ -0,0 +1,310 @@ +package onomy + +import ( + pfmrouter "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward" + pfmroutertypes "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v7/packetforward/types" + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + "github.com/cosmos/ibc-go/v7/modules/apps/transfer" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v7/modules/core" + ibcclientclient "github.com/cosmos/ibc-go/v7/modules/core/02-client/client" + ibcexported "github.com/cosmos/ibc-go/v7/modules/core/exported" + ibctm "github.com/cosmos/ibc-go/v7/modules/light-clients/07-tendermint" + icsprovider "github.com/cosmos/interchain-security/v3/x/ccv/provider" + icsproviderclient "github.com/cosmos/interchain-security/v3/x/ccv/provider/client" + providertypes "github.com/cosmos/interchain-security/v3/x/ccv/provider/types" + + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/x/auth" + authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/cosmos/cosmos-sdk/x/auth/vesting" + vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + "github.com/cosmos/cosmos-sdk/x/authz" + authzmodule "github.com/cosmos/cosmos-sdk/x/authz/module" + "github.com/cosmos/cosmos-sdk/x/bank" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/capability" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + "github.com/cosmos/cosmos-sdk/x/consensus" + consensusparamtypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" + distr "github.com/cosmos/cosmos-sdk/x/distribution" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + "github.com/cosmos/cosmos-sdk/x/evidence" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + "github.com/cosmos/cosmos-sdk/x/genutil" + genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types" + "github.com/cosmos/cosmos-sdk/x/gov" + govclient "github.com/cosmos/cosmos-sdk/x/gov/client" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + "github.com/cosmos/cosmos-sdk/x/mint" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + sdkparams "github.com/cosmos/cosmos-sdk/x/params" + paramsclient "github.com/cosmos/cosmos-sdk/x/params/client" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/cosmos/cosmos-sdk/x/slashing" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + "github.com/cosmos/cosmos-sdk/x/staking" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + "github.com/cosmos/cosmos-sdk/x/upgrade" + upgradeclient "github.com/cosmos/cosmos-sdk/x/upgrade/client" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + onomyappparams "github.com/onomyprotocol/onomy-rebuild/app/params" + "github.com/onomyprotocol/onomy-rebuild/x/globalfee" + 
"github.com/onomyprotocol/onomy-rebuild/x/metaprotocols" + metaprotocolstypes "github.com/onomyprotocol/onomy-rebuild/x/metaprotocols/types" +) + +var maccPerms = map[string][]string{ + authtypes.FeeCollectorName: nil, + distrtypes.ModuleName: nil, + icatypes.ModuleName: nil, + minttypes.ModuleName: {authtypes.Minter}, + stakingtypes.BondedPoolName: {authtypes.Burner, authtypes.Staking}, + stakingtypes.NotBondedPoolName: {authtypes.Burner, authtypes.Staking}, + govtypes.ModuleName: {authtypes.Burner}, + // liquiditytypes.ModuleName: {authtypes.Minter, authtypes.Burner}, + ibctransfertypes.ModuleName: {authtypes.Minter, authtypes.Burner}, + providertypes.ConsumerRewardsPool: nil, +} + +// ModuleBasics defines the module BasicManager is in charge of setting up basic, +// non-dependant module elements, such as codec registration +// and genesis verification. +var ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + genutil.NewAppModuleBasic(genutiltypes.DefaultMessageValidator), + bank.AppModuleBasic{}, + capability.AppModuleBasic{}, + staking.AppModuleBasic{}, + mint.AppModuleBasic{}, + distr.AppModuleBasic{}, + gov.NewAppModuleBasic( + []govclient.ProposalHandler{ + paramsclient.ProposalHandler, + upgradeclient.LegacyProposalHandler, + upgradeclient.LegacyCancelProposalHandler, + ibcclientclient.UpdateClientProposalHandler, + ibcclientclient.UpgradeProposalHandler, + icsproviderclient.ConsumerAdditionProposalHandler, + icsproviderclient.ConsumerRemovalProposalHandler, + icsproviderclient.ChangeRewardDenomsProposalHandler, + }, + ), + sdkparams.AppModuleBasic{}, + crisis.AppModuleBasic{}, + slashing.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + ibc.AppModuleBasic{}, + ibctm.AppModuleBasic{}, + upgrade.AppModuleBasic{}, + evidence.AppModuleBasic{}, + transfer.AppModuleBasic{}, + vesting.AppModuleBasic{}, + pfmrouter.AppModuleBasic{}, + ica.AppModuleBasic{}, + globalfee.AppModule{}, + icsprovider.AppModuleBasic{}, + consensus.AppModuleBasic{}, + metaprotocols.AppModuleBasic{}, +) + +func appModules( + app *OnomyApp, + encodingConfig onomyappparams.EncodingConfig, + skipGenesisInvariants bool, +) []module.AppModule { + appCodec := encodingConfig.Marshaler + + return []module.AppModule{ + genutil.NewAppModule( + app.AccountKeeper, + app.StakingKeeper, + app.BaseApp.DeliverTx, + encodingConfig.TxConfig, + ), + auth.NewAppModule(appCodec, app.AccountKeeper, nil, app.GetSubspace(authtypes.ModuleName)), + vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper, app.GetSubspace(banktypes.ModuleName)), + capability.NewAppModule(appCodec, *app.CapabilityKeeper, false), + crisis.NewAppModule(app.CrisisKeeper, skipGenesisInvariants, app.GetSubspace(crisistypes.ModuleName)), + gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(govtypes.ModuleName)), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, nil, app.GetSubspace(minttypes.ModuleName)), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(slashingtypes.ModuleName)), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(distrtypes.ModuleName)), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName)), + upgrade.NewAppModule(app.UpgradeKeeper), + 
evidence.NewAppModule(app.EvidenceKeeper), + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + ibc.NewAppModule(app.IBCKeeper), + sdkparams.NewAppModule(app.ParamsKeeper), + globalfee.NewAppModule(app.GetSubspace(globalfee.ModuleName)), + consensus.NewAppModule(appCodec, app.ConsensusParamsKeeper), + app.TransferModule, + app.ICAModule, + app.PFMRouterModule, + app.ProviderModule, + metaprotocols.NewAppModule(), + } +} + +// simulationModules returns modules for simulation manager +// define the order of the modules for deterministic simulations +func simulationModules( + app *OnomyApp, + encodingConfig onomyappparams.EncodingConfig, + _ bool, +) []module.AppModuleSimulation { + appCodec := encodingConfig.Marshaler + + return []module.AppModuleSimulation{ + auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper, app.GetSubspace(banktypes.ModuleName)), + capability.NewAppModule(appCodec, *app.CapabilityKeeper, false), + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(govtypes.ModuleName)), + mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper, nil, app.GetSubspace(minttypes.ModuleName)), + staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName)), + distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(distrtypes.ModuleName)), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, app.GetSubspace(slashingtypes.ModuleName)), + sdkparams.NewAppModule(app.ParamsKeeper), + evidence.NewAppModule(app.EvidenceKeeper), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + ibc.NewAppModule(app.IBCKeeper), + app.TransferModule, + app.ICAModule, + app.ProviderModule, + } +} + +/* +orderBeginBlockers tells the app's module manager how to set the order of +BeginBlockers, which are run at the beginning of every block. + +Interchain Security Requirements: +During begin block slashing happens after distr.BeginBlocker so that +there is nothing left over in the validator fee pool, so as to keep the +CanWithdrawInvariant invariant. +NOTE: staking module is required if HistoricalEntries param > 0 +NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. 
IBC) +*/ + +func orderBeginBlockers() []string { + return []string{ + // upgrades should be run first + upgradetypes.ModuleName, + capabilitytypes.ModuleName, + minttypes.ModuleName, + distrtypes.ModuleName, + slashingtypes.ModuleName, + evidencetypes.ModuleName, + stakingtypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + govtypes.ModuleName, + crisistypes.ModuleName, + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + icatypes.ModuleName, + pfmroutertypes.ModuleName, + genutiltypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + paramstypes.ModuleName, + vestingtypes.ModuleName, + globalfee.ModuleName, + providertypes.ModuleName, + consensusparamtypes.ModuleName, + metaprotocolstypes.ModuleName, + } +} + +/* +Interchain Security Requirements: +- provider.EndBlock gets validator updates from the staking module; +thus, staking.EndBlock must be executed before provider.EndBlock; +- creating a new consumer chain requires the following order, +CreateChildClient(), staking.EndBlock, provider.EndBlock; +thus, gov.EndBlock must be executed before staking.EndBlock +*/ +func orderEndBlockers() []string { + return []string{ + crisistypes.ModuleName, + govtypes.ModuleName, + stakingtypes.ModuleName, + ibcexported.ModuleName, + ibctransfertypes.ModuleName, + icatypes.ModuleName, + pfmroutertypes.ModuleName, + capabilitytypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + slashingtypes.ModuleName, + minttypes.ModuleName, + genutiltypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + globalfee.ModuleName, + providertypes.ModuleName, + consensusparamtypes.ModuleName, + metaprotocolstypes.ModuleName, + } +} + +/* +NOTE: The genutils module must occur after staking so that pools are +properly initialized with tokens from genesis accounts. +NOTE: The genutils module must also occur after auth so that it can access the params from auth. +NOTE: Capability module must occur first so that it can initialize any capabilities +so that other modules that want to create or claim capabilities afterwards in InitChain +can do so safely. +*/ +func orderInitBlockers() []string { + return []string{ + capabilitytypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + distrtypes.ModuleName, + govtypes.ModuleName, + stakingtypes.ModuleName, + slashingtypes.ModuleName, + minttypes.ModuleName, + crisistypes.ModuleName, + genutiltypes.ModuleName, + ibctransfertypes.ModuleName, + ibcexported.ModuleName, + icatypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + pfmroutertypes.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + // The globalfee module should ideally be initialized before the genutil module in theory: + // The globalfee antehandler performs checks in DeliverTx, which is called by gentx. + // When the global fee > 0, gentx needs to pay the fee. However, this is not expected, + // (in our case, the global fee is initialized with an empty value, which might not be a problem + // if the globalfee in genesis is not changed.) + // To resolve this issue, we should initialize the globalfee module after genutil, ensuring that the global + // min fee is empty when gentx is called. 
+ // For more details, please refer to the following link: https://github.com/onomyprotocol/onomy-rebuild/issues/2489 + globalfee.ModuleName, + providertypes.ModuleName, + consensusparamtypes.ModuleName, + metaprotocolstypes.ModuleName, + } +} diff --git a/app/params/amino.go b/app/params/amino.go new file mode 100644 index 00000000..364e917b --- /dev/null +++ b/app/params/amino.go @@ -0,0 +1,23 @@ +//go:build test_amino +// +build test_amino + +package params + +import ( + "github.com/cosmos/cosmos-sdk/codec" + cdctypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx" +) + +func MakeTestEncodingConfig() EncodingConfig { + cdc := codec.NewLegacyAmino() + interfaceRegistry := cdctypes.NewInterfaceRegistry() + codec := codec.NewProtoCodec(interfaceRegistry) + + return EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: codec, + TxConfig: legacytx.StdTxConfig{Cdc: cdc}, + Amino: cdc, + } +} diff --git a/app/params/doc.go b/app/params/doc.go new file mode 100644 index 00000000..9e135c8c --- /dev/null +++ b/app/params/doc.go @@ -0,0 +1,19 @@ +/* +Package params defines the simulation parameters in the onomy. + +It contains the default weights used for each transaction used on the module's +simulation. These weights define the chance for a transaction to be simulated at +any given operation. + +You can replace the default values for the weights by providing a params.json +file with the weights defined for each of the transaction operations: + + { + "op_weight_msg_send": 60, + "op_weight_msg_delegate": 100, + } + +In the example above, the `MsgSend` has 60% chance to be simulated, while the +`MsgDelegate` will always be simulated. +*/ +package params diff --git a/app/params/encoding.go b/app/params/encoding.go new file mode 100644 index 00000000..3d634abf --- /dev/null +++ b/app/params/encoding.go @@ -0,0 +1,16 @@ +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. +type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Marshaler codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} diff --git a/app/params/params.go b/app/params/params.go new file mode 100644 index 00000000..b6aa5fb5 --- /dev/null +++ b/app/params/params.go @@ -0,0 +1,7 @@ +package params + +// Simulation parameter constants +const ( + StakePerAccount = "stake_per_account" + InitiallyBondedValidators = "initially_bonded_validators" +) diff --git a/app/params/proto.go b/app/params/proto.go new file mode 100644 index 00000000..4e6c8494 --- /dev/null +++ b/app/params/proto.go @@ -0,0 +1,21 @@ +package params + +import ( + "github.com/cosmos/cosmos-sdk/codec" + codectypes "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/x/auth/tx" +) + +// MakeEncodingConfig creates an EncodingConfig for an amino based test configuration. 
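As a quick hedged illustration (editorial, not part of the patch) of what the config built just below carries, a test next to this file could look like this; the test name is an assumption. Note that the function body wires a proto codec plus a legacy amino codec, matching the EncodingConfig fields in encoding.go.

package params_test

import (
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/onomyprotocol/onomy-rebuild/app/params"
)

func TestMakeEncodingConfigIsComplete(t *testing.T) {
    // The config bundles the interface registry, proto codec, tx config and
    // legacy amino codec used throughout the app.
    cfg := params.MakeEncodingConfig()
    require.NotNil(t, cfg.InterfaceRegistry)
    require.NotNil(t, cfg.Marshaler)
    require.NotNil(t, cfg.TxConfig)
    require.NotNil(t, cfg.Amino)
}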
+func MakeEncodingConfig() EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := codectypes.NewInterfaceRegistry() + cdc := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(cdc, tx.DefaultSignModes) + return EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Marshaler: cdc, + TxConfig: txCfg, + Amino: amino, + } +} diff --git a/app/params/weights.go b/app/params/weights.go new file mode 100644 index 00000000..6e43b905 --- /dev/null +++ b/app/params/weights.go @@ -0,0 +1,24 @@ +package params + +// Default simulation operation weights for messages and gov proposals +const ( + DefaultWeightMsgSend int = 100 + DefaultWeightMsgMultiSend int = 10 + DefaultWeightMsgSetWithdrawAddress int = 50 + DefaultWeightMsgWithdrawDelegationReward int = 50 + DefaultWeightMsgWithdrawValidatorCommission int = 50 + DefaultWeightMsgFundCommunityPool int = 50 + DefaultWeightMsgDeposit int = 100 + DefaultWeightMsgVote int = 67 + DefaultWeightMsgUnjail int = 100 + DefaultWeightMsgCreateValidator int = 100 + DefaultWeightMsgEditValidator int = 5 + DefaultWeightMsgDelegate int = 100 + DefaultWeightMsgUndelegate int = 100 + DefaultWeightMsgBeginRedelegate int = 100 + DefaultWeightMsgCancelUnbondingDelegation int = 100 + + DefaultWeightCommunitySpendProposal int = 5 + DefaultWeightTextProposal int = 5 + DefaultWeightParamChangeProposal int = 5 +) diff --git a/app/sim/sim_config.go b/app/sim/sim_config.go new file mode 100644 index 00000000..43c15030 --- /dev/null +++ b/app/sim/sim_config.go @@ -0,0 +1,75 @@ +package sim + +import ( + "flag" + + "github.com/cosmos/cosmos-sdk/types/simulation" +) + +// List of available flags for the simulator +var ( + FlagGenesisFileValue string + FlagParamsFileValue string + FlagExportParamsPathValue string + FlagExportParamsHeightValue int + FlagExportStatePathValue string + FlagExportStatsPathValue string + FlagSeedValue int64 + FlagInitialBlockHeightValue int + FlagNumBlocksValue int + FlagBlockSizeValue int + FlagLeanValue bool + FlagCommitValue bool + FlagOnOperationValue bool // TODO: Remove in favor of binary search for invariant violation + FlagAllInvariantsValue bool + + FlagEnabledValue bool + FlagVerboseValue bool + FlagPeriodValue uint + FlagGenesisTimeValue int64 +) + +// GetSimulatorFlags gets the values of all the available simulation flags +func GetSimulatorFlags() { + // config fields + flag.StringVar(&FlagGenesisFileValue, "Genesis", "", "custom simulation genesis file; cannot be used with params file") + flag.StringVar(&FlagParamsFileValue, "Params", "", "custom simulation params file which overrides any random params; cannot be used with genesis") + flag.StringVar(&FlagExportParamsPathValue, "ExportParamsPath", "", "custom file path to save the exported params JSON") + flag.IntVar(&FlagExportParamsHeightValue, "ExportParamsHeight", 0, "height to which export the randomly generated params") + flag.StringVar(&FlagExportStatePathValue, "ExportStatePath", "", "custom file path to save the exported app state JSON") + flag.StringVar(&FlagExportStatsPathValue, "ExportStatsPath", "", "custom file path to save the exported simulation statistics JSON") + flag.Int64Var(&FlagSeedValue, "Seed", 42, "simulation random seed") + flag.IntVar(&FlagInitialBlockHeightValue, "InitialBlockHeight", 1, "initial block to start the simulation") + flag.IntVar(&FlagNumBlocksValue, "NumBlocks", 500, "number of new blocks to simulate from the initial block height") + flag.IntVar(&FlagBlockSizeValue, "BlockSize", 200, "operations per block") + 
flag.BoolVar(&FlagLeanValue, "Lean", false, "lean simulation log output") + flag.BoolVar(&FlagCommitValue, "Commit", false, "have the simulation commit") + flag.BoolVar(&FlagOnOperationValue, "SimulateEveryOperation", false, "run slow invariants every operation") + flag.BoolVar(&FlagAllInvariantsValue, "PrintAllInvariants", false, "print all invariants if a broken invariant is found") + + // simulation flags + flag.BoolVar(&FlagEnabledValue, "Enabled", false, "enable the simulation") + flag.BoolVar(&FlagVerboseValue, "Verbose", false, "verbose log output") + flag.UintVar(&FlagPeriodValue, "Period", 0, "run slow invariants only once every period assertions") + flag.Int64Var(&FlagGenesisTimeValue, "GenesisTime", 0, "override genesis UNIX time instead of using a random UNIX time") +} + +// NewConfigFromFlags creates a simulation from the retrieved values of the flags. +func NewConfigFromFlags() simulation.Config { + return simulation.Config{ + GenesisFile: FlagGenesisFileValue, + ParamsFile: FlagParamsFileValue, + ExportParamsPath: FlagExportParamsPathValue, + ExportParamsHeight: FlagExportParamsHeightValue, + ExportStatePath: FlagExportStatePathValue, + ExportStatsPath: FlagExportStatsPathValue, + Seed: FlagSeedValue, + InitialBlockHeight: FlagInitialBlockHeightValue, + NumBlocks: FlagNumBlocksValue, + BlockSize: FlagBlockSizeValue, + Lean: FlagLeanValue, + Commit: FlagCommitValue, + OnOperation: FlagOnOperationValue, + AllInvariants: FlagAllInvariantsValue, + } +} diff --git a/app/sim/sim_state.go b/app/sim/sim_state.go new file mode 100644 index 00000000..71d0b265 --- /dev/null +++ b/app/sim/sim_state.go @@ -0,0 +1,262 @@ +package sim + +import ( + "encoding/json" + "fmt" + "io" + "math/rand" + "os" + "time" + + tmjson "github.com/cometbft/cometbft/libs/json" + tmtypes "github.com/cometbft/cometbft/types" + + "cosmossdk.io/math" + + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + onomy "github.com/onomyprotocol/onomy-rebuild/app" + "github.com/onomyprotocol/onomy-rebuild/app/params" +) + +// Simulation parameter constants +const ( + StakePerAccount = "stake_per_account" + InitiallyBondedValidators = "initially_bonded_validators" +) + +// AppStateFn returns the initial application state using a genesis or the simulation parameters. +// It panics if the user provides files for both of them. +// If a file is not given for the genesis or the sim params, it creates a randomized one. 
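To show how these pieces fit together, here is an editorial sketch (not part of the patch): the flag-driven config from sim_config.go and the AppStateFn defined below are combined before invoking the SDK simulator, much as the benchmark later in this patch does. The helper name is hypothetical.

package sim_test

import (
    simtypes "github.com/cosmos/cosmos-sdk/types/simulation"

    onomy "github.com/onomyprotocol/onomy-rebuild/app"
    "github.com/onomyprotocol/onomy-rebuild/app/params"
    "github.com/onomyprotocol/onomy-rebuild/app/sim"
)

// prepareSimulation registers the simulator flags, snapshots them into a
// config, and builds the app-state generator the simulator calls to produce
// a (possibly randomized) genesis.
func prepareSimulation(app *onomy.OnomyApp, encConfig params.EncodingConfig) (simtypes.Config, simtypes.AppStateFn) {
    sim.GetSimulatorFlags()
    config := sim.NewConfigFromFlags()
    return config, sim.AppStateFn(encConfig, app.SimulationManager())
}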
+func AppStateFn(encConfig params.EncodingConfig, simManager *module.SimulationManager) simtypes.AppStateFn { + return func(r *rand.Rand, accs []simtypes.Account, config simtypes.Config, + ) (appState json.RawMessage, simAccs []simtypes.Account, chainID string, genesisTimestamp time.Time) { + cdc := encConfig.Marshaler + + if FlagGenesisTimeValue == 0 { + genesisTimestamp = simtypes.RandTimestamp(r) + } else { + genesisTimestamp = time.Unix(FlagGenesisTimeValue, 0) + } + + chainID = config.ChainID + switch { + case config.ParamsFile != "" && config.GenesisFile != "": + panic("cannot provide both a genesis file and a params file") + + case config.GenesisFile != "": + // override the default chain-id from simapp to set it later to the config + genesisDoc, accounts, err := AppStateFromGenesisFileFn(r, cdc, config.GenesisFile) + if err != nil { + panic(err) + } + + if FlagGenesisTimeValue == 0 { + // use genesis timestamp if no custom timestamp is provided (i.e no random timestamp) + genesisTimestamp = genesisDoc.GenesisTime + } + + appState = genesisDoc.AppState + chainID = genesisDoc.ChainID + simAccs = accounts + + case config.ParamsFile != "": + appParams := make(simtypes.AppParams) + bz, err := os.ReadFile(config.ParamsFile) + if err != nil { + panic(err) + } + + err = json.Unmarshal(bz, &appParams) + if err != nil { + panic(err) + } + appState, simAccs = AppStateRandomizedFn(simManager, r, encConfig, accs, genesisTimestamp, appParams) + + default: + appParams := make(simtypes.AppParams) + appState, simAccs = AppStateRandomizedFn(simManager, r, encConfig, accs, genesisTimestamp, appParams) + } + + rawState := make(map[string]json.RawMessage) + err := json.Unmarshal(appState, &rawState) + if err != nil { + panic(err) + } + + stakingStateBz, ok := rawState[stakingtypes.ModuleName] + if !ok { + panic("staking genesis state is missing") + } + + stakingState := new(stakingtypes.GenesisState) + err = cdc.UnmarshalJSON(stakingStateBz, stakingState) + if err != nil { + panic(err) + } + // compute not bonded balance + notBondedTokens := sdk.ZeroInt() + for _, val := range stakingState.Validators { + if val.Status != stakingtypes.Unbonded { + continue + } + notBondedTokens = notBondedTokens.Add(val.GetTokens()) + } + notBondedCoins := sdk.NewCoin(stakingState.Params.BondDenom, notBondedTokens) + // edit bank state to make it have the not bonded pool tokens + bankStateBz, ok := rawState[banktypes.ModuleName] + // TODO(fdymylja/jonathan): should we panic in this case + if !ok { + panic("bank genesis state is missing") + } + bankState := new(banktypes.GenesisState) + err = cdc.UnmarshalJSON(bankStateBz, bankState) + if err != nil { + panic(err) + } + + stakingAddr := authtypes.NewModuleAddress(stakingtypes.NotBondedPoolName).String() + var found bool + for _, balance := range bankState.Balances { + if balance.Address == stakingAddr { + found = true + break + } + } + if !found { + bankState.Balances = append(bankState.Balances, banktypes.Balance{ + Address: stakingAddr, + Coins: sdk.NewCoins(notBondedCoins), + }) + } + + // change appState back + rawState[stakingtypes.ModuleName] = cdc.MustMarshalJSON(stakingState) + rawState[banktypes.ModuleName] = cdc.MustMarshalJSON(bankState) + + // replace appstate + appState, err = json.Marshal(rawState) + if err != nil { + panic(err) + } + return appState, simAccs, chainID, genesisTimestamp + } +} + +// AppStateRandomizedFn creates calls each module's GenesisState generator function +// and creates the simulation params +func AppStateRandomizedFn( + simManager 
*module.SimulationManager, r *rand.Rand, encConfig params.EncodingConfig, + accs []simtypes.Account, genesisTimestamp time.Time, appParams simtypes.AppParams, +) (json.RawMessage, []simtypes.Account) { + numAccs := int64(len(accs)) + cdc := encConfig.Marshaler + genesisState := onomy.NewDefaultGenesisState(encConfig) + + // generate a random amount of initial stake coins and a random initial + // number of bonded accounts + var ( + numInitiallyBonded int64 + initialStake math.Int + ) + + appParams.GetOrGenerate( + cdc, StakePerAccount, &initialStake, r, + func(r *rand.Rand) { initialStake = math.NewInt(r.Int63n(1e12)) }, + ) + appParams.GetOrGenerate( + cdc, InitiallyBondedValidators, &numInitiallyBonded, r, + func(r *rand.Rand) { numInitiallyBonded = int64(r.Intn(300)) }, + ) + + if numInitiallyBonded > numAccs { + numInitiallyBonded = numAccs + } + + fmt.Printf( + `Selected randomly generated parameters for simulated genesis: +{ + stake_per_account: "%d", + initially_bonded_validators: "%d" +} +`, initialStake, numInitiallyBonded, + ) + + simState := &module.SimulationState{ + AppParams: appParams, + Cdc: cdc, + Rand: r, + GenState: genesisState, + Accounts: accs, + InitialStake: initialStake, + NumBonded: numInitiallyBonded, + GenTimestamp: genesisTimestamp, + } + + simManager.GenerateGenesisStates(simState) + + appState, err := json.Marshal(genesisState) + if err != nil { + panic(err) + } + + return appState, accs +} + +// AppStateFromGenesisFileFn util function to generate the genesis AppState +// from a genesis.json file. +func AppStateFromGenesisFileFn(r io.Reader, cdc codec.JSONCodec, genesisFile string) (tmtypes.GenesisDoc, []simtypes.Account, error) { + bytes, err := os.ReadFile(genesisFile) + if err != nil { + panic(err) + } + + var genesis tmtypes.GenesisDoc + // NOTE: Tendermint uses a custom JSON decoder for GenesisDoc + err = tmjson.Unmarshal(bytes, &genesis) + if err != nil { + panic(err) + } + + var appState onomy.GenesisState + err = json.Unmarshal(genesis.AppState, &appState) + if err != nil { + panic(err) + } + + var authGenesis authtypes.GenesisState + if appState[authtypes.ModuleName] != nil { + cdc.MustUnmarshalJSON(appState[authtypes.ModuleName], &authGenesis) + } + + newAccs := make([]simtypes.Account, len(authGenesis.Accounts)) + for i, acc := range authGenesis.Accounts { + // Pick a random private key, since we don't know the actual key + // This should be fine as it's only used for mock Tendermint validators + // and these keys are never actually used to sign by mock Tendermint. 
+ privkeySeed := make([]byte, 15) + if _, err := r.Read(privkeySeed); err != nil { + panic(err) + } + + privKey := secp256k1.GenPrivKeyFromSecret(privkeySeed) + + a, ok := acc.GetCachedValue().(authtypes.AccountI) + if !ok { + return genesis, nil, fmt.Errorf("expected account") + } + + // create simulator accounts + simAcc := simtypes.Account{PrivKey: privKey, PubKey: privKey.PubKey(), Address: a.GetAddress()} + newAccs[i] = simAcc + } + + return genesis, newAccs, nil +} diff --git a/app/sim/sim_utils.go b/app/sim/sim_utils.go new file mode 100644 index 00000000..5786ac9b --- /dev/null +++ b/app/sim/sim_utils.go @@ -0,0 +1,77 @@ +package sim + +import ( + "encoding/json" + "fmt" + "os" + + dbm "github.com/cometbft/cometbft-db" + + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/runtime" + "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + + onomy "github.com/onomyprotocol/onomy-rebuild/app" +) + +// SimulationOperations retrieves the simulation params from the provided file path +// and returns all the modules weighted operations +func SimulationOperations(app *onomy.OnomyApp, cdc codec.JSONCodec, config simtypes.Config) []simtypes.WeightedOperation { + simState := module.SimulationState{ + AppParams: make(simtypes.AppParams), + Cdc: cdc, + } + + if config.ParamsFile != "" { + bz, err := os.ReadFile(config.ParamsFile) + if err != nil { + panic(err) + } + + err = json.Unmarshal(bz, &simState.AppParams) + if err != nil { + panic(err) + } + } + + simState.LegacyProposalContents = app.SimulationManager().GetProposalContents(simState) //nolint:staticcheck + simState.ProposalMsgs = app.SimulationManager().GetProposalMsgs(simState) + return app.SimulationManager().WeightedOperations(simState) +} + +// CheckExportSimulation exports the app state and simulation parameters to JSON +// if the export paths are defined. +func CheckExportSimulation(app runtime.AppI, config simtypes.Config, params simtypes.Params) error { + if config.ExportStatePath != "" { + fmt.Println("exporting app state...") + exported, err := app.ExportAppStateAndValidators(false, nil, nil) + if err != nil { + return err + } + + if err := os.WriteFile(config.ExportStatePath, []byte(exported.AppState), 0o600); err != nil { + return err + } + } + + if config.ExportParamsPath != "" { + fmt.Println("exporting simulation params...") + paramsBz, err := json.MarshalIndent(params, "", " ") + if err != nil { + return err + } + + if err := os.WriteFile(config.ExportParamsPath, paramsBz, 0o600); err != nil { + return err + } + } + return nil +} + +// PrintStats prints the corresponding statistics from the app DB. 
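+// The stat keys queried below ("leveldb.stats", "leveldb.cachedblock") are exposed by the goleveldb backend; with a different cometbft-db backend the lookups may simply yield empty strings. Typical usage, as in the simulation tests, is to call PrintStats(db) only when config.Commit is set.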
+func PrintStats(db dbm.DB) { + fmt.Println("\nLevelDB Stats") + fmt.Println(db.Stats()["leveldb.stats"]) + fmt.Println("LevelDB cached block size", db.Stats()["leveldb.cachedblock"]) +} diff --git a/app/sim_bench_test.go b/app/sim_bench_test.go new file mode 100644 index 00000000..4cccc7f2 --- /dev/null +++ b/app/sim_bench_test.go @@ -0,0 +1,85 @@ +package onomy_test + +import ( + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simulation2 "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + + onomy "github.com/onomyprotocol/onomy-rebuild/app" + "github.com/onomyprotocol/onomy-rebuild/app/sim" +) + +// Profile with: +// /usr/local/go/bin/go test -benchmem -run=^$ github.com/cosmos/cosmos-sdk/OnomyApp -bench ^BenchmarkFullAppSimulation$ -Commit=true -cpuprofile cpu.out +func BenchmarkFullAppSimulation(b *testing.B) { + b.ReportAllocs() + + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "goleveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + defer func() { + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + encConfig := onomy.RegisterEncodingConfig() + + app := onomy.NewOnomyApp( + logger, + db, + nil, + true, + map[int64]bool{}, + onomy.DefaultNodeHome, + encConfig, + appOptions, + interBlockCacheOpt(), + baseapp.SetChainID(AppChainID), + ) + + // Run randomized simulation:w + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + app.BaseApp, + sim.AppStateFn(encConfig, app.SimulationManager()), + simulation2.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + sim.SimulationOperations(app, app.AppCodec(), config), + app.ModuleAccountAddrs(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = sim.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + sim.PrintStats(db) + } +} diff --git a/app/sim_test.go b/app/sim_test.go new file mode 100644 index 00000000..38242da4 --- /dev/null +++ b/app/sim_test.go @@ -0,0 +1,143 @@ +package onomy_test + +import ( + "encoding/json" + "fmt" + "math/rand" + "os" + "testing" + + "github.com/stretchr/testify/require" + + dbm "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + + "cosmossdk.io/math" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + "github.com/cosmos/cosmos-sdk/store" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simulation2 "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + + "github.com/onomyprotocol/onomy-rebuild/ante" + onomy "github.com/onomyprotocol/onomy-rebuild/app" + + // 
"github.com/onomyprotocol/onomy-rebuild/v11/app/helpers" + // "github.com/onomyprotocol/onomy-rebuild/v11/app/params" + "github.com/onomyprotocol/onomy-rebuild/app/sim" +) + +// AppChainID hardcoded chainID for simulation +const AppChainID = "onomy-app" + +func init() { + sim.GetSimulatorFlags() +} + +// interBlockCacheOpt returns a BaseApp option function that sets the persistent +// inter-block write-through cache. +func interBlockCacheOpt() func(*baseapp.BaseApp) { + return baseapp.SetInterBlockCache(store.NewCommitKVStoreCacheManager()) +} + +// TODO: Make another test for the fuzzer itself, which just has noOp txs +// and doesn't depend on the application. +func TestAppStateDeterminism(t *testing.T) { + if !sim.FlagEnabledValue { + t.Skip("skipping application simulation") + } + + config := sim.NewConfigFromFlags() + config.InitialBlockHeight = 1 + config.ExportParamsPath = "" + config.OnOperation = false + config.AllInvariants = false + config.ChainID = AppChainID + + numSeeds := 3 + numTimesToRunPerSeed := 5 + + // We will be overriding the random seed and just run a single simulation on the provided seed value + if config.Seed != simcli.DefaultSeedValue { + numSeeds = 1 + } + + appHashList := make([]json.RawMessage, numTimesToRunPerSeed) + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = onomy.DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = sim.FlagPeriodValue + + for i := 0; i < numSeeds; i++ { + if config.Seed == simcli.DefaultSeedValue { + config.Seed = rand.Int63() + } + + fmt.Println("config.Seed: ", config.Seed) + + for j := 0; j < numTimesToRunPerSeed; j++ { + var logger log.Logger + if sim.FlagVerboseValue { + logger = log.TestingLogger() + } else { + logger = log.NewNopLogger() + } + + db := dbm.NewMemDB() + encConfig := onomy.RegisterEncodingConfig() + app := onomy.NewOnomyApp( + logger, + db, + nil, + true, + map[int64]bool{}, + onomy.DefaultNodeHome, + encConfig, + appOptions, + interBlockCacheOpt(), + baseapp.SetChainID(AppChainID), + ) + + // NOTE: setting to zero to avoid failing the simulation + // due to the minimum staked tokens required to submit a vote + ante.SetMinStakedTokens(math.LegacyZeroDec()) + + fmt.Printf( + "running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n", + config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + + blockedAddresses := app.BlockedModuleAccountAddrs(app.ModuleAccountAddrs()) + + _, _, err := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), onomy.NewDefaultGenesisState(encConfig)), + simulation2.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + blockedAddresses, + config, + app.AppCodec(), + ) + require.NoError(t, err) + + if config.Commit { + sim.PrintStats(db) + } + + appHash := app.LastCommitID().Hash + appHashList[j] = appHash + + if j != 0 { + require.Equal( + t, string(appHashList[0]), string(appHashList[j]), + "non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + } + } + } +} diff --git a/app/upgrades/readme.md b/app/upgrades/readme.md deleted file mode 100644 index 2dd1bee6..00000000 --- a/app/upgrades/readme.md +++ /dev/null @@ -1,13 +0,0 @@ -# Onomy Upgrades - -This folder contains sub-folders for every chain upgrade. 
- -## Version History - -- v1.1.4 - Add missing authz module -- v1.1.2 - Fix that the treasury is actually subtracted from the staking supply -- v1.1.1 - Removal of Gravity module and addition of provider module -- v1.0.3.5 - OIP 6 Patch Subtract DAO Treasury from Staking Supply -- v1.0.3.4 - OIP 6 Undelegate DAO from all validators -- v1.0.3 - Hyperinflation fix -- v1.0.1 - IBC integration fix diff --git a/app/upgrades/types.go b/app/upgrades/types.go new file mode 100644 index 00000000..063bb11f --- /dev/null +++ b/app/upgrades/types.go @@ -0,0 +1,40 @@ +package upgrades + +import ( + store "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +// Upgrade defines a struct containing necessary fields that a SoftwareUpgradeProposal +// must have written, in order for the state migration to go smoothly. +// An upgrade must implement this struct, and then set it in the app.go. +// The app.go will then define the handler. +type Upgrade struct { + // Upgrade version name, for the upgrade handler, e.g. `v7` + UpgradeName string + + // CreateUpgradeHandler defines the function that creates an upgrade handler + CreateUpgradeHandler func(*module.Manager, module.Configurator, *keepers.AppKeepers) upgradetypes.UpgradeHandler + + // Store upgrades, should be used for any new modules introduced, new modules deleted, or store names renamed. + StoreUpgrades store.StoreUpgrades +} + +// Fork defines a struct containing the requisite fields for a non-software upgrade proposal +// Hard Fork at a given height to implement. +// There is one time code that can be added for the start of the Fork, in `BeginForkLogic`. +// Any other change in the code should be height-gated, if the goal is to have old and new binaries +// to be compatible prior to the upgrade height. +type Fork struct { + // Upgrade version name, for the upgrade handler, e.g. `v7` + UpgradeName string + // height the upgrade occurs at + UpgradeHeight int64 + + // Function that runs some custom state transition code at the beginning of a fork. + BeginForkLogic func(ctx sdk.Context, keepers *keepers.AppKeepers) +} diff --git a/app/upgrades/v1.0.1/upgrade.go b/app/upgrades/v1.0.1/upgrade.go deleted file mode 100644 index 6bea2732..00000000 --- a/app/upgrades/v1.0.1/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_0_1 is contains chain upgrade of the corresponding version. -package v1_0_1 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. -const Name = "v1.0.1" - -// UpgradeHandler is an x/upgrade handler. -func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v1.0.3.4/upgrade.go b/app/upgrades/v1.0.3.4/upgrade.go deleted file mode 100644 index b7c9f48d..00000000 --- a/app/upgrades/v1.0.3.4/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_0_3_4 is contains chain upgrade of the corresponding version. -package v1_0_3_4 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. 
-const Name = "v1.0.3.4" - -// UpgradeHandler is an x/upgrade handler. -func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v1.0.3.5/upgrade.go b/app/upgrades/v1.0.3.5/upgrade.go deleted file mode 100644 index bbd643d2..00000000 --- a/app/upgrades/v1.0.3.5/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_0_3_5 is contains chain upgrade of the corresponding version. -package v1_0_3_5 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. -const Name = "v1.0.3.5" - -// UpgradeHandler is an x/upgrade handler. -func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v1.0.3/upgrade.go b/app/upgrades/v1.0.3/upgrade.go deleted file mode 100644 index 7ae09de9..00000000 --- a/app/upgrades/v1.0.3/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_0_3 is contains chain upgrade of the corresponding version. -package v1_0_3 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. -const Name = "v1.0.3" - -// UpgradeHandler is an x/upgrade handler. -func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v1.1.1/upgrade.go b/app/upgrades/v1.1.1/upgrade.go deleted file mode 100644 index 4581f2b6..00000000 --- a/app/upgrades/v1.1.1/upgrade.go +++ /dev/null @@ -1,5 +0,0 @@ -// Package v1_1_1 is contains chain upgrade of the corresponding version. -package v1_1_1 //nolint:revive,stylecheck // app version - -// Name is migration name. -const Name = "v1.1.1" diff --git a/app/upgrades/v1.1.2/upgrade.go b/app/upgrades/v1.1.2/upgrade.go deleted file mode 100644 index 08ee82dd..00000000 --- a/app/upgrades/v1.1.2/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_1_2 is contains chain upgrade of the corresponding version. -package v1_1_2 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. -const Name = "v1.1.2" - -// UpgradeHandler is an x/upgrade handler. -func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v1.1.4/upgrade.go b/app/upgrades/v1.1.4/upgrade.go deleted file mode 100644 index ae3789a7..00000000 --- a/app/upgrades/v1.1.4/upgrade.go +++ /dev/null @@ -1,16 +0,0 @@ -// Package v1_1_4 is contains chain upgrade of the corresponding version. -package v1_1_4 //nolint:revive,stylecheck // app version - -import ( - sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" - upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" -) - -// Name is migration name. -const Name = "v1.1.4" - -// UpgradeHandler is an x/upgrade handler. 
-func UpgradeHandler(_ sdk.Context, _ upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { - return vm, nil -} diff --git a/app/upgrades/v10/constants.go b/app/upgrades/v10/constants.go new file mode 100644 index 00000000..d066bd25 --- /dev/null +++ b/app/upgrades/v10/constants.go @@ -0,0 +1,15 @@ +package v10 + +import ( + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. + UpgradeName = "v10" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, +} diff --git a/app/upgrades/v10/upgrades.go b/app/upgrades/v10/upgrades.go new file mode 100644 index 00000000..aec5552b --- /dev/null +++ b/app/upgrades/v10/upgrades.go @@ -0,0 +1,27 @@ +package v10 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + ctx.Logger().Info("Upgrade complete") + return vm, err + } +} diff --git a/app/upgrades/v11/constants.go b/app/upgrades/v11/constants.go new file mode 100644 index 00000000..273f7537 --- /dev/null +++ b/app/upgrades/v11/constants.go @@ -0,0 +1,15 @@ +package v11 + +import ( + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. + UpgradeName = "v11" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, +} diff --git a/app/upgrades/v11/upgrades.go b/app/upgrades/v11/upgrades.go new file mode 100644 index 00000000..9945b28b --- /dev/null +++ b/app/upgrades/v11/upgrades.go @@ -0,0 +1,27 @@ +package v11 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + ctx.Logger().Info("Upgrade complete") + return vm, err + } +} diff --git a/app/upgrades/v12/constants.go b/app/upgrades/v12/constants.go new file mode 100644 index 00000000..35ea8a34 --- /dev/null +++ b/app/upgrades/v12/constants.go @@ -0,0 +1,30 @@ +package v12 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. + UpgradeName = "v12" +) + +var ( + // The ValidatorBondFactor dictates the cap on the liquid shares + // for a validator - determined as a multiple to their validator bond + // (e.g. 
ValidatorBondShares = 1000, BondFactor = 250 -> LiquidSharesCap: 250,000) + ValidatorBondFactor = sdk.NewDec(250) + // ValidatorLiquidStakingCap represents a cap on the portion of stake that + // comes from liquid staking providers for a specific validator + ValidatorLiquidStakingCap = sdk.MustNewDecFromStr("0.5") // 50% + // GlobalLiquidStakingCap represents the percentage cap on + // the portion of a chain's total stake that can be liquid + GlobalLiquidStakingCap = sdk.MustNewDecFromStr("0.25") // 25% +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, +} diff --git a/app/upgrades/v12/upgrades.go b/app/upgrades/v12/upgrades.go new file mode 100644 index 00000000..640df77c --- /dev/null +++ b/app/upgrades/v12/upgrades.go @@ -0,0 +1,38 @@ +package v12 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + // Set liquid staking module parameters + params := keepers.StakingKeeper.GetParams(ctx) + params.ValidatorBondFactor = ValidatorBondFactor + params.ValidatorLiquidStakingCap = ValidatorLiquidStakingCap + params.GlobalLiquidStakingCap = GlobalLiquidStakingCap + + err = keepers.StakingKeeper.SetParams(ctx, params) + if err != nil { + return vm, err + } + + ctx.Logger().Info("Upgrade complete") + return vm, nil + } +} diff --git a/app/upgrades/v13/constants.go b/app/upgrades/v13/constants.go new file mode 100644 index 00000000..3ef5e6cc --- /dev/null +++ b/app/upgrades/v13/constants.go @@ -0,0 +1,15 @@ +package v13 + +import ( + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name.
+ UpgradeName = "v13" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, +} diff --git a/app/upgrades/v13/upgrades.go b/app/upgrades/v13/upgrades.go new file mode 100644 index 00000000..76b7cd14 --- /dev/null +++ b/app/upgrades/v13/upgrades.go @@ -0,0 +1,27 @@ +package v13 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + ctx.Logger().Info("Upgrade complete") + return vm, err + } +} diff --git a/app/upgrades/v14/constants.go b/app/upgrades/v14/constants.go new file mode 100644 index 00000000..4204bb46 --- /dev/null +++ b/app/upgrades/v14/constants.go @@ -0,0 +1,15 @@ +package v14 + +import ( + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. + UpgradeName = "v14" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, +} diff --git a/app/upgrades/v14/upgrades.go b/app/upgrades/v14/upgrades.go new file mode 100644 index 00000000..ead38847 --- /dev/null +++ b/app/upgrades/v14/upgrades.go @@ -0,0 +1,32 @@ +package v14 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + // Set the minimum height of a valid consumer equivocation evidence + // for the existing consumer chains: neutron-1 and stride-1 + keepers.ProviderKeeper.SetEquivocationEvidenceMinHeight(ctx, "neutron-1", 4552189) + keepers.ProviderKeeper.SetEquivocationEvidenceMinHeight(ctx, "stride-1", 6375035) + + ctx.Logger().Info("Upgrade complete") + return vm, err + } +} diff --git a/app/upgrades/v15/constants.go b/app/upgrades/v15/constants.go new file mode 100644 index 00000000..12430b7a --- /dev/null +++ b/app/upgrades/v15/constants.go @@ -0,0 +1,26 @@ +package v15 + +import ( + store "github.com/cosmos/cosmos-sdk/store/types" + consensustypes "github.com/cosmos/cosmos-sdk/x/consensus/types" + crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" + + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. 
+ UpgradeName = "v15" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, + StoreUpgrades: store.StoreUpgrades{ + Added: []string{ + // v47 modules + crisistypes.ModuleName, + consensustypes.ModuleName, + }, + }, +} diff --git a/app/upgrades/v15/upgrades.go b/app/upgrades/v15/upgrades.go new file mode 100644 index 00000000..a0e0cb65 --- /dev/null +++ b/app/upgrades/v15/upgrades.go @@ -0,0 +1,458 @@ +package v15 + +import ( + "fmt" + + ibctransferkeeper "github.com/cosmos/ibc-go/v7/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v7/modules/apps/transfer/types" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/store/prefix" + storetypes "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/address" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/types/module" + accountkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + vesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + distributionkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +// CreateUpgradeHandler returns a upgrade handler for Onomy v15 +// which executes the following migrations: +// - adhere to prop 826 which sets the minimum commission rate to 5% for all validators, +// see https://www.mintscan.io/cosmos/proposals/826 +// - update the slashing module SigningInfos for which the consensus address is empty, +// see https://github.com/onomyprotocol/onomy-rebuild/issues/1734. +// - adhere to signal prop 860 which claws back vesting funds +// see https://www.mintscan.io/cosmos/proposals/860 +// - update the transfer module's escrow accounts for which there is a discrepancy +// with the counterparty chain supply. +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + keepers *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + baseAppLegacySS := keepers.ParamsKeeper.Subspace(baseapp.Paramspace). 
+ WithKeyTable(paramstypes.ConsensusParamsKeyTable()) + baseapp.MigrateParams(ctx, baseAppLegacySS, &keepers.ConsensusParamsKeeper) + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + if err := UpgradeMinCommissionRate(ctx, *keepers.StakingKeeper); err != nil { + return nil, fmt.Errorf("failed migrating min commission rates: %s", err) + } + + UpgradeSigningInfos(ctx, keepers.SlashingKeeper) + + if err := ClawbackVestingFunds( + ctx, + sdk.MustAccAddressFromBech32("cosmos145hytrc49m0hn6fphp8d5h4xspwkawcuzmx498"), + keepers); err != nil { + return nil, fmt.Errorf("failed migrating vesting funds: %s", err) + } + if err := SetMinInitialDepositRatio(ctx, *keepers.GovKeeper); err != nil { + return nil, fmt.Errorf("failed initializing the min initial deposit ratio: %s", err) + } + + UpgradeEscrowAccounts(ctx, keepers.BankKeeper, keepers.TransferKeeper) + + ctx.Logger().Info("Upgrade v15 complete") + return vm, err + } +} + +// UpgradeMinCommissionRate sets the minimum commission rate staking parameter to 5% +// and updates the commission rate for all validators that have a commission rate less than 5% +// adhere to prop 826 which sets the minimum commission rate to 5% for all validators +// https://www.mintscan.io/cosmos/proposals/826 +func UpgradeMinCommissionRate(ctx sdk.Context, sk stakingkeeper.Keeper) error { + ctx.Logger().Info("Migrating min commission rate...") + + params := sk.GetParams(ctx) + params.MinCommissionRate = sdk.NewDecWithPrec(5, 2) + if err := sk.SetParams(ctx, params); err != nil { + return err + } + + for _, val := range sk.GetAllValidators(ctx) { + if val.Commission.CommissionRates.Rate.LT(sdk.NewDecWithPrec(5, 2)) { + // set the commission rate to 5% + val.Commission.CommissionRates.Rate = sdk.NewDecWithPrec(5, 2) + // set the max rate to 5% if it is less than 5% + if val.Commission.CommissionRates.MaxRate.LT(sdk.NewDecWithPrec(5, 2)) { + val.Commission.CommissionRates.MaxRate = sdk.NewDecWithPrec(5, 2) + } + val.Commission.UpdateTime = ctx.BlockHeader().Time + sk.SetValidator(ctx, val) + } + } + + ctx.Logger().Info("Finished migrating min commission rate") + return nil +} + +// UpgradeSigningInfos updates the signing infos of validators for which +// the consensus address is missing +func UpgradeSigningInfos(ctx sdk.Context, sk slashingkeeper.Keeper) { + ctx.Logger().Info("Migrating signing infos...") + + signingInfos := []slashingtypes.ValidatorSigningInfo{} + + // update consensus address in signing info + // using the store key of validators + sk.IterateValidatorSigningInfos(ctx, func(address sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) { + if info.Address == "" { + info.Address = address.String() + signingInfos = append(signingInfos, info) + } + + return false + }) + + for _, si := range signingInfos { + addr, err := sdk.ConsAddressFromBech32(si.Address) + if err != nil { + ctx.Logger().Error("incorrect consensus address in signing info %s: %s", si.Address, err) + continue + } + sk.SetValidatorSigningInfo(ctx, addr, si) + } + + ctx.Logger().Info("Finished migrating signing infos") +} + +// ClawbackVestingFunds transfers the vesting tokens from the given vesting account +// to the community pool +func ClawbackVestingFunds(ctx sdk.Context, address sdk.AccAddress, keepers *keepers.AppKeepers) error { + ctx.Logger().Info("Migrating vesting funds...") + + ak := keepers.AccountKeeper + bk := keepers.BankKeeper + dk := keepers.DistrKeeper + sk := *keepers.StakingKeeper + + // get target account + 
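// ClawbackVestingFunds is effectively a no-op when the address does not resolve to a ContinuousVestingAccount: it logs an error and returns nil rather than failing the upgrade. +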
account := ak.GetAccount(ctx, address) + + // verify that it's a vesting account type + vestAccount, ok := account.(*vesting.ContinuousVestingAccount) + if !ok { + ctx.Logger().Error( + "failed migrating vesting funds: %s: %s", + "provided account address isn't a vesting account: ", + address.String(), + ) + + return nil + } + + // returns if the account has no vesting coins of the bond denom + vestingCoinToClawback := sdk.Coin{} + if vc := vestAccount.GetVestingCoins(ctx.BlockTime()); !vc.Empty() { + _, vestingCoinToClawback = vc.Find(sk.BondDenom(ctx)) + } + + if vestingCoinToClawback.IsNil() { + ctx.Logger().Info( + "%s: %s", + "no vesting coins to migrate", + "Finished migrating vesting funds", + ) + + return nil + } + + // unbond all delegations from vesting account + if err := forceUnbondAllDelegations(sk, bk, ctx, address); err != nil { + return err + } + + // transfers still vesting tokens of BondDenom to community pool + if err := forceFundCommunityPool( + ak, + dk, + bk, + ctx, + vestingCoinToClawback, + address, + keepers.GetKey(banktypes.StoreKey), + ); err != nil { + return err + } + + // overwrite vesting account using its embedded base account + ak.SetAccount(ctx, vestAccount.BaseAccount) + + // validate account balance + if err := bk.ValidateBalance(ctx, address); err != nil { + return err + } + + ctx.Logger().Info("Finished migrating vesting funds") + return nil +} + +// forceUnbondAllDelegations unbonds all the delegations from the given account address, +// without waiting for an unbonding period +func forceUnbondAllDelegations( + sk stakingkeeper.Keeper, + bk bankkeeper.Keeper, + ctx sdk.Context, + delegator sdk.AccAddress, +) error { + dels := sk.GetDelegatorDelegations(ctx, delegator, 100) + + for _, del := range dels { + valAddr := del.GetValidatorAddr() + + validator, found := sk.GetValidator(ctx, valAddr) + if !found { + return stakingtypes.ErrNoValidatorFound + } + + returnAmount, err := sk.Unbond(ctx, delegator, valAddr, del.GetShares()) + if err != nil { + return err + } + + coins := sdk.NewCoins(sdk.NewCoin(sk.BondDenom(ctx), returnAmount)) + + // transfer the validator tokens to the not bonded pool + if validator.IsBonded() { + // doing stakingKeeper.bondedTokensToNotBonded + err = bk.SendCoinsFromModuleToModule(ctx, stakingtypes.BondedPoolName, stakingtypes.NotBondedPoolName, coins) + if err != nil { + return err + } + } + + err = bk.UndelegateCoinsFromModuleToAccount(ctx, stakingtypes.NotBondedPoolName, delegator, coins) + if err != nil { + return err + } + } + + return nil +} + +// forceFundCommunityPool sends the given coin from the sender account to the community pool +// even if the coin is locked. 
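+// The regular FundCommunityPool keeper method moves the funds via SendCoinsFromAccountToModule, which would fail while the coins are still locked under a vesting schedule; writing the bank balances directly through setBalance below is what allows clawing back unvested funds.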
+// Note that it partially follows the logic of the FundCommunityPool method in +// https://github.com/cosmos/cosmos-sdk/blob/release%2Fv0.47.x/x/distribution/keeper/keeper.go#L155 +func forceFundCommunityPool( + ak accountkeeper.AccountKeeper, + dk distributionkeeper.Keeper, + bk bankkeeper.Keeper, + ctx sdk.Context, + amount sdk.Coin, + sender sdk.AccAddress, + bs storetypes.StoreKey, +) error { + recipientAcc := ak.GetModuleAccount(ctx, distributiontypes.ModuleName) + if recipientAcc == nil { + return fmt.Errorf("%s:%s", sdkerrors.ErrUnknownAddress, distributiontypes.ModuleName) + } + + senderBal := bk.GetBalance(ctx, sender, amount.Denom) + if _, hasNeg := sdk.NewCoins(senderBal).SafeSub(amount); hasNeg { + return fmt.Errorf( + "%s: spendable balance %s is smaller than %s", + sdkerrors.ErrInsufficientFunds, + senderBal, + amount, + ) + } + if err := setBalance(ctx, sender, senderBal.Sub(amount), bs); err != nil { + return err + } + recipientBal := bk.GetBalance(ctx, recipientAcc.GetAddress(), amount.Denom) + if err := setBalance(ctx, recipientAcc.GetAddress(), recipientBal.Add(amount), bs); err != nil { + return err + } + + accExists := ak.HasAccount(ctx, recipientAcc.GetAddress()) + if !accExists { + ak.SetAccount(ctx, ak.NewAccountWithAddress(ctx, recipientAcc.GetAddress())) + } + + feePool := dk.GetFeePool(ctx) + feePool.CommunityPool = feePool.CommunityPool.Add(sdk.NewDecCoinsFromCoins(amount)...) + dk.SetFeePool(ctx, feePool) + + return nil +} + +// setBalance sets the coin balance for an account by address. +// Note that it follows the same logic of the setBalance method in +// https://github.com/cosmos/cosmos-sdk/blob/v0.47.7/x/bank/keeper/send.go#L337 +func setBalance( + ctx sdk.Context, + addr sdk.AccAddress, + balance sdk.Coin, + bs storetypes.StoreKey, +) error { + if !balance.IsValid() { + return fmt.Errorf("%s:%s", sdkerrors.ErrInvalidCoins, balance.String()) + } + + store := ctx.KVStore(bs) + accountStore := prefix.NewStore(store, banktypes.CreateAccountBalancesPrefix(addr)) + denomPrefixStore := prefix.NewStore(store, banktypes.CreateDenomAddressPrefix(balance.Denom)) + + if balance.IsZero() { + accountStore.Delete([]byte(balance.Denom)) + denomPrefixStore.Delete(address.MustLengthPrefix(addr)) + } else { + amount, err := balance.Amount.Marshal() + if err != nil { + return err + } + + accountStore.Set([]byte(balance.Denom), amount) + + // Store a reverse index from denomination to account address with a + // sentinel value. + denomAddrKey := address.MustLengthPrefix(addr) + if !denomPrefixStore.Has(denomAddrKey) { + denomPrefixStore.Set(denomAddrKey, []byte{0}) + } + } + + return nil +} + +// SetMinInitialDepositRatio sets the MinInitialDepositRatio param of the gov +// module to 10% - this is the proportion of the deposit value that must be paid +// at proposal submission. +func SetMinInitialDepositRatio(ctx sdk.Context, gk govkeeper.Keeper) error { + ctx.Logger().Info("Initializing MinInitialDepositRatio...") + + params := gk.GetParams(ctx) + params.MinInitialDepositRatio = sdk.NewDecWithPrec(1, 1).String() // 0.1 (10%) + err := gk.SetParams(ctx, params) + if err != nil { + return err + } + + ctx.Logger().Info("Finished initializing MinInitialDepositRatio...") + + return nil +} + +/* +The following is a list of the discrepancies that were found in the IBC transfer escrow accounts. +Please note that discrepancies #1 and #3 are for the same escrow account address, but for coins of +a different denomination. 
+ +Discrepancy #1: +- Counterparty Chain ID: osmosis-1 +- Escrow Account Address: cosmos1x54ltnyg88k0ejmk8ytwrhd3ltm84xehrnlslf +- Asset Base Denom: FX +- Asset IBC Denom: ibc/4925E6ABA571A44D2BE0286D2D29AF42A294D0FF2BB16490149A1B26EAD33729 +- Escrow Balance: 8859960534331100342 +- Counterparty Total Supply: 8899960534331100342ibc/EBBE6553941A1F0111A9163F885F7665417467FB630D68F5D4F15425C1E64FDE +- Missing amount in Escrow Account: 40000000000000000 + +Discrepancy #2: +- Counterparty Chain ID: juno-1 +- Escrow Account Address: cosmos1ju6tlfclulxumtt2kglvnxduj5d93a64r5czge +- Asset Base Denom: uosmo +- Asset IBC Denom: ibc/14F9BC3E44B8A9C1BE1FB08980FAB87034C9905EF17CF2F5008FC085218811CC +- Escrow Balance: 6247328 +- Counterparty Total Supply: 6249328ibc/A065D610A42C3943FAB23979A4F969291A2CF9FE76966B8960AC34B52EFA9F62 +- Missing amount in Escrow Account: 2000 + +Discrepancy #3: +- Counterparty Chain ID: osmosis-1 +- Escrow Account Address: cosmos1x54ltnyg88k0ejmk8ytwrhd3ltm84xehrnlslf +- Asset Base Denom: rowan +- Asset IBC Denom: ibc/F5ED5F3DC6F0EF73FA455337C027FE91ABCB375116BF51A228E44C493E020A09 +- Escrow Balance: 122394170815718341733868 +- Counterparty Total Supply: 126782170815718341733868ibc/92E49910206805D48FC035A947F38ABFD5F0372F254846D9873442F3036E20AF +- Missing amount in Escrow Account: 4388000000000000000000 +*/ + +// UpgradeEscrowAccounts mints the necessary assets to reach parity between the escrow account +// and the counterparty total supply, and then, send them from the transfer module to the escrow account. +func UpgradeEscrowAccounts(ctx sdk.Context, bankKeeper bankkeeper.Keeper, transferKeeper ibctransferkeeper.Keeper) { + for _, update := range GetEscrowUpdates(ctx) { + escrowAddress := sdk.MustAccAddressFromBech32(update.Address) + for _, coin := range update.Coins { + coins := sdk.NewCoins(coin) + + if err := bankKeeper.MintCoins(ctx, ibctransfertypes.ModuleName, coins); err != nil { + ctx.Logger().Error("fail to upgrade escrow account: %s", err) + } + + if err := bankKeeper.SendCoinsFromModuleToAccount(ctx, ibctransfertypes.ModuleName, escrowAddress, coins); err != nil { + ctx.Logger().Error("fail to upgrade escrow account: %s", err) + } + + // update the transfer module's store for the total escrow amounts + currentTotalEscrow := transferKeeper.GetTotalEscrowForDenom(ctx, coin.GetDenom()) + newTotalEscrow := currentTotalEscrow.Add(coin) + transferKeeper.SetTotalEscrowForDenom(ctx, newTotalEscrow) + } + } +} + +type UpdateCoins struct { + Address string + Coins sdk.Coins +} + +func GetEscrowUpdates(ctx sdk.Context) []UpdateCoins { + escrowUpdates := []UpdateCoins{ + { + // discrepancy #1 + Address: "cosmos1x54ltnyg88k0ejmk8ytwrhd3ltm84xehrnlslf", + Coins: sdk.Coins{{ + Denom: "ibc/4925E6ABA571A44D2BE0286D2D29AF42A294D0FF2BB16490149A1B26EAD33729", + Amount: sdk.NewInt(40000000000000000), + }}, + }, + { + // discrepancy #2 + Address: "cosmos1ju6tlfclulxumtt2kglvnxduj5d93a64r5czge", + Coins: sdk.Coins{{ + Denom: "ibc/14F9BC3E44B8A9C1BE1FB08980FAB87034C9905EF17CF2F5008FC085218811CC", + Amount: sdk.NewInt(2000), + }}, + }, + } + + // For discrepancy #3, the missing amount in the escrow account is too large + // to be represented using an 64-bit integer. Therefore, it's added to the + // escrow updates list under the condition that the amount is successfully + // converted to the sdk.Int type. 
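+ // sdk.Int is backed by a big.Int (with a 256-bit bound), so the 22-digit amount itself is representable; the ok flag below mainly guards against a malformed amount string.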
+ if amt, ok := sdk.NewIntFromString("4388000000000000000000"); !ok { + ctx.Logger().Error("can't upgrade missing amount in escrow account: '4388000000000000000000'") + } else { + coins := escrowUpdates[0].Coins + coins = coins.Add(sdk.NewCoins(sdk.NewCoin( + "ibc/F5ED5F3DC6F0EF73FA455337C027FE91ABCB375116BF51A228E44C493E020A09", + amt, + ))...) + escrowUpdates[0].Coins = coins + } + + return escrowUpdates +} diff --git a/app/upgrades/v15/upgrades_test.go b/app/upgrades/v15/upgrades_test.go new file mode 100644 index 00000000..77e51ea6 --- /dev/null +++ b/app/upgrades/v15/upgrades_test.go @@ -0,0 +1,320 @@ +package v15_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + abci "github.com/cometbft/cometbft/abci/types" + tmrand "github.com/cometbft/cometbft/libs/rand" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" + tmtime "github.com/cometbft/cometbft/types/time" + + "cosmossdk.io/math" + + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + "github.com/cosmos/cosmos-sdk/testutil/mock" + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + vesting "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + banktestutil "github.com/cosmos/cosmos-sdk/x/bank/testutil" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + + "github.com/onomyprotocol/onomy-rebuild/app/helpers" + v15 "github.com/onomyprotocol/onomy-rebuild/app/upgrades/v15" +) + +func TestUpgradeSigningInfos(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + slashingKeeper := onomyApp.SlashingKeeper + + signingInfosNum := 8 + emptyAddrSigningInfo := make(map[string]struct{}) + + // create some dummy signing infos, half of which with an empty address field + for i := 0; i < signingInfosNum; i++ { + pubKey, err := mock.NewPV().GetPubKey() + require.NoError(t, err) + + consAddr := sdk.ConsAddress(pubKey.Address()) + info := slashingtypes.NewValidatorSigningInfo( + consAddr, + 0, + 0, + time.Unix(0, 0), + false, + 0, + ) + + if i < signingInfosNum/2 { + info.Address = "" + emptyAddrSigningInfo[consAddr.String()] = struct{}{} + } + + slashingKeeper.SetValidatorSigningInfo(ctx, consAddr, info) + require.NoError(t, err) + } + + require.Equal(t, signingInfosNum/2, len(emptyAddrSigningInfo)) + + // check that signing info are correctly set before migration + slashingKeeper.IterateValidatorSigningInfos(ctx, func(address sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) { + if _, ok := emptyAddrSigningInfo[address.String()]; ok { + require.Empty(t, info.Address) + } else { + require.NotEmpty(t, info.Address) + } + + return false + }) + + // upgrade signing infos + v15.UpgradeSigningInfos(ctx, slashingKeeper) + + // check that all signing info are updated as expected after migration + slashingKeeper.IterateValidatorSigningInfos(ctx, func(address sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) { + require.NotEmpty(t, info.Address) + + return false + }) +} + +func TestUpgradeMinCommissionRate(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + + // set min commission rate to 0 + stakingParams := onomyApp.StakingKeeper.GetParams(ctx) + stakingParams.MinCommissionRate = sdk.ZeroDec() + err := onomyApp.StakingKeeper.SetParams(ctx, stakingParams) + require.NoError(t, err) + + stakingKeeper := onomyApp.StakingKeeper + valNum := 
len(stakingKeeper.GetAllValidators(ctx)) + + // create 3 new validators + for i := 0; i < 3; i++ { + pk := ed25519.GenPrivKeyFromSecret([]byte{uint8(i)}).PubKey() + val, err := stakingtypes.NewValidator( + sdk.ValAddress(pk.Address()), + pk, + stakingtypes.Description{}, + ) + require.NoError(t, err) + // set random commission rate + val.Commission.CommissionRates.Rate = sdk.NewDecWithPrec(tmrand.Int63n(100), 2) + stakingKeeper.SetValidator(ctx, val) + valNum++ + } + + validators := stakingKeeper.GetAllValidators(ctx) + require.Equal(t, valNum, len(validators)) + + // pre-test min commission rate is 0 + require.Equal(t, stakingKeeper.GetParams(ctx).MinCommissionRate, sdk.ZeroDec(), "non-zero previous min commission rate") + + // run the test and confirm the values have been updated + require.NoError(t, v15.UpgradeMinCommissionRate(ctx, *stakingKeeper)) + + newStakingParams := stakingKeeper.GetParams(ctx) + require.NotEqual(t, newStakingParams.MinCommissionRate, sdk.ZeroDec(), "failed to update min commission rate") + require.Equal(t, newStakingParams.MinCommissionRate, sdk.NewDecWithPrec(5, 2), "failed to update min commission rate") + + for _, val := range stakingKeeper.GetAllValidators(ctx) { + require.True(t, val.Commission.CommissionRates.Rate.GTE(newStakingParams.MinCommissionRate), "failed to update update commission rate for validator %s", val.GetOperator()) + } +} + +func TestClawbackVestingFunds(t *testing.T) { + onomyApp := helpers.Setup(t) + + now := tmtime.Now() + endTime := now.Add(24 * time.Hour) + + bankKeeper := onomyApp.BankKeeper + accountKeeper := onomyApp.AccountKeeper + distrKeeper := onomyApp.DistrKeeper + stakingKeeper := onomyApp.StakingKeeper + + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{Height: 1}) + ctx = ctx.WithBlockHeader(tmproto.Header{Height: ctx.BlockHeight(), Time: now}) + + validator := stakingKeeper.GetAllValidators(ctx)[0] + bondDenom := stakingKeeper.GetParams(ctx).BondDenom + + // create continuous vesting account + origCoins := sdk.NewCoins(sdk.NewInt64Coin(bondDenom, 100)) + addr := sdk.AccAddress([]byte("cosmos145hytrc49m0hn6fphp8d5h4xspwkawcuzmx498")) + + vestingAccount := vesting.NewContinuousVestingAccount( + authtypes.NewBaseAccountWithAddress(addr), + origCoins, + now.Unix(), + endTime.Unix(), + ) + + require.True(t, vestingAccount.GetVestingCoins(now).IsEqual(origCoins)) + + accountKeeper.SetAccount(ctx, vestingAccount) + + // check vesting account balance was set correctly + require.NoError(t, bankKeeper.ValidateBalance(ctx, addr)) + require.Empty(t, bankKeeper.GetAllBalances(ctx, addr)) + + // send original vesting coin amount + require.NoError(t, banktestutil.FundAccount(bankKeeper, ctx, addr, origCoins)) + require.True(t, origCoins.IsEqual(bankKeeper.GetAllBalances(ctx, addr))) + + initBal := bankKeeper.GetAllBalances(ctx, vestingAccount.GetAddress()) + require.True(t, initBal.IsEqual(origCoins)) + + // save validator tokens + oldValTokens := validator.Tokens + + // delegate all vesting account tokens + _, err := stakingKeeper.Delegate( + ctx, + vestingAccount.GetAddress(), + origCoins.AmountOf(bondDenom), + stakingtypes.Unbonded, + validator, + true) + require.NoError(t, err) + + // check that the validator's tokens and shares increased + validator = stakingKeeper.GetAllValidators(ctx)[0] + del, found := stakingKeeper.GetDelegation(ctx, addr, validator.GetOperator()) + require.True(t, found) + require.True(t, validator.Tokens.Equal(oldValTokens.Add(origCoins.AmountOf(bondDenom)))) + require.Equal( + t, + 
validator.TokensFromShares(del.Shares), + math.LegacyNewDec(origCoins.AmountOf(bondDenom).Int64()), + ) + + // check vesting account delegations + vestingAccount = accountKeeper.GetAccount(ctx, addr).(*vesting.ContinuousVestingAccount) + require.Equal(t, vestingAccount.GetDelegatedVesting(), origCoins) + require.Empty(t, vestingAccount.GetDelegatedFree()) + + // check that migration succeeds when all coins are already vested + require.NoError(t, v15.ClawbackVestingFunds(ctx.WithBlockTime(endTime), addr, &onomyApp.AppKeepers)) + + // vest half of the tokens + ctx = ctx.WithBlockTime(now.Add(12 * time.Hour)) + + currVestingCoins := vestingAccount.GetVestingCoins(ctx.BlockTime()) + currVestedCoins := vestingAccount.GetVestedCoins(ctx.BlockTime()) + + require.True(t, currVestingCoins.IsEqual(origCoins.QuoInt(math.NewInt(2)))) + require.True(t, currVestedCoins.IsEqual(origCoins.QuoInt(math.NewInt(2)))) + + // execute migration script + require.NoError(t, v15.ClawbackVestingFunds(ctx, addr, &onomyApp.AppKeepers)) + + // check that the validator's delegation is removed and that + // their total tokens decreased + validator = stakingKeeper.GetAllValidators(ctx)[0] + _, found = stakingKeeper.GetDelegation(ctx, addr, validator.GetOperator()) + require.False(t, found) + require.Equal( + t, + validator.TokensFromShares(validator.DelegatorShares), + math.LegacyNewDec(oldValTokens.Int64()), + ) + + // verify that all modules can end/begin blocks + onomyApp.EndBlock(abci.RequestEndBlock{}) + onomyApp.BeginBlock( + abci.RequestBeginBlock{ + Header: tmproto.Header{ + ChainID: ctx.ChainID(), + Height: ctx.BlockHeight() + 1, + }, + }, + ) + + // check that the resulting account is of BaseAccount type now + account, ok := accountKeeper.GetAccount(ctx, addr).(*authtypes.BaseAccount) + require.True(t, ok) + // check that the account values are still the same + require.EqualValues(t, account, vestingAccount.BaseAccount) + + // check that the account's balance still has the vested tokens + require.True(t, bankKeeper.GetAllBalances(ctx, addr).IsEqual(currVestedCoins)) + // check that the community pool balance received the vesting tokens + require.True( + t, + distrKeeper.GetFeePoolCommunityCoins(ctx). 
+ IsEqual(sdk.NewDecCoinsFromCoins(currVestingCoins...)), + ) + + // verify that normal operations work in banking and staking + _, err = stakingKeeper.Delegate( + ctx, addr, + sdk.NewInt(30), + stakingtypes.Unbonded, + validator, + true) + require.NoError(t, err) + + newAddr := sdk.AccAddress([]byte("cosmos1qqp9myctmh8mh2y7gynlsnw4y2wz3s3089dak6")) + err = bankKeeper.SendCoins( + ctx, + addr, + newAddr, + sdk.NewCoins(sdk.NewCoin(bondDenom, sdk.NewInt(10))), + ) + require.NoError(t, err) +} + +func TestSetMinInitialDepositRatio(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + + err := v15.SetMinInitialDepositRatio(ctx, *onomyApp.GovKeeper) + require.NoError(t, err) + + minInitialDepositRatioStr := onomyApp.GovKeeper.GetParams(ctx).MinInitialDepositRatio + minInitialDepositRatio, err := math.LegacyNewDecFromStr(minInitialDepositRatioStr) + require.NoError(t, err) + require.True(t, minInitialDepositRatio.Equal(sdk.NewDecWithPrec(1, 1))) +} + +func TestUpgradeEscrowAccounts(t *testing.T) { + onomyApp := helpers.Setup(t) + ctx := onomyApp.NewUncachedContext(true, tmproto.Header{}) + + bankKeeper := onomyApp.BankKeeper + transferKeeper := onomyApp.TransferKeeper + + escrowUpdates := v15.GetEscrowUpdates(ctx) + + // check escrow accounts are empty + for _, update := range escrowUpdates { + require.Empty(t, bankKeeper.GetAllBalances(ctx, sdk.MustAccAddressFromBech32(update.Address))) + for _, coin := range update.Coins { + require.Equal(t, sdk.ZeroInt(), transferKeeper.GetTotalEscrowForDenom(ctx, coin.Denom).Amount) + } + } + + // execute the upgrade + v15.UpgradeEscrowAccounts(ctx, bankKeeper, transferKeeper) + + // check that new assets are minted and transferred to the escrow accounts + numUpdate := 0 + for _, update := range escrowUpdates { + for _, coin := range update.Coins { + require.Equal(t, coin, bankKeeper.GetBalance(ctx, sdk.MustAccAddressFromBech32(update.Address), coin.Denom)) + // check that the total escrow amount for the denom is updated + require.Equal(t, coin, transferKeeper.GetTotalEscrowForDenom(ctx, coin.Denom)) + numUpdate++ + } + } + + // verify that all tree discrepancies are covered in the update + require.Equal(t, 3, numUpdate) +} diff --git a/app/upgrades/v7/constants.go b/app/upgrades/v7/constants.go new file mode 100644 index 00000000..70f38dea --- /dev/null +++ b/app/upgrades/v7/constants.go @@ -0,0 +1,51 @@ +//go:build upgrade_v7 + +package v7 + +import ( + store "github.com/cosmos/cosmos-sdk/store/types" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" +) + +const ( + // UpgradeName defines the on-chain upgrade name. 
+ UpgradeName = "v7-Theta" + + // allowed msg types of ica host + authzMsgExec = "/cosmos.authz.v1beta1.MsgExec" + authzMsgGrant = "/cosmos.authz.v1beta1.MsgGrant" + authzMsgRevoke = "/cosmos.authz.v1beta1.MsgRevoke" + bankMsgSend = "/cosmos.bank.v1beta1.MsgSend" + bankMsgMultiSend = "/cosmos.bank.v1beta1.MsgMultiSend" + distrMsgSetWithdrawAddr = "/cosmos.distribution.v1beta1.MsgSetWithdrawAddress" + distrMsgWithdrawValidatorCommission = "/cosmos.distribution.v1beta1.MsgWithdrawValidatorCommission" + distrMsgFundCommunityPool = "/cosmos.distribution.v1beta1.MsgFundCommunityPool" + distrMsgWithdrawDelegatorReward = "/cosmos.distribution.v1beta1.MsgWithdrawDelegatorReward" + feegrantMsgGrantAllowance = "/cosmos.feegrant.v1beta1.MsgGrantAllowance" + feegrantMsgRevokeAllowance = "/cosmos.feegrant.v1beta1.MsgRevokeAllowance" + govMsgVoteWeighted = "/cosmos.gov.v1beta1.MsgVoteWeighted" + govMsgSubmitProposal = "/cosmos.gov.v1beta1.MsgSubmitProposal" + govMsgDeposit = "/cosmos.gov.v1beta1.MsgDeposit" + govMsgVote = "/cosmos.gov.v1beta1.MsgVote" + stakingMsgEditValidator = "/cosmos.staking.v1beta1.MsgEditValidator" + stakingMsgDelegate = "/cosmos.staking.v1beta1.MsgDelegate" + stakingMsgUndelegate = "/cosmos.staking.v1beta1.MsgUndelegate" + stakingMsgBeginRedelegate = "/cosmos.staking.v1beta1.MsgBeginRedelegate" + stakingMsgCreateValidator = "/cosmos.staking.v1beta1.MsgCreateValidator" + vestingMsgCreateVestingAccount = "/cosmos.vesting.v1beta1.MsgCreateVestingAccount" + ibcMsgTransfer = "/ibc.applications.transfer.v1.MsgTransfer" + liquidityMsgSwapWithinBatch = "/tendermint.liquidity.v1beta1.MsgSwapWithinBatch" //#nosec G101 -- This is a false positive + liquidityMsgCreatePool = "/tendermint.liquidity.v1beta1.MsgCreatePool" + liquidityMsgDepositWithinBatch = "/tendermint.liquidity.v1beta1.MsgDepositWithinBatch" + liquidityMsgWithdrawWithinBatch = "/tendermint.liquidity.v1beta1.MsgWithdrawWithinBatch" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, + StoreUpgrades: store.StoreUpgrades{ + Added: []string{icahosttypes.StoreKey}, + }, +} diff --git a/app/upgrades/v7/upgrades.go b/app/upgrades/v7/upgrades.go new file mode 100644 index 00000000..11778f19 --- /dev/null +++ b/app/upgrades/v7/upgrades.go @@ -0,0 +1,72 @@ +//go:build upgrade_v7 + +package v7 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + ica "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts" + icacontrollertypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/controller/types" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + _ *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + vm[icatypes.ModuleName] = mm.Modules[icatypes.ModuleName].ConsensusVersion() + // create ICS27 Controller submodule params + controllerParams := icacontrollertypes.Params{} + // create ICS27 Host submodule params + hostParams := icahosttypes.Params{ + HostEnabled: true, + AllowMessages: []string{ + authzMsgExec, + authzMsgGrant, + authzMsgRevoke, + bankMsgSend, + 
bankMsgMultiSend, + distrMsgSetWithdrawAddr, + distrMsgWithdrawValidatorCommission, + distrMsgFundCommunityPool, + distrMsgWithdrawDelegatorReward, + feegrantMsgGrantAllowance, + feegrantMsgRevokeAllowance, + govMsgVoteWeighted, + govMsgSubmitProposal, + govMsgDeposit, + govMsgVote, + stakingMsgEditValidator, + stakingMsgDelegate, + stakingMsgUndelegate, + stakingMsgBeginRedelegate, + stakingMsgCreateValidator, + vestingMsgCreateVestingAccount, + ibcMsgTransfer, + liquidityMsgCreatePool, + liquidityMsgSwapWithinBatch, + liquidityMsgDepositWithinBatch, + liquidityMsgWithdrawWithinBatch, + }, + } + + ctx.Logger().Info("start to init interchainaccount module...") + + // initialize ICS27 module + icaModule, correctTypecast := mm.Modules[icatypes.ModuleName].(ica.AppModule) + if !correctTypecast { + panic("mm.Modules[icatypes.ModuleName] is not of type ica.AppModule") + } + icaModule.InitModule(ctx, controllerParams, hostParams) + + ctx.Logger().Info("start to run module migrations...") + + return mm.RunMigrations(ctx, configurator, vm) + } +} diff --git a/app/upgrades/v8/constants.go b/app/upgrades/v8/constants.go new file mode 100644 index 00000000..aa1f9de2 --- /dev/null +++ b/app/upgrades/v8/constants.go @@ -0,0 +1,25 @@ +//go:build upgrade_v8 + +package v8 + +import ( + store "github.com/cosmos/cosmos-sdk/store/types" + + "github.com/onomyprotocol/onomy-rebuild/app/upgrades" + "github.com/onomyprotocol/onomy-rebuild/x/globalfee" +) + +const ( + // UpgradeName defines the on-chain upgrade name. + UpgradeName = "v8-Rho" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, + StoreUpgrades: store.StoreUpgrades{ + Added: []string{ + globalfee.ModuleName, + }, + }, +} diff --git a/app/upgrades/v8/upgrades.go b/app/upgrades/v8/upgrades.go new file mode 100644 index 00000000..d77aabbe --- /dev/null +++ b/app/upgrades/v8/upgrades.go @@ -0,0 +1,140 @@ +//go:build upgrade_v8 + +package v8 + +import ( + "errors" + "fmt" + + "github.com/cosmos/cosmos-sdk/store/prefix" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + icahosttypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/host/types" + icatypes "github.com/cosmos/ibc-go/v7/modules/apps/27-interchain-accounts/types" + ibcchanneltypes "github.com/cosmos/ibc-go/v7/modules/core/04-channel/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func FixBankMetadata(ctx sdk.Context, keepers *keepers.AppKeepers) error { + ctx.Logger().Info("Starting fix bank metadata...") + + malformedDenom := "uatomu" + correctDenom := "uatom" + + atomMetaData, foundMalformed := keepers.BankKeeper.GetDenomMetaData(ctx, malformedDenom) + if foundMalformed { + // save it with the correct denom + keepers.BankKeeper.SetDenomMetaData(ctx, atomMetaData) + + // delete the old format + key := keepers.GetKey(banktypes.ModuleName) + store := ctx.KVStore(key) + oldDenomMetaDataStore := prefix.NewStore(store, banktypes.DenomMetadataPrefix) + oldDenomMetaDataStore.Delete([]byte(malformedDenom)) + + // confirm whether the old key is still accessible + _, foundMalformed = keepers.BankKeeper.GetDenomMetaData(ctx, malformedDenom) + if foundMalformed { + return errors.New("malformed 'uatomu' denom not fixed") + } + } + + // proceed with the original intention of populating the missing Name and Symbol fields + atomMetaData, foundCorrect := 
keepers.BankKeeper.GetDenomMetaData(ctx, correctDenom)
+ if !foundCorrect {
+ return errors.New("atom denom not found")
+ }
+
+ atomMetaData.Name = "Cosmos Hub Atom"
+ atomMetaData.Symbol = "ATOM"
+ keepers.BankKeeper.SetDenomMetaData(ctx, atomMetaData)
+
+ ctx.Logger().Info("Fix bank metadata complete")
+
+ return nil
+}
+
+func QuicksilverFix(ctx sdk.Context, keepers *keepers.AppKeepers) error {
+ ctx.Logger().Info("Starting fix quicksilver...")
+
+ // Refund stuck coins from ica address
+ sourceAddress, err := sdk.AccAddressFromBech32("cosmos13dqvh4qtg4gzczuktgnw8gc2ewnwmhdwnctekxctyr4azz4dcyysecgq7e")
+ if err != nil {
+ return errors.New("invalid source address")
+ }
+ destinationAddress, err := sdk.AccAddressFromBech32("cosmos1jc24kwznud9m3mwqmcz3xw33ndjuufnghstaag")
+ if err != nil {
+ return errors.New("invalid destination address")
+ }
+
+ // Get balance from stuck address and subtract 1 uatom sent by bad actor
+ sourceBalance := keepers.BankKeeper.GetBalance(ctx, sourceAddress, "uatom")
+ if sourceBalance.IsGTE(sdk.NewCoin("uatom", sdk.NewInt(1))) {
+ refundBalance := sourceBalance.SubAmount(sdk.NewInt(1))
+ err = keepers.BankKeeper.SendCoins(ctx, sourceAddress, destinationAddress, sdk.NewCoins(refundBalance))
+ if err != nil {
+ return errors.New("unable to refund coins")
+ }
+ }
+
+ // Close channels
+ closeChannel(keepers, ctx, "channel-462")
+ closeChannel(keepers, ctx, "channel-463")
+ closeChannel(keepers, ctx, "channel-464")
+ closeChannel(keepers, ctx, "channel-465")
+ closeChannel(keepers, ctx, "channel-466")
+
+ ctx.Logger().Info("Fix quicksilver complete")
+
+ return nil
+}
+
+func closeChannel(keepers *keepers.AppKeepers, ctx sdk.Context, channelID string) {
+ channel, found := keepers.IBCKeeper.ChannelKeeper.GetChannel(ctx, icatypes.HostPortID, channelID)
+ if found {
+ channel.State = ibcchanneltypes.CLOSED
+ keepers.IBCKeeper.ChannelKeeper.SetChannel(ctx, icatypes.HostPortID, channelID, channel)
+ }
+}
+
+func CreateUpgradeHandler(
+ mm *module.Manager,
+ configurator module.Configurator,
+ keepers *keepers.AppKeepers,
+) upgradetypes.UpgradeHandler {
+ return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) {
+ ctx.Logger().Info("Running upgrade fixes...")
+
+ err := FixBankMetadata(ctx, keepers)
+ if err != nil {
+ ctx.Logger().Info(fmt.Sprintf("error fixing bank metadata: %s", err.Error()))
+ }
+
+ err = QuicksilverFix(ctx, keepers)
+ if err != nil {
+ return vm, err
+ }
+
+ // Change hostParams allow_messages = [*] instead of whitelisting individual messages
+ hostParams := icahosttypes.Params{
+ HostEnabled: true,
+ AllowMessages: []string{"*"},
+ }
+
+ // Update params for the ICA host keeper
+ keepers.ICAHostKeeper.SetParams(ctx, hostParams)
+
+ ctx.Logger().Info("Starting module migrations...")
+
+ vm, err = mm.RunMigrations(ctx, configurator, vm)
+ if err != nil {
+ return vm, err
+ }
+
+ ctx.Logger().Info("Upgrade complete")
+ return vm, err
+ }
+}
diff --git a/app/upgrades/v9/constants.go b/app/upgrades/v9/constants.go
new file mode 100644
index 00000000..aa05fc56
--- /dev/null
+++ b/app/upgrades/v9/constants.go
@@ -0,0 +1,25 @@
+//go:build upgrade_v9
+
+package v9
+
+import (
+ store "github.com/cosmos/cosmos-sdk/store/types"
+ ccvprovider "github.com/cosmos/interchain-security/v3/x/ccv/provider/types"
+
+ "github.com/onomyprotocol/onomy-rebuild/app/upgrades"
+)
+
+const (
+ // UpgradeName defines the on-chain upgrade name.
+ UpgradeName = "v9-Lambda" +) + +var Upgrade = upgrades.Upgrade{ + UpgradeName: UpgradeName, + CreateUpgradeHandler: CreateUpgradeHandler, + StoreUpgrades: store.StoreUpgrades{ + Added: []string{ + ccvprovider.ModuleName, + }, + }, +} diff --git a/app/upgrades/v9/upgrades.go b/app/upgrades/v9/upgrades.go new file mode 100644 index 00000000..f3a3abe7 --- /dev/null +++ b/app/upgrades/v9/upgrades.go @@ -0,0 +1,29 @@ +//go:build upgrade_v9 + +package v9 + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + + "github.com/onomyprotocol/onomy-rebuild/app/keepers" +) + +func CreateUpgradeHandler( + mm *module.Manager, + configurator module.Configurator, + _ *keepers.AppKeepers, +) upgradetypes.UpgradeHandler { + return func(ctx sdk.Context, plan upgradetypes.Plan, vm module.VersionMap) (module.VersionMap, error) { + ctx.Logger().Info("Starting module migrations...") + + vm, err := mm.RunMigrations(ctx, configurator, vm) + if err != nil { + return vm, err + } + + ctx.Logger().Info("Upgrade complete") + return vm, err + } +} diff --git a/buf.work.yaml b/buf.work.yaml index d3ed9b56..9de52861 100644 --- a/buf.work.yaml +++ b/buf.work.yaml @@ -1,4 +1,5 @@ +# This workspace file points to the roots found in your +# previous "buf.yaml" configuration. version: v1 directories: - proto - - third_party/proto \ No newline at end of file diff --git a/buf.yaml b/buf.yaml deleted file mode 100644 index 75e86722..00000000 --- a/buf.yaml +++ /dev/null @@ -1,11 +0,0 @@ -version: v1 -lint: - use: - - DEFAULT - except: - - SERVICE_SUFFIX - - RPC_REQUEST_STANDARD_NAME - - RPC_RESPONSE_STANDARD_NAME -breaking: - use: - - FILE \ No newline at end of file diff --git a/client/docs/config.json b/client/docs/config.json new file mode 100644 index 00000000..3c0284cb --- /dev/null +++ b/client/docs/config.json @@ -0,0 +1,18 @@ +{ + "swagger": "2.0", + "info": { + "title": "Cosmoshub - gRPC Gateway docs", + "description": "A REST interface for state queries", + "version": "1.0.0" + }, + "apis": [ + { + "url": "./tmp-swagger-gen/onomy/globalfee/v1beta1/query.swagger.json", + "operationIds": { + "rename": { + "Params": "GlobalfeeParams" + } + } + } + ] +} \ No newline at end of file diff --git a/client/docs/swagger-ui/favicon-16x16.png b/client/docs/swagger-ui/favicon-16x16.png new file mode 100644 index 0000000000000000000000000000000000000000..8b194e617af1c135e6b37939591d24ac3a5efa18 GIT binary patch literal 665 zcmV;K0%rY*P)}JKSduyL>)s!A4EhTMMEM%Q;aL6%l#xiZiF>S;#Y{N2Zz%pvTGHJduXuC6Lx-)0EGfRy*N{Tv4i8@4oJ41gw zKzThrcRe|7J~(YYIBq{SYCkn-KQm=N8$CrEK1CcqMI1dv9z#VRL_{D)L|`QmF8}}l zJ9JV`Q}p!p_4f7m_U`WQ@apR4;o;!mnU<7}iG_qr zF(e)x9~BG-3IzcG2M4an0002kNkl41`ZiN1i62V%{PM@Ry|IS_+Yc7{bb`MM~xm(7p4|kMHP&!VGuDW4kFixat zXw43VmgwEvB$hXt_u=vZ>+v4i7E}n~eG6;n4Z=zF1n?T*yg<;W6kOfxpC6nao>VR% z?fpr=asSJ&`L*wu^rLJ5Peq*PB0;alL#XazZCBxJLd&giTfw@!hW167F^`7kobi;( ze<<>qNlP|xy7S1zl@lZNIBR7#o9ybJsptO#%}P0hz~sBp00000NkvXXu0mjfUsDF? literal 0 HcmV?d00001 diff --git a/client/docs/swagger-ui/favicon-32x32.png b/client/docs/swagger-ui/favicon-32x32.png new file mode 100644 index 0000000000000000000000000000000000000000..249737fe44558e679f0b67134e274461d988fa98 GIT binary patch literal 628 zcmV-)0*n2LP)Ma*GM0}OV<074bNCP7P7GVd{iMr*I6y~TMLss@FjvgL~HxU z%Vvj33AwpD(Z4*$Mfx=HaU16axM zt2xG_rloN<$iy9j9I5 + + + + + Swagger UI + + + + + + + +
+ + + + + + diff --git a/client/docs/swagger-ui/oauth2-redirect.html b/client/docs/swagger-ui/oauth2-redirect.html new file mode 100644 index 00000000..64b171f7 --- /dev/null +++ b/client/docs/swagger-ui/oauth2-redirect.html @@ -0,0 +1,75 @@ + + + + Swagger UI: OAuth2 Redirect + + + + + diff --git a/client/docs/swagger-ui/swagger-ui-bundle.js b/client/docs/swagger-ui/swagger-ui-bundle.js new file mode 100644 index 00000000..d8acdca9 --- /dev/null +++ b/client/docs/swagger-ui/swagger-ui-bundle.js @@ -0,0 +1,3 @@ +/*! For license information please see swagger-ui-bundle.js.LICENSE.txt */ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.SwaggerUIBundle=t():e.SwaggerUIBundle=t()}(this,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="/dist",n(n.s=479)}([function(e,t,n){"use strict";e.exports=n(560)},function(e,t,n){e.exports=function(){"use strict";var e=Array.prototype.slice;function t(e,t){t&&(e.prototype=Object.create(t.prototype)),e.prototype.constructor=e}function n(e){return i(e)?e:J(e)}function r(e){return u(e)?e:K(e)}function o(e){return s(e)?e:Y(e)}function a(e){return i(e)&&!c(e)?e:G(e)}function i(e){return!(!e||!e[f])}function u(e){return!(!e||!e[p])}function s(e){return!(!e||!e[h])}function c(e){return u(e)||s(e)}function l(e){return!(!e||!e[d])}t(r,n),t(o,n),t(a,n),n.isIterable=i,n.isKeyed=u,n.isIndexed=s,n.isAssociative=c,n.isOrdered=l,n.Keyed=r,n.Indexed=o,n.Set=a;var f="@@__IMMUTABLE_ITERABLE__@@",p="@@__IMMUTABLE_KEYED__@@",h="@@__IMMUTABLE_INDEXED__@@",d="@@__IMMUTABLE_ORDERED__@@",m="delete",v=5,g=1<>>0;if(""+n!==t||4294967295===n)return NaN;t=n}return t<0?A(e)+t:t}function C(){return!0}function j(e,t,n){return(0===e||void 0!==n&&e<=-n)&&(void 0===t||void 0!==n&&t>=n)}function T(e,t){return P(e,t,0)}function I(e,t){return P(e,t,t)}function P(e,t,n){return void 0===e?n:e<0?Math.max(0,t+e):void 0===t?e:Math.min(t,e)}var N=0,M=1,R=2,D="function"==typeof Symbol&&Symbol.iterator,L="@@iterator",B=D||L;function F(e){this.next=e}function z(e,t,n,r){var o=0===e?t:1===e?n:[t,n];return r?r.value=o:r={value:o,done:!1},r}function U(){return{value:void 0,done:!0}}function q(e){return!!H(e)}function V(e){return e&&"function"==typeof e.next}function W(e){var t=H(e);return t&&t.call(e)}function H(e){var t=e&&(D&&e[D]||e[L]);if("function"==typeof t)return t}function $(e){return e&&"number"==typeof e.length}function J(e){return null==e?ie():i(e)?e.toSeq():ce(e)}function K(e){return null==e?ie().toKeyedSeq():i(e)?u(e)?e.toSeq():e.fromEntrySeq():ue(e)}function Y(e){return 
null==e?ie():i(e)?u(e)?e.entrySeq():e.toIndexedSeq():se(e)}function G(e){return(null==e?ie():i(e)?u(e)?e.entrySeq():e:se(e)).toSetSeq()}F.prototype.toString=function(){return"[Iterator]"},F.KEYS=N,F.VALUES=M,F.ENTRIES=R,F.prototype.inspect=F.prototype.toSource=function(){return this.toString()},F.prototype[B]=function(){return this},t(J,n),J.of=function(){return J(arguments)},J.prototype.toSeq=function(){return this},J.prototype.toString=function(){return this.__toString("Seq {","}")},J.prototype.cacheResult=function(){return!this._cache&&this.__iterateUncached&&(this._cache=this.entrySeq().toArray(),this.size=this._cache.length),this},J.prototype.__iterate=function(e,t){return fe(this,e,t,!0)},J.prototype.__iterator=function(e,t){return pe(this,e,t,!0)},t(K,J),K.prototype.toKeyedSeq=function(){return this},t(Y,J),Y.of=function(){return Y(arguments)},Y.prototype.toIndexedSeq=function(){return this},Y.prototype.toString=function(){return this.__toString("Seq [","]")},Y.prototype.__iterate=function(e,t){return fe(this,e,t,!1)},Y.prototype.__iterator=function(e,t){return pe(this,e,t,!1)},t(G,J),G.of=function(){return G(arguments)},G.prototype.toSetSeq=function(){return this},J.isSeq=ae,J.Keyed=K,J.Set=G,J.Indexed=Y;var Q,Z,X,ee="@@__IMMUTABLE_SEQ__@@";function te(e){this._array=e,this.size=e.length}function ne(e){var t=Object.keys(e);this._object=e,this._keys=t,this.size=t.length}function re(e){this._iterable=e,this.size=e.length||e.size}function oe(e){this._iterator=e,this._iteratorCache=[]}function ae(e){return!(!e||!e[ee])}function ie(){return Q||(Q=new te([]))}function ue(e){var t=Array.isArray(e)?new te(e).fromEntrySeq():V(e)?new oe(e).fromEntrySeq():q(e)?new re(e).fromEntrySeq():"object"==typeof e?new ne(e):void 0;if(!t)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+e);return t}function se(e){var t=le(e);if(!t)throw new TypeError("Expected Array or iterable object of values: "+e);return t}function ce(e){var t=le(e)||"object"==typeof e&&new ne(e);if(!t)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+e);return t}function le(e){return $(e)?new te(e):V(e)?new oe(e):q(e)?new re(e):void 0}function fe(e,t,n,r){var o=e._cache;if(o){for(var a=o.length-1,i=0;i<=a;i++){var u=o[n?a-i:i];if(!1===t(u[1],r?u[0]:i,e))return i+1}return i}return e.__iterateUncached(t,n)}function pe(e,t,n,r){var o=e._cache;if(o){var a=o.length-1,i=0;return new F((function(){var e=o[n?a-i:i];return i++>a?U():z(t,r?e[0]:i-1,e[1])}))}return e.__iteratorUncached(t,n)}function he(e,t){return t?de(t,e,"",{"":e}):me(e)}function de(e,t,n,r){return Array.isArray(t)?e.call(r,n,Y(t).map((function(n,r){return de(e,n,r,t)}))):ve(t)?e.call(r,n,K(t).map((function(n,r){return de(e,n,r,t)}))):t}function me(e){return Array.isArray(e)?Y(e).map(me).toList():ve(e)?K(e).map(me).toMap():e}function ve(e){return e&&(e.constructor===Object||void 0===e.constructor)}function ge(e,t){if(e===t||e!=e&&t!=t)return!0;if(!e||!t)return!1;if("function"==typeof e.valueOf&&"function"==typeof t.valueOf){if((e=e.valueOf())===(t=t.valueOf())||e!=e&&t!=t)return!0;if(!e||!t)return!1}return!("function"!=typeof e.equals||"function"!=typeof t.equals||!e.equals(t))}function ye(e,t){if(e===t)return!0;if(!i(t)||void 0!==e.size&&void 0!==t.size&&e.size!==t.size||void 0!==e.__hash&&void 0!==t.__hash&&e.__hash!==t.__hash||u(e)!==u(t)||s(e)!==s(t)||l(e)!==l(t))return!1;if(0===e.size&&0===t.size)return!0;var n=!c(e);if(l(e)){var r=e.entries();return t.every((function(e,t){var 
o=r.next().value;return o&&ge(o[1],e)&&(n||ge(o[0],t))}))&&r.next().done}var o=!1;if(void 0===e.size)if(void 0===t.size)"function"==typeof e.cacheResult&&e.cacheResult();else{o=!0;var a=e;e=t,t=a}var f=!0,p=t.__iterate((function(t,r){if(n?!e.has(t):o?!ge(t,e.get(r,b)):!ge(e.get(r,b),t))return f=!1,!1}));return f&&e.size===p}function be(e,t){if(!(this instanceof be))return new be(e,t);if(this._value=e,this.size=void 0===t?1/0:Math.max(0,t),0===this.size){if(Z)return Z;Z=this}}function we(e,t){if(!e)throw new Error(t)}function xe(e,t,n){if(!(this instanceof xe))return new xe(e,t,n);if(we(0!==n,"Cannot step a Range by 0"),e=e||0,void 0===t&&(t=1/0),n=void 0===n?1:Math.abs(n),tr?U():z(e,o,n[t?r-o++:o++])}))},t(ne,K),ne.prototype.get=function(e,t){return void 0===t||this.has(e)?this._object[e]:t},ne.prototype.has=function(e){return this._object.hasOwnProperty(e)},ne.prototype.__iterate=function(e,t){for(var n=this._object,r=this._keys,o=r.length-1,a=0;a<=o;a++){var i=r[t?o-a:a];if(!1===e(n[i],i,this))return a+1}return a},ne.prototype.__iterator=function(e,t){var n=this._object,r=this._keys,o=r.length-1,a=0;return new F((function(){var i=r[t?o-a:a];return a++>o?U():z(e,i,n[i])}))},ne.prototype[d]=!0,t(re,Y),re.prototype.__iterateUncached=function(e,t){if(t)return this.cacheResult().__iterate(e,t);var n=W(this._iterable),r=0;if(V(n))for(var o;!(o=n.next()).done&&!1!==e(o.value,r++,this););return r},re.prototype.__iteratorUncached=function(e,t){if(t)return this.cacheResult().__iterator(e,t);var n=W(this._iterable);if(!V(n))return new F(U);var r=0;return new F((function(){var t=n.next();return t.done?t:z(e,r++,t.value)}))},t(oe,Y),oe.prototype.__iterateUncached=function(e,t){if(t)return this.cacheResult().__iterate(e,t);for(var n,r=this._iterator,o=this._iteratorCache,a=0;a=r.length){var t=n.next();if(t.done)return t;r[o]=t.value}return z(e,o,r[o++])}))},t(be,Y),be.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},be.prototype.get=function(e,t){return this.has(e)?this._value:t},be.prototype.includes=function(e){return ge(this._value,e)},be.prototype.slice=function(e,t){var n=this.size;return j(e,t,n)?this:new be(this._value,I(t,n)-T(e,n))},be.prototype.reverse=function(){return this},be.prototype.indexOf=function(e){return ge(this._value,e)?0:-1},be.prototype.lastIndexOf=function(e){return ge(this._value,e)?this.size:-1},be.prototype.__iterate=function(e,t){for(var n=0;n=0&&t=0&&nn?U():z(e,a++,i)}))},xe.prototype.equals=function(e){return e instanceof xe?this._start===e._start&&this._end===e._end&&this._step===e._step:ye(this,e)},t(Ee,n),t(_e,Ee),t(Se,Ee),t(ke,Ee),Ee.Keyed=_e,Ee.Indexed=Se,Ee.Set=ke;var Ae="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function(e,t){var n=65535&(e|=0),r=65535&(t|=0);return n*r+((e>>>16)*r+n*(t>>>16)<<16>>>0)|0};function Oe(e){return e>>>1&1073741824|3221225471&e}function Ce(e){if(!1===e||null==e)return 0;if("function"==typeof e.valueOf&&(!1===(e=e.valueOf())||null==e))return 0;if(!0===e)return 1;var t=typeof e;if("number"===t){if(e!=e||e===1/0)return 0;var n=0|e;for(n!==e&&(n^=4294967295*e);e>4294967295;)n^=e/=4294967295;return Oe(n)}if("string"===t)return e.length>Fe?je(e):Te(e);if("function"==typeof e.hashCode)return e.hashCode();if("object"===t)return Ie(e);if("function"==typeof e.toString)return Te(e.toString());throw new Error("Value type "+t+" cannot be hashed.")}function je(e){var t=qe[e];return void 0===t&&(t=Te(e),Ue===ze&&(Ue=0,qe={}),Ue++,qe[e]=t),t}function 
Te(e){for(var t=0,n=0;n0)switch(e.nodeType){case 1:return e.uniqueID;case 9:return e.documentElement&&e.documentElement.uniqueID}}var Re,De="function"==typeof WeakMap;De&&(Re=new WeakMap);var Le=0,Be="__immutablehash__";"function"==typeof Symbol&&(Be=Symbol(Be));var Fe=16,ze=255,Ue=0,qe={};function Ve(e){we(e!==1/0,"Cannot perform this action with an infinite size.")}function We(e){return null==e?ot():He(e)&&!l(e)?e:ot().withMutations((function(t){var n=r(e);Ve(n.size),n.forEach((function(e,n){return t.set(n,e)}))}))}function He(e){return!(!e||!e[Je])}t(We,_e),We.of=function(){var t=e.call(arguments,0);return ot().withMutations((function(e){for(var n=0;n=t.length)throw new Error("Missing value for key: "+t[n]);e.set(t[n],t[n+1])}}))},We.prototype.toString=function(){return this.__toString("Map {","}")},We.prototype.get=function(e,t){return this._root?this._root.get(0,void 0,e,t):t},We.prototype.set=function(e,t){return at(this,e,t)},We.prototype.setIn=function(e,t){return this.updateIn(e,b,(function(){return t}))},We.prototype.remove=function(e){return at(this,e,b)},We.prototype.deleteIn=function(e){return this.updateIn(e,(function(){return b}))},We.prototype.update=function(e,t,n){return 1===arguments.length?e(this):this.updateIn([e],t,n)},We.prototype.updateIn=function(e,t,n){n||(n=t,t=void 0);var r=vt(this,En(e),t,n);return r===b?void 0:r},We.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):ot()},We.prototype.merge=function(){return pt(this,void 0,arguments)},We.prototype.mergeWith=function(t){return pt(this,t,e.call(arguments,1))},We.prototype.mergeIn=function(t){var n=e.call(arguments,1);return this.updateIn(t,ot(),(function(e){return"function"==typeof e.merge?e.merge.apply(e,n):n[n.length-1]}))},We.prototype.mergeDeep=function(){return pt(this,ht,arguments)},We.prototype.mergeDeepWith=function(t){var n=e.call(arguments,1);return pt(this,dt(t),n)},We.prototype.mergeDeepIn=function(t){var n=e.call(arguments,1);return this.updateIn(t,ot(),(function(e){return"function"==typeof e.mergeDeep?e.mergeDeep.apply(e,n):n[n.length-1]}))},We.prototype.sort=function(e){return qt(fn(this,e))},We.prototype.sortBy=function(e,t){return qt(fn(this,t,e))},We.prototype.withMutations=function(e){var t=this.asMutable();return e(t),t.wasAltered()?t.__ensureOwner(this.__ownerID):this},We.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new S)},We.prototype.asImmutable=function(){return this.__ensureOwner()},We.prototype.wasAltered=function(){return this.__altered},We.prototype.__iterator=function(e,t){return new et(this,e,t)},We.prototype.__iterate=function(e,t){var n=this,r=0;return this._root&&this._root.iterate((function(t){return r++,e(t[1],t[0],n)}),t),r},We.prototype.__ensureOwner=function(e){return e===this.__ownerID?this:e?rt(this.size,this._root,e,this.__hash):(this.__ownerID=e,this.__altered=!1,this)},We.isMap=He;var $e,Je="@@__IMMUTABLE_MAP__@@",Ke=We.prototype;function Ye(e,t){this.ownerID=e,this.entries=t}function Ge(e,t,n){this.ownerID=e,this.bitmap=t,this.nodes=n}function Qe(e,t,n){this.ownerID=e,this.count=t,this.nodes=n}function Ze(e,t,n){this.ownerID=e,this.keyHash=t,this.entries=n}function Xe(e,t,n){this.ownerID=e,this.keyHash=t,this.entry=n}function et(e,t,n){this._type=t,this._reverse=n,this._stack=e._root&&nt(e._root)}function tt(e,t){return z(e,t[0],t[1])}function nt(e,t){return{node:e,index:0,__prev:t}}function rt(e,t,n,r){var o=Object.create(Ke);return 
o.size=e,o._root=t,o.__ownerID=n,o.__hash=r,o.__altered=!1,o}function ot(){return $e||($e=rt(0))}function at(e,t,n){var r,o;if(e._root){var a=E(w),i=E(x);if(r=it(e._root,e.__ownerID,0,void 0,t,n,a,i),!i.value)return e;o=e.size+(a.value?n===b?-1:1:0)}else{if(n===b)return e;o=1,r=new Ye(e.__ownerID,[[t,n]])}return e.__ownerID?(e.size=o,e._root=r,e.__hash=void 0,e.__altered=!0,e):r?rt(o,r):ot()}function it(e,t,n,r,o,a,i,u){return e?e.update(t,n,r,o,a,i,u):a===b?e:(_(u),_(i),new Xe(t,r,[o,a]))}function ut(e){return e.constructor===Xe||e.constructor===Ze}function st(e,t,n,r,o){if(e.keyHash===r)return new Ze(t,r,[e.entry,o]);var a,i=(0===n?e.keyHash:e.keyHash>>>n)&y,u=(0===n?r:r>>>n)&y;return new Ge(t,1<>>=1)i[u]=1&n?t[a++]:void 0;return i[r]=o,new Qe(e,a+1,i)}function pt(e,t,n){for(var o=[],a=0;a>1&1431655765))+(e>>2&858993459))+(e>>4)&252645135,e+=e>>8,127&(e+=e>>16)}function yt(e,t,n,r){var o=r?e:k(e);return o[t]=n,o}function bt(e,t,n,r){var o=e.length+1;if(r&&t+1===o)return e[t]=n,e;for(var a=new Array(o),i=0,u=0;u=xt)return ct(e,s,r,o);var p=e&&e===this.ownerID,h=p?s:k(s);return f?u?c===l-1?h.pop():h[c]=h.pop():h[c]=[r,o]:h.push([r,o]),p?(this.entries=h,this):new Ye(e,h)}},Ge.prototype.get=function(e,t,n,r){void 0===t&&(t=Ce(n));var o=1<<((0===e?t:t>>>e)&y),a=this.bitmap;return 0==(a&o)?r:this.nodes[gt(a&o-1)].get(e+v,t,n,r)},Ge.prototype.update=function(e,t,n,r,o,a,i){void 0===n&&(n=Ce(r));var u=(0===t?n:n>>>t)&y,s=1<=Et)return ft(e,p,c,u,d);if(l&&!d&&2===p.length&&ut(p[1^f]))return p[1^f];if(l&&d&&1===p.length&&ut(d))return d;var m=e&&e===this.ownerID,g=l?d?c:c^s:c|s,w=l?d?yt(p,f,d,m):wt(p,f,m):bt(p,f,d,m);return m?(this.bitmap=g,this.nodes=w,this):new Ge(e,g,w)},Qe.prototype.get=function(e,t,n,r){void 0===t&&(t=Ce(n));var o=(0===e?t:t>>>e)&y,a=this.nodes[o];return a?a.get(e+v,t,n,r):r},Qe.prototype.update=function(e,t,n,r,o,a,i){void 0===n&&(n=Ce(r));var u=(0===t?n:n>>>t)&y,s=o===b,c=this.nodes,l=c[u];if(s&&!l)return this;var f=it(l,e,t+v,n,r,o,a,i);if(f===l)return this;var p=this.count;if(l){if(!f&&--p<_t)return lt(e,c,p,u)}else p++;var h=e&&e===this.ownerID,d=yt(c,u,f,h);return h?(this.count=p,this.nodes=d,this):new Qe(e,p,d)},Ze.prototype.get=function(e,t,n,r){for(var o=this.entries,a=0,i=o.length;a0&&r=0&&e>>t&y;if(r>=this.array.length)return new Ct([],e);var o,a=0===r;if(t>0){var i=this.array[r];if((o=i&&i.removeBefore(e,t-v,n))===i&&a)return this}if(a&&!o)return this;var u=Lt(this,e);if(!a)for(var s=0;s>>t&y;if(o>=this.array.length)return this;if(t>0){var a=this.array[o];if((r=a&&a.removeAfter(e,t-v,n))===a&&o===this.array.length-1)return this}var i=Lt(this,e);return i.array.splice(o+1),r&&(i.array[o]=r),i};var jt,Tt,It={};function Pt(e,t){var n=e._origin,r=e._capacity,o=Ut(r),a=e._tail;return i(e._root,e._level,0);function i(e,t,n){return 0===t?u(e,n):s(e,t,n)}function u(e,i){var u=i===o?a&&a.array:e&&e.array,s=i>n?0:n-i,c=r-i;return c>g&&(c=g),function(){if(s===c)return It;var e=t?--c:s++;return u&&u[e]}}function s(e,o,a){var u,s=e&&e.array,c=a>n?0:n-a>>o,l=1+(r-a>>o);return l>g&&(l=g),function(){for(;;){if(u){var e=u();if(e!==It)return e;u=null}if(c===l)return It;var n=t?--l:c++;u=i(s&&s[n],o-v,a+(n<=e.size||t<0)return e.withMutations((function(e){t<0?Ft(e,t).set(0,n):Ft(e,0,t+1).set(t,n)}));t+=e._origin;var r=e._tail,o=e._root,a=E(x);return t>=Ut(e._capacity)?r=Dt(r,e.__ownerID,0,t,n,a):o=Dt(o,e.__ownerID,e._level,t,n,a),a.value?e.__ownerID?(e._root=o,e._tail=r,e.__hash=void 0,e.__altered=!0,e):Nt(e._origin,e._capacity,e._level,o,r):e}function Dt(e,t,n,r,o,a){var 
i,u=r>>>n&y,s=e&&u0){var c=e&&e.array[u],l=Dt(c,t,n-v,r,o,a);return l===c?e:((i=Lt(e,t)).array[u]=l,i)}return s&&e.array[u]===o?e:(_(a),i=Lt(e,t),void 0===o&&u===i.array.length-1?i.array.pop():i.array[u]=o,i)}function Lt(e,t){return t&&e&&t===e.ownerID?e:new Ct(e?e.array.slice():[],t)}function Bt(e,t){if(t>=Ut(e._capacity))return e._tail;if(t<1<0;)n=n.array[t>>>r&y],r-=v;return n}}function Ft(e,t,n){void 0!==t&&(t|=0),void 0!==n&&(n|=0);var r=e.__ownerID||new S,o=e._origin,a=e._capacity,i=o+t,u=void 0===n?a:n<0?a+n:o+n;if(i===o&&u===a)return e;if(i>=u)return e.clear();for(var s=e._level,c=e._root,l=0;i+l<0;)c=new Ct(c&&c.array.length?[void 0,c]:[],r),l+=1<<(s+=v);l&&(i+=l,o+=l,u+=l,a+=l);for(var f=Ut(a),p=Ut(u);p>=1<f?new Ct([],r):h;if(h&&p>f&&iv;g-=v){var b=f>>>g&y;m=m.array[b]=Lt(m.array[b],r)}m.array[f>>>v&y]=h}if(u=p)i-=p,u-=p,s=v,c=null,d=d&&d.removeBefore(r,0,i);else if(i>o||p>>s&y;if(w!==p>>>s&y)break;w&&(l+=(1<o&&(c=c.removeBefore(r,s,i-l)),c&&pa&&(a=c.size),i(s)||(c=c.map((function(e){return he(e)}))),r.push(c)}return a>e.size&&(e=e.setSize(a)),mt(e,t,r)}function Ut(e){return e>>v<=g&&i.size>=2*a.size?(r=(o=i.filter((function(e,t){return void 0!==e&&u!==t}))).toKeyedSeq().map((function(e){return e[0]})).flip().toMap(),e.__ownerID&&(r.__ownerID=o.__ownerID=e.__ownerID)):(r=a.remove(t),o=u===i.size-1?i.pop():i.set(u,void 0))}else if(s){if(n===i.get(u)[1])return e;r=a,o=i.set(u,[t,n])}else r=a.set(t,i.size),o=i.set(i.size,[t,n]);return e.__ownerID?(e.size=r.size,e._map=r,e._list=o,e.__hash=void 0,e):Wt(r,o)}function Jt(e,t){this._iter=e,this._useKeys=t,this.size=e.size}function Kt(e){this._iter=e,this.size=e.size}function Yt(e){this._iter=e,this.size=e.size}function Gt(e){this._iter=e,this.size=e.size}function Qt(e){var t=bn(e);return t._iter=e,t.size=e.size,t.flip=function(){return e},t.reverse=function(){var t=e.reverse.apply(this);return t.flip=function(){return e.reverse()},t},t.has=function(t){return e.includes(t)},t.includes=function(t){return e.has(t)},t.cacheResult=wn,t.__iterateUncached=function(t,n){var r=this;return e.__iterate((function(e,n){return!1!==t(n,e,r)}),n)},t.__iteratorUncached=function(t,n){if(t===R){var r=e.__iterator(t,n);return new F((function(){var e=r.next();if(!e.done){var t=e.value[0];e.value[0]=e.value[1],e.value[1]=t}return e}))}return e.__iterator(t===M?N:M,n)},t}function Zt(e,t,n){var r=bn(e);return r.size=e.size,r.has=function(t){return e.has(t)},r.get=function(r,o){var a=e.get(r,b);return a===b?o:t.call(n,a,r,e)},r.__iterateUncached=function(r,o){var a=this;return e.__iterate((function(e,o,i){return!1!==r(t.call(n,e,o,i),o,a)}),o)},r.__iteratorUncached=function(r,o){var a=e.__iterator(R,o);return new F((function(){var o=a.next();if(o.done)return o;var i=o.value,u=i[0];return z(r,u,t.call(n,i[1],u,e),o)}))},r}function Xt(e,t){var n=bn(e);return n._iter=e,n.size=e.size,n.reverse=function(){return e},e.flip&&(n.flip=function(){var t=Qt(e);return t.reverse=function(){return e.flip()},t}),n.get=function(n,r){return e.get(t?n:-1-n,r)},n.has=function(n){return e.has(t?n:-1-n)},n.includes=function(t){return e.includes(t)},n.cacheResult=wn,n.__iterate=function(t,n){var r=this;return e.__iterate((function(e,n){return t(e,n,r)}),!n)},n.__iterator=function(t,n){return e.__iterator(t,!n)},n}function en(e,t,n,r){var o=bn(e);return r&&(o.has=function(r){var o=e.get(r,b);return o!==b&&!!t.call(n,o,r,e)},o.get=function(r,o){var a=e.get(r,b);return a!==b&&t.call(n,a,r,e)?a:o}),o.__iterateUncached=function(o,a){var i=this,u=0;return 
e.__iterate((function(e,a,s){if(t.call(n,e,a,s))return u++,o(e,r?a:u-1,i)}),a),u},o.__iteratorUncached=function(o,a){var i=e.__iterator(R,a),u=0;return new F((function(){for(;;){var a=i.next();if(a.done)return a;var s=a.value,c=s[0],l=s[1];if(t.call(n,l,c,e))return z(o,r?c:u++,l,a)}}))},o}function tn(e,t,n){var r=We().asMutable();return e.__iterate((function(o,a){r.update(t.call(n,o,a,e),0,(function(e){return e+1}))})),r.asImmutable()}function nn(e,t,n){var r=u(e),o=(l(e)?qt():We()).asMutable();e.__iterate((function(a,i){o.update(t.call(n,a,i,e),(function(e){return(e=e||[]).push(r?[i,a]:a),e}))}));var a=yn(e);return o.map((function(t){return mn(e,a(t))}))}function rn(e,t,n,r){var o=e.size;if(void 0!==t&&(t|=0),void 0!==n&&(n===1/0?n=o:n|=0),j(t,n,o))return e;var a=T(t,o),i=I(n,o);if(a!=a||i!=i)return rn(e.toSeq().cacheResult(),t,n,r);var u,s=i-a;s==s&&(u=s<0?0:s);var c=bn(e);return c.size=0===u?u:e.size&&u||void 0,!r&&ae(e)&&u>=0&&(c.get=function(t,n){return(t=O(this,t))>=0&&tu)return U();var e=o.next();return r||t===M?e:z(t,s-1,t===N?void 0:e.value[1],e)}))},c}function on(e,t,n){var r=bn(e);return r.__iterateUncached=function(r,o){var a=this;if(o)return this.cacheResult().__iterate(r,o);var i=0;return e.__iterate((function(e,o,u){return t.call(n,e,o,u)&&++i&&r(e,o,a)})),i},r.__iteratorUncached=function(r,o){var a=this;if(o)return this.cacheResult().__iterator(r,o);var i=e.__iterator(R,o),u=!0;return new F((function(){if(!u)return U();var e=i.next();if(e.done)return e;var o=e.value,s=o[0],c=o[1];return t.call(n,c,s,a)?r===R?e:z(r,s,c,e):(u=!1,U())}))},r}function an(e,t,n,r){var o=bn(e);return o.__iterateUncached=function(o,a){var i=this;if(a)return this.cacheResult().__iterate(o,a);var u=!0,s=0;return e.__iterate((function(e,a,c){if(!u||!(u=t.call(n,e,a,c)))return s++,o(e,r?a:s-1,i)})),s},o.__iteratorUncached=function(o,a){var i=this;if(a)return this.cacheResult().__iterator(o,a);var u=e.__iterator(R,a),s=!0,c=0;return new F((function(){var e,a,l;do{if((e=u.next()).done)return r||o===M?e:z(o,c++,o===N?void 0:e.value[1],e);var f=e.value;a=f[0],l=f[1],s&&(s=t.call(n,l,a,i))}while(s);return o===R?e:z(o,a,l,e)}))},o}function un(e,t){var n=u(e),o=[e].concat(t).map((function(e){return i(e)?n&&(e=r(e)):e=n?ue(e):se(Array.isArray(e)?e:[e]),e})).filter((function(e){return 0!==e.size}));if(0===o.length)return e;if(1===o.length){var a=o[0];if(a===e||n&&u(a)||s(e)&&s(a))return a}var c=new te(o);return n?c=c.toKeyedSeq():s(e)||(c=c.toSetSeq()),(c=c.flatten(!0)).size=o.reduce((function(e,t){if(void 0!==e){var n=t.size;if(void 0!==n)return e+n}}),0),c}function sn(e,t,n){var r=bn(e);return r.__iterateUncached=function(r,o){var a=0,u=!1;function s(e,c){var l=this;e.__iterate((function(e,o){return(!t||c0}function dn(e,t,r){var o=bn(e);return o.size=new te(r).map((function(e){return e.size})).min(),o.__iterate=function(e,t){for(var n,r=this.__iterator(M,t),o=0;!(n=r.next()).done&&!1!==e(n.value,o++,this););return o},o.__iteratorUncached=function(e,o){var a=r.map((function(e){return e=n(e),W(o?e.reverse():e)})),i=0,u=!1;return new F((function(){var n;return u||(n=a.map((function(e){return e.next()})),u=n.some((function(e){return e.done}))),u?U():z(e,i++,t.apply(null,n.map((function(e){return e.value}))))}))},o}function mn(e,t){return ae(e)?t:e.constructor(t)}function vn(e){if(e!==Object(e))throw new TypeError("Expected [K, V] tuple: "+e)}function gn(e){return Ve(e.size),A(e)}function yn(e){return u(e)?r:s(e)?o:a}function bn(e){return Object.create((u(e)?K:s(e)?Y:G).prototype)}function wn(){return 
this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):J.prototype.cacheResult.call(this)}function xn(e,t){return e>t?1:e=0;n--)t={value:arguments[n],next:t};return this.__ownerID?(this.size=e,this._head=t,this.__hash=void 0,this.__altered=!0,this):Kn(e,t)},Vn.prototype.pushAll=function(e){if(0===(e=o(e)).size)return this;Ve(e.size);var t=this.size,n=this._head;return e.reverse().forEach((function(e){t++,n={value:e,next:n}})),this.__ownerID?(this.size=t,this._head=n,this.__hash=void 0,this.__altered=!0,this):Kn(t,n)},Vn.prototype.pop=function(){return this.slice(1)},Vn.prototype.unshift=function(){return this.push.apply(this,arguments)},Vn.prototype.unshiftAll=function(e){return this.pushAll(e)},Vn.prototype.shift=function(){return this.pop.apply(this,arguments)},Vn.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):Yn()},Vn.prototype.slice=function(e,t){if(j(e,t,this.size))return this;var n=T(e,this.size);if(I(t,this.size)!==this.size)return Se.prototype.slice.call(this,e,t);for(var r=this.size-n,o=this._head;n--;)o=o.next;return this.__ownerID?(this.size=r,this._head=o,this.__hash=void 0,this.__altered=!0,this):Kn(r,o)},Vn.prototype.__ensureOwner=function(e){return e===this.__ownerID?this:e?Kn(this.size,this._head,e,this.__hash):(this.__ownerID=e,this.__altered=!1,this)},Vn.prototype.__iterate=function(e,t){if(t)return this.reverse().__iterate(e);for(var n=0,r=this._head;r&&!1!==e(r.value,n++,this);)r=r.next;return n},Vn.prototype.__iterator=function(e,t){if(t)return this.reverse().__iterator(e);var n=0,r=this._head;return new F((function(){if(r){var t=r.value;return r=r.next,z(e,n++,t)}return U()}))},Vn.isStack=Wn;var Hn,$n="@@__IMMUTABLE_STACK__@@",Jn=Vn.prototype;function Kn(e,t,n,r){var o=Object.create(Jn);return o.size=e,o._head=t,o.__ownerID=n,o.__hash=r,o.__altered=!1,o}function Yn(){return Hn||(Hn=Kn(0))}function Gn(e,t){var n=function(n){e.prototype[n]=t[n]};return Object.keys(t).forEach(n),Object.getOwnPropertySymbols&&Object.getOwnPropertySymbols(t).forEach(n),e}Jn[$n]=!0,Jn.withMutations=Ke.withMutations,Jn.asMutable=Ke.asMutable,Jn.asImmutable=Ke.asImmutable,Jn.wasAltered=Ke.wasAltered,n.Iterator=F,Gn(n,{toArray:function(){Ve(this.size);var e=new Array(this.size||0);return this.valueSeq().__iterate((function(t,n){e[n]=t})),e},toIndexedSeq:function(){return new Kt(this)},toJS:function(){return this.toSeq().map((function(e){return e&&"function"==typeof e.toJS?e.toJS():e})).__toJS()},toJSON:function(){return this.toSeq().map((function(e){return e&&"function"==typeof e.toJSON?e.toJSON():e})).__toJS()},toKeyedSeq:function(){return new Jt(this,!0)},toMap:function(){return We(this.toKeyedSeq())},toObject:function(){Ve(this.size);var e={};return this.__iterate((function(t,n){e[n]=t})),e},toOrderedMap:function(){return qt(this.toKeyedSeq())},toOrderedSet:function(){return Ln(u(this)?this.valueSeq():this)},toSet:function(){return jn(u(this)?this.valueSeq():this)},toSetSeq:function(){return new Yt(this)},toSeq:function(){return s(this)?this.toIndexedSeq():u(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return Vn(u(this)?this.valueSeq():this)},toList:function(){return St(u(this)?this.valueSeq():this)},toString:function(){return"[Iterable]"},__toString:function(e,t){return 0===this.size?e+t:e+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+t},concat:function(){return mn(this,un(this,e.call(arguments,0)))},includes:function(e){return 
this.some((function(t){return ge(t,e)}))},entries:function(){return this.__iterator(R)},every:function(e,t){Ve(this.size);var n=!0;return this.__iterate((function(r,o,a){if(!e.call(t,r,o,a))return n=!1,!1})),n},filter:function(e,t){return mn(this,en(this,e,t,!0))},find:function(e,t,n){var r=this.findEntry(e,t);return r?r[1]:n},forEach:function(e,t){return Ve(this.size),this.__iterate(t?e.bind(t):e)},join:function(e){Ve(this.size),e=void 0!==e?""+e:",";var t="",n=!0;return this.__iterate((function(r){n?n=!1:t+=e,t+=null!=r?r.toString():""})),t},keys:function(){return this.__iterator(N)},map:function(e,t){return mn(this,Zt(this,e,t))},reduce:function(e,t,n){var r,o;return Ve(this.size),arguments.length<2?o=!0:r=t,this.__iterate((function(t,a,i){o?(o=!1,r=t):r=e.call(n,r,t,a,i)})),r},reduceRight:function(e,t,n){var r=this.toKeyedSeq().reverse();return r.reduce.apply(r,arguments)},reverse:function(){return mn(this,Xt(this,!0))},slice:function(e,t){return mn(this,rn(this,e,t,!0))},some:function(e,t){return!this.every(tr(e),t)},sort:function(e){return mn(this,fn(this,e))},values:function(){return this.__iterator(M)},butLast:function(){return this.slice(0,-1)},isEmpty:function(){return void 0!==this.size?0===this.size:!this.some((function(){return!0}))},count:function(e,t){return A(e?this.toSeq().filter(e,t):this)},countBy:function(e,t){return tn(this,e,t)},equals:function(e){return ye(this,e)},entrySeq:function(){var e=this;if(e._cache)return new te(e._cache);var t=e.toSeq().map(er).toIndexedSeq();return t.fromEntrySeq=function(){return e.toSeq()},t},filterNot:function(e,t){return this.filter(tr(e),t)},findEntry:function(e,t,n){var r=n;return this.__iterate((function(n,o,a){if(e.call(t,n,o,a))return r=[o,n],!1})),r},findKey:function(e,t){var n=this.findEntry(e,t);return n&&n[0]},findLast:function(e,t,n){return this.toKeyedSeq().reverse().find(e,t,n)},findLastEntry:function(e,t,n){return this.toKeyedSeq().reverse().findEntry(e,t,n)},findLastKey:function(e,t){return this.toKeyedSeq().reverse().findKey(e,t)},first:function(){return this.find(C)},flatMap:function(e,t){return mn(this,cn(this,e,t))},flatten:function(e){return mn(this,sn(this,e,!0))},fromEntrySeq:function(){return new Gt(this)},get:function(e,t){return this.find((function(t,n){return ge(n,e)}),void 0,t)},getIn:function(e,t){for(var n,r=this,o=En(e);!(n=o.next()).done;){var a=n.value;if((r=r&&r.get?r.get(a,b):b)===b)return t}return r},groupBy:function(e,t){return nn(this,e,t)},has:function(e){return this.get(e,b)!==b},hasIn:function(e){return this.getIn(e,b)!==b},isSubset:function(e){return e="function"==typeof e.includes?e:n(e),this.every((function(t){return e.includes(t)}))},isSuperset:function(e){return(e="function"==typeof e.isSubset?e:n(e)).isSubset(this)},keyOf:function(e){return this.findKey((function(t){return ge(t,e)}))},keySeq:function(){return this.toSeq().map(Xn).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},lastKeyOf:function(e){return this.toKeyedSeq().reverse().keyOf(e)},max:function(e){return pn(this,e)},maxBy:function(e,t){return pn(this,t,e)},min:function(e){return pn(this,e?nr(e):ar)},minBy:function(e,t){return pn(this,t?nr(t):ar,e)},rest:function(){return this.slice(1)},skip:function(e){return this.slice(Math.max(0,e))},skipLast:function(e){return mn(this,this.toSeq().reverse().skip(e).reverse())},skipWhile:function(e,t){return mn(this,an(this,e,t,!0))},skipUntil:function(e,t){return this.skipWhile(tr(e),t)},sortBy:function(e,t){return mn(this,fn(this,t,e))},take:function(e){return 
this.slice(0,Math.max(0,e))},takeLast:function(e){return mn(this,this.toSeq().reverse().take(e).reverse())},takeWhile:function(e,t){return mn(this,on(this,e,t))},takeUntil:function(e,t){return this.takeWhile(tr(e),t)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=ir(this))}});var Qn=n.prototype;Qn[f]=!0,Qn[B]=Qn.values,Qn.__toJS=Qn.toArray,Qn.__toStringMapper=rr,Qn.inspect=Qn.toSource=function(){return this.toString()},Qn.chain=Qn.flatMap,Qn.contains=Qn.includes,Gn(r,{flip:function(){return mn(this,Qt(this))},mapEntries:function(e,t){var n=this,r=0;return mn(this,this.toSeq().map((function(o,a){return e.call(t,[a,o],r++,n)})).fromEntrySeq())},mapKeys:function(e,t){var n=this;return mn(this,this.toSeq().flip().map((function(r,o){return e.call(t,r,o,n)})).flip())}});var Zn=r.prototype;function Xn(e,t){return t}function er(e,t){return[t,e]}function tr(e){return function(){return!e.apply(this,arguments)}}function nr(e){return function(){return-e.apply(this,arguments)}}function rr(e){return"string"==typeof e?JSON.stringify(e):String(e)}function or(){return k(arguments)}function ar(e,t){return et?-1:0}function ir(e){if(e.size===1/0)return 0;var t=l(e),n=u(e),r=t?1:0;return ur(e.__iterate(n?t?function(e,t){r=31*r+sr(Ce(e),Ce(t))|0}:function(e,t){r=r+sr(Ce(e),Ce(t))|0}:t?function(e){r=31*r+Ce(e)|0}:function(e){r=r+Ce(e)|0}),r)}function ur(e,t){return t=Ae(t,3432918353),t=Ae(t<<15|t>>>-15,461845907),t=Ae(t<<13|t>>>-13,5),t=Ae((t=(t+3864292196|0)^e)^t>>>16,2246822507),t=Oe((t=Ae(t^t>>>13,3266489909))^t>>>16)}function sr(e,t){return e^t+2654435769+(e<<6)+(e>>2)|0}return Zn[p]=!0,Zn[B]=Qn.entries,Zn.__toJS=Qn.toObject,Zn.__toStringMapper=function(e,t){return JSON.stringify(t)+": "+rr(e)},Gn(o,{toKeyedSeq:function(){return new Jt(this,!1)},filter:function(e,t){return mn(this,en(this,e,t,!1))},findIndex:function(e,t){var n=this.findEntry(e,t);return n?n[0]:-1},indexOf:function(e){var t=this.keyOf(e);return void 0===t?-1:t},lastIndexOf:function(e){var t=this.lastKeyOf(e);return void 0===t?-1:t},reverse:function(){return mn(this,Xt(this,!1))},slice:function(e,t){return mn(this,rn(this,e,t,!1))},splice:function(e,t){var n=arguments.length;if(t=Math.max(0|t,0),0===n||2===n&&!t)return this;e=T(e,e<0?this.count():this.size);var r=this.slice(0,e);return mn(this,1===n?r:r.concat(k(arguments,2),this.slice(e+t)))},findLastIndex:function(e,t){var n=this.findLastEntry(e,t);return n?n[0]:-1},first:function(){return this.get(0)},flatten:function(e){return mn(this,sn(this,e,!1))},get:function(e,t){return(e=O(this,e))<0||this.size===1/0||void 0!==this.size&&e>this.size?t:this.find((function(t,n){return n===e}),void 0,t)},has:function(e){return(e=O(this,e))>=0&&(void 0!==this.size?this.size===1/0||e1)try{return decodeURIComponent(t[1])}catch(e){console.error(e)}return null}function Pe(e){return t=e.replace(/\.[^./]*$/,""),Y()(J()(t));var t}function Ne(e,t,n,r,a){if(!t)return[];var u=[],s=t.get("nullable"),c=t.get("required"),f=t.get("maximum"),h=t.get("minimum"),d=t.get("type"),m=t.get("format"),g=t.get("maxLength"),b=t.get("minLength"),x=t.get("uniqueItems"),E=t.get("maxItems"),_=t.get("minItems"),S=t.get("pattern"),k=n||!0===c,A=null!=e;if(s&&null===e||!d||!(k||A&&"array"===d||!(!k&&!A)))return[];var O="string"===d&&e,C="array"===d&&l()(e)&&e.length,j="array"===d&&W.a.List.isList(e)&&e.count(),T=[O,C,j,"array"===d&&"string"==typeof e&&e,"file"===d&&e instanceof 
ue.a.File,"boolean"===d&&(e||!1===e),"number"===d&&(e||0===e),"integer"===d&&(e||0===e),"object"===d&&"object"===i()(e)&&null!==e,"object"===d&&"string"==typeof e&&e],I=P()(T).call(T,(function(e){return!!e}));if(k&&!I&&!r)return u.push("Required field is not provided"),u;if("object"===d&&(null===a||"application/json"===a)){var N,M=e;if("string"==typeof e)try{M=JSON.parse(e)}catch(e){return u.push("Parameter string value must be valid JSON"),u}if(t&&t.has("required")&&_e(c.isList)&&c.isList()&&y()(c).call(c,(function(e){void 0===M[e]&&u.push({propKey:e,error:"Required property not found"})})),t&&t.has("properties"))y()(N=t.get("properties")).call(N,(function(e,t){var n=Ne(M[t],e,!1,r,a);u.push.apply(u,o()(p()(n).call(n,(function(e){return{propKey:t,error:e}}))))}))}if(S){var R=function(e,t){if(!new RegExp(t).test(e))return"Value must follow pattern "+t}(e,S);R&&u.push(R)}if(_&&"array"===d){var D=function(e,t){var n;if(!e&&t>=1||e&&e.lengtht)return v()(n="Array must not contain more then ".concat(t," item")).call(n,1===t?"":"s")}(e,E);L&&u.push({needRemove:!0,error:L})}if(x&&"array"===d){var B=function(e,t){if(e&&("true"===t||!0===t)){var n=Object(V.fromJS)(e),r=n.toSet();if(e.length>r.size){var o=Object(V.Set)();if(y()(n).call(n,(function(e,t){w()(n).call(n,(function(t){return _e(t.equals)?t.equals(e):t===e})).size>1&&(o=o.add(t))})),0!==o.size)return p()(o).call(o,(function(e){return{index:e,error:"No duplicates allowed."}})).toArray()}}}(e,x);B&&u.push.apply(u,o()(B))}if(g||0===g){var F=function(e,t){var n;if(e.length>t)return v()(n="Value must be no longer than ".concat(t," character")).call(n,1!==t?"s":"")}(e,g);F&&u.push(F)}if(b){var z=function(e,t){var n;if(e.lengtht)return"Value must be less than ".concat(t)}(e,f);U&&u.push(U)}if(h||0===h){var q=function(e,t){if(e2&&void 0!==arguments[2]?arguments[2]:{},r=n.isOAS3,o=void 0!==r&&r,a=n.bypassRequiredCheck,i=void 0!==a&&a,u=e.get("required"),s=Object(le.a)(e,{isOAS3:o}),c=s.schema,l=s.parameterContentMediaType;return Ne(t,c,u,i,l)},Re=function(e,t,n){if(e&&(!e.xml||!e.xml.name)){if(e.xml=e.xml||{},!e.$$ref)return e.type||e.items||e.properties||e.additionalProperties?'\n\x3c!-- XML example cannot be generated; root element name is undefined --\x3e':null;var r=e.$$ref.match(/\S*\/(\S+)$/);e.xml.name=r[1]}return Object(ie.memoizedCreateXMLExample)(e,t,n)},De=[{when:/json/,shouldStringifyTypes:["string"]}],Le=["object"],Be=function(e,t,n,r){var a=Object(ie.memoizedSampleFromSchema)(e,t,r),u=i()(a),s=S()(De).call(De,(function(e,t){var r;return t.when.test(n)?v()(r=[]).call(r,o()(e),o()(t.shouldStringifyTypes)):e}),Le);return te()(s,(function(e){return e===u}))?M()(a,null,2):a},Fe=function(e,t,n,r){var o,a=Be(e,t,n,r);try{"\n"===(o=me.a.dump(me.a.load(a),{lineWidth:-1}))[o.length-1]&&(o=T()(o).call(o,0,o.length-1))}catch(e){return console.error(e),"error: could not generate yaml example"}return o.replace(/\t/g," ")},ze=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:void 0;return e&&_e(e.toJS)&&(e=e.toJS()),r&&_e(r.toJS)&&(r=r.toJS()),/xml/.test(t)?Re(e,n,r):/(yaml|yml)/.test(t)?Fe(e,n,t,r):Be(e,n,t,r)},Ue=function(){var e={},t=ue.a.location.search;if(!t)return{};if(""!=t){var n=t.substr(1).split("&");for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(r=n[r].split("="),e[decodeURIComponent(r[0])]=r[1]&&decodeURIComponent(r[1])||"")}return e},qe=function(t){return(t instanceof 
e?t:e.from(t.toString(),"utf-8")).toString("base64")},Ve={operationsSorter:{alpha:function(e,t){return e.get("path").localeCompare(t.get("path"))},method:function(e,t){return e.get("method").localeCompare(t.get("method"))}},tagsSorter:{alpha:function(e,t){return e.localeCompare(t)}}},We=function(e){var t=[];for(var n in e){var r=e[n];void 0!==r&&""!==r&&t.push([n,"=",encodeURIComponent(r).replace(/%20/g,"+")].join(""))}return t.join("&")},He=function(e,t,n){return!!X()(n,(function(n){return re()(e[n],t[n])}))};function $e(e){return"string"!=typeof e||""===e?"":Object(H.sanitizeUrl)(e)}function Je(e){return!(!e||D()(e).call(e,"localhost")>=0||D()(e).call(e,"127.0.0.1")>=0||"none"===e)}function Ke(e){if(!W.a.OrderedMap.isOrderedMap(e))return null;if(!e.size)return null;var t=B()(e).call(e,(function(e,t){return z()(t).call(t,"2")&&E()(e.get("content")||{}).length>0})),n=e.get("default")||W.a.OrderedMap(),r=(n.get("content")||W.a.OrderedMap()).keySeq().toJS().length?n:null;return t||r}var Ye=function(e){return"string"==typeof e||e instanceof String?q()(e).call(e).replace(/\s/g,"%20"):""},Ge=function(e){return ce()(Ye(e).replace(/%20/g,"_"))},Qe=function(e){return w()(e).call(e,(function(e,t){return/^x-/.test(t)}))},Ze=function(e){return w()(e).call(e,(function(e,t){return/^pattern|maxLength|minLength|maximum|minimum/.test(t)}))};function Xe(e,t){var n,r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:function(){return!0};if("object"!==i()(e)||l()(e)||null===e||!t)return e;var o=A()({},e);return y()(n=E()(o)).call(n,(function(e){e===t&&r(o[e],e)?delete o[e]:o[e]=Xe(o[e],t,r)})),o}function et(e){if("string"==typeof e)return e;if(e&&e.toJS&&(e=e.toJS()),"object"===i()(e)&&null!==e)try{return M()(e,null,2)}catch(t){return String(e)}return null==e?"":e.toString()}function tt(e){return"number"==typeof e?e.toString():e}function nt(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=t.returnAll,r=void 0!==n&&n,o=t.allowHashes,a=void 0===o||o;if(!W.a.Map.isMap(e))throw new Error("paramToIdentifier: received a non-Im.Map parameter as input");var i,u,s,c=e.get("name"),l=e.get("in"),f=[];e&&e.hashCode&&l&&c&&a&&f.push(v()(i=v()(u="".concat(l,".")).call(u,c,".hash-")).call(i,e.hashCode()));l&&c&&f.push(v()(s="".concat(l,".")).call(s,c));return f.push(c),r?f:f[0]||""}function rt(e,t){var n,r=nt(e,{returnAll:!0});return w()(n=p()(r).call(r,(function(e){return t[e]}))).call(n,(function(e){return void 0!==e}))[0]}function ot(){return it(pe()(32).toString("base64"))}function at(e){return it(de()("sha256").update(e).digest("base64"))}function it(e){return e.replace(/\+/g,"-").replace(/\//g,"_").replace(/=/g,"")}var ut=function(e){return!e||!(!ge(e)||!e.isEmpty())}}).call(this,n(129).Buffer)},function(e,t,n){var r=n(416),o=n(185),a=n(250);e.exports=function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=r(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),o(e,"prototype",{writable:!1}),t&&a(e,t)},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){var r=n(251),o=n(137),a=n(417),i=n(769);e.exports=function(e){var t=a();return function(){var n,a=o(e);if(t){var u=o(this).constructor;n=r(a,arguments,u)}else n=a.apply(this,arguments);return i(this,n)}},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t){e.exports=function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return 
e},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){e.exports=n(889)()},function(e,t,n){e.exports=n(349)},function(e,t,n){var r=n(386),o=n(627),a=n(197),i=n(387);e.exports=function(e,t){return r(e)||o(e,t)||a(e,t)||i()},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){var r=n(611),o=n(379),a=n(197),i=n(626);e.exports=function(e){return r(e)||o(e)||a(e)||i()},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){e.exports=n(350)},function(e,t,n){"use strict";var r=n(17),o=n(96),a=n(28),i=n(42),u=n(97).f,s=n(335),c=n(35),l=n(55),f=n(90),p=n(47),h=function(e){var t=function(n,r,a){if(this instanceof t){switch(arguments.length){case 0:return new e;case 1:return new e(n);case 2:return new e(n,r)}return new e(n,r,a)}return o(e,this,arguments)};return t.prototype=e.prototype,t};e.exports=function(e,t){var n,o,d,m,v,g,y,b,w=e.target,x=e.global,E=e.stat,_=e.proto,S=x?r:E?r[w]:(r[w]||{}).prototype,k=x?c:c[w]||f(c,w,{})[w],A=k.prototype;for(d in t)n=!s(x?d:w+(E?".":"#")+d,e.forced)&&S&&p(S,d),v=k[d],n&&(g=e.noTargetGet?(b=u(S,d))&&b.value:S[d]),m=n&&g?g:t[d],n&&typeof v==typeof m||(y=e.bind&&n?l(m,r):e.wrap&&n?h(m):_&&i(m)?a(m):m,(e.sham||m&&m.sham||v&&v.sham)&&f(y,"sham",!0),f(k,d,y),_&&(p(c,o=w+"Prototype")||f(c,o,{}),f(c[o],d,m),e.real&&A&&!A[d]&&f(A,d,m)))}},function(e,t,n){(function(t){var n=function(e){return e&&e.Math==Math&&e};e.exports=n("object"==typeof globalThis&&globalThis)||n("object"==typeof window&&window)||n("object"==typeof self&&self)||n("object"==typeof t&&t)||function(){return this}()||Function("return this")()}).call(this,n(61))},function(e,t,n){"use strict";n.d(t,"a",(function(){return s}));var r="NOT_FOUND";var o=function(e,t){return e===t};function a(e,t){var n,a,i="object"==typeof t?t:{equalityCheck:t},u=i.equalityCheck,s=void 0===u?o:u,c=i.maxSize,l=void 0===c?1:c,f=i.resultEqualityCheck,p=function(e){return function(t,n){if(null===t||null===n||t.length!==n.length)return!1;for(var r=t.length,o=0;o-1){var a=n[o];return o>0&&(n.splice(o,1),n.unshift(a)),a.value}return r}return{get:o,put:function(t,a){o(t)===r&&(n.unshift({key:t,value:a}),n.length>e&&n.pop())},getEntries:function(){return n},clear:function(){n=[]}}}(l,p);function d(){var t=h.get(arguments);if(t===r){if(t=e.apply(null,arguments),f){var n=h.getEntries(),o=n.find((function(e){return f(e.value,t)}));o&&(t=o.value)}h.put(arguments,t)}return t}return d.clearCache=function(){return h.clear()},d}function i(e){var t=Array.isArray(e[0])?e[0]:e;if(!t.every((function(e){return"function"==typeof e}))){var n=t.map((function(e){return"function"==typeof e?"function "+(e.name||"unnamed")+"()":typeof e})).join(", ");throw new Error("createSelector expects all input-selectors to be functions, but received the following types: ["+n+"]")}return t}function u(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),r=1;r>",i=function(){invariant(!1,"ImmutablePropTypes type checking code is stripped in production.")};i.isRequired=i;var u=function(){return i};function s(e){var t=typeof e;return Array.isArray(e)?"array":e instanceof RegExp?"object":e instanceof o.Iterable?"Immutable."+e.toSource().split(" ")[0]:t}function c(e){function t(t,n,r,o,i,u){for(var s=arguments.length,c=Array(s>6?s-6:0),l=6;l4)}function l(e){var t=e.get("swagger");return"string"==typeof t&&i()(t).call(t,"2.0")}function f(e){return function(t,n){return function(r){return 
n&&n.specSelectors&&n.specSelectors.specJson?c(n.specSelectors.specJson())?s.a.createElement(e,o()({},r,n,{Ori:t})):s.a.createElement(t,r):(console.warn("OAS3 wrapper: couldn't get spec"),null)}}}},function(e,t,n){var r=n(17),o=n(221),a=n(47),i=n(179),u=n(219),s=n(333),c=o("wks"),l=r.Symbol,f=l&&l.for,p=s?l:l&&l.withoutSetter||i;e.exports=function(e){if(!a(c,e)||!u&&"string"!=typeof c[e]){var t="Symbol."+e;u&&a(l,e)?c[e]=l[e]:c[e]=s&&f?f(t):p(t)}return c[e]}},function(e,t,n){var r=n(35);e.exports=function(e){return r[e+"Prototype"]}},function(e,t){e.exports=function(e){return"function"==typeof e}},function(e,t,n){var r=n(245);e.exports=function(e,t,n){var o=null==e?void 0:r(e,t);return void 0===o?n:o}},function(e,t,n){e.exports=n(872)},function(e,t,n){var r=n(42);e.exports=function(e){return"object"==typeof e?null!==e:r(e)}},function(e,t,n){var r=n(17),o=n(42),a=n(178),i=r.TypeError;e.exports=function(e){if(o(e))return e;throw i(a(e)+" is not a function")}},function(e,t,n){var r=n(28),o=n(62),a=r({}.hasOwnProperty);e.exports=Object.hasOwn||function(e,t){return a(o(e),t)}},function(e,t,n){var r=n(35),o=n(47),a=n(231),i=n(63).f;e.exports=function(e){var t=r.Symbol||(r.Symbol={});o(t,e)||i(t,e,{value:a.f(e)})}},function(e,t,n){var r=n(17),o=n(55),a=n(37),i=n(33),u=n(178),s=n(384),c=n(67),l=n(36),f=n(159),p=n(158),h=n(383),d=r.TypeError,m=function(e,t){this.stopped=e,this.result=t},v=m.prototype;e.exports=function(e,t,n){var r,g,y,b,w,x,E,_=n&&n.that,S=!(!n||!n.AS_ENTRIES),k=!(!n||!n.IS_ITERATOR),A=!(!n||!n.INTERRUPTED),O=o(t,_),C=function(e){return r&&h(r,"normal",e),new m(!0,e)},j=function(e){return S?(i(e),A?O(e[0],e[1],C):O(e[0],e[1])):A?O(e,C):O(e)};if(k)r=e;else{if(!(g=p(e)))throw d(u(e)+" is not iterable");if(s(g)){for(y=0,b=c(e);b>y;y++)if((w=j(e[y]))&&l(v,w))return w;return new m(!1)}r=f(e,g)}for(x=r.next;!(E=a(x,r)).done;){try{w=j(E.value)}catch(e){h(r,"throw",e)}if("object"==typeof w&&w&&l(v,w))return w}return new m(!1)}},function(e,t,n){e.exports=n(353)},function(e,t,n){var r=n(34);e.exports=!r((function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]}))},function(e,t,n){e.exports=n(656)},function(e,t,n){"use strict";n.r(t),n.d(t,"UPDATE_SPEC",(function(){return ee})),n.d(t,"UPDATE_URL",(function(){return te})),n.d(t,"UPDATE_JSON",(function(){return ne})),n.d(t,"UPDATE_PARAM",(function(){return re})),n.d(t,"UPDATE_EMPTY_PARAM_INCLUSION",(function(){return oe})),n.d(t,"VALIDATE_PARAMS",(function(){return ae})),n.d(t,"SET_RESPONSE",(function(){return ie})),n.d(t,"SET_REQUEST",(function(){return ue})),n.d(t,"SET_MUTATED_REQUEST",(function(){return se})),n.d(t,"LOG_REQUEST",(function(){return ce})),n.d(t,"CLEAR_RESPONSE",(function(){return le})),n.d(t,"CLEAR_REQUEST",(function(){return fe})),n.d(t,"CLEAR_VALIDATE_PARAMS",(function(){return pe})),n.d(t,"UPDATE_OPERATION_META_VALUE",(function(){return he})),n.d(t,"UPDATE_RESOLVED",(function(){return de})),n.d(t,"UPDATE_RESOLVED_SUBTREE",(function(){return me})),n.d(t,"SET_SCHEME",(function(){return ve})),n.d(t,"updateSpec",(function(){return ge})),n.d(t,"updateResolved",(function(){return ye})),n.d(t,"updateUrl",(function(){return be})),n.d(t,"updateJsonSpec",(function(){return we})),n.d(t,"parseToJson",(function(){return xe})),n.d(t,"resolveSpec",(function(){return _e})),n.d(t,"requestResolvedSubtree",(function(){return Ae})),n.d(t,"changeParam",(function(){return Oe})),n.d(t,"changeParamByIdentity",(function(){return Ce})),n.d(t,"updateResolvedSubtree",(function(){return 
je})),n.d(t,"invalidateResolvedSubtreeCache",(function(){return Te})),n.d(t,"validateParams",(function(){return Ie})),n.d(t,"updateEmptyParamInclusion",(function(){return Pe})),n.d(t,"clearValidateParams",(function(){return Ne})),n.d(t,"changeConsumesValue",(function(){return Me})),n.d(t,"changeProducesValue",(function(){return Re})),n.d(t,"setResponse",(function(){return De})),n.d(t,"setRequest",(function(){return Le})),n.d(t,"setMutatedRequest",(function(){return Be})),n.d(t,"logRequest",(function(){return Fe})),n.d(t,"executeRequest",(function(){return ze})),n.d(t,"execute",(function(){return Ue})),n.d(t,"clearResponse",(function(){return qe})),n.d(t,"clearRequest",(function(){return Ve})),n.d(t,"setScheme",(function(){return We}));var r=n(25),o=n.n(r),a=n(59),i=n.n(a),u=n(79),s=n.n(u),c=n(19),l=n.n(c),f=n(44),p=n.n(f),h=n(22),d=n.n(h),m=n(4),v=n.n(m),g=n(323),y=n.n(g),b=n(31),w=n.n(b),x=n(140),E=n.n(x),_=n(72),S=n.n(_),k=n(12),A=n.n(k),O=n(207),C=n.n(O),j=n(20),T=n.n(j),I=n(23),P=n.n(I),N=n(2),M=n.n(N),R=n(15),D=n.n(R),L=n(24),B=n.n(L),F=n(324),z=n.n(F),U=n(76),q=n(1),V=n(92),W=n.n(V),H=n(138),$=n(455),J=n.n($),K=n(456),Y=n.n(K),G=n(325),Q=n.n(G),Z=n(7),X=["path","method"],ee="spec_update_spec",te="spec_update_url",ne="spec_update_json",re="spec_update_param",oe="spec_update_empty_param_inclusion",ae="spec_validate_param",ie="spec_set_response",ue="spec_set_request",se="spec_set_mutated_request",ce="spec_log_request",le="spec_clear_response",fe="spec_clear_request",pe="spec_clear_validate_param",he="spec_update_operation_meta_value",de="spec_update_resolved",me="spec_update_resolved_subtree",ve="set_scheme";function ge(e){var t,n=(t=e,J()(t)?t:"").replace(/\t/g," ");if("string"==typeof e)return{type:ee,payload:n}}function ye(e){return{type:de,payload:e}}function be(e){return{type:te,payload:e}}function we(e){return{type:ne,payload:e}}var xe=function(e){return function(t){var n=t.specActions,r=t.specSelectors,o=t.errActions,a=r.specStr,i=null;try{e=e||a(),o.clear({source:"parser"}),i=U.a.load(e)}catch(e){return console.error(e),o.newSpecErr({source:"parser",level:"error",message:e.reason,line:e.mark&&e.mark.line?e.mark.line+1:void 0})}return i&&"object"===l()(i)?n.updateJsonSpec(i):{}}},Ee=!1,_e=function(e,t){return function(n){var r=n.specActions,o=n.specSelectors,a=n.errActions,i=n.fn,u=i.fetch,s=i.resolve,c=i.AST,l=void 0===c?{}:c,f=n.getConfigs;Ee||(console.warn("specActions.resolveSpec is deprecated since v3.10.0 and will be removed in v4.0.0; use requestResolvedSubtree instead!"),Ee=!0);var p=f(),h=p.modelPropertyMacro,m=p.parameterMacro,g=p.requestInterceptor,b=p.responseInterceptor;void 0===e&&(e=o.specJson()),void 0===t&&(t=o.url());var w=l.getLineNumberForPath?l.getLineNumberForPath:function(){},x=o.specStr();return s({fetch:u,spec:e,baseDoc:t,modelPropertyMacro:h,parameterMacro:m,requestInterceptor:g,responseInterceptor:b}).then((function(e){var t=e.spec,n=e.errors;if(a.clear({type:"thrown"}),d()(n)&&n.length>0){var o=v()(n).call(n,(function(e){return console.error(e),e.line=e.fullPath?w(x,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",y()(e,"message",{enumerable:!0,value:e.message}),e}));a.newThrownErrBatch(o)}return r.updateResolved(t)}))}},Se=[],ke=Y()(s()(p.a.mark((function e(){var t,n,r,o,a,i,u,c,l,f,h,m,g,b,x,_,k,O;return p.a.wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if(t=Se.system){e.next=4;break}return console.error("debResolveSubtrees: don't have a system to operate on, 
aborting."),e.abrupt("return");case 4:if(n=t.errActions,r=t.errSelectors,o=t.fn,a=o.resolveSubtree,i=o.fetch,u=o.AST,c=void 0===u?{}:u,l=t.specSelectors,f=t.specActions,a){e.next=8;break}return console.error("Error: Swagger-Client did not provide a `resolveSubtree` method, doing nothing."),e.abrupt("return");case 8:return h=c.getLineNumberForPath?c.getLineNumberForPath:function(){},m=l.specStr(),g=t.getConfigs(),b=g.modelPropertyMacro,x=g.parameterMacro,_=g.requestInterceptor,k=g.responseInterceptor,e.prev=11,e.next=14,w()(Se).call(Se,function(){var e=s()(p.a.mark((function e(t,o){var u,c,f,g,w,O,j,T,I;return p.a.wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return e.next=2,t;case 2:return u=e.sent,c=u.resultMap,f=u.specWithCurrentSubtrees,e.next=7,a(f,o,{baseDoc:l.url(),modelPropertyMacro:b,parameterMacro:x,requestInterceptor:_,responseInterceptor:k});case 7:if(g=e.sent,w=g.errors,O=g.spec,r.allErrors().size&&n.clearBy((function(e){var t;return"thrown"!==e.get("type")||"resolver"!==e.get("source")||!E()(t=e.get("fullPath")).call(t,(function(e,t){return e===o[t]||void 0===o[t]}))})),d()(w)&&w.length>0&&(j=v()(w).call(w,(function(e){return e.line=e.fullPath?h(m,e.fullPath):null,e.path=e.fullPath?e.fullPath.join("."):null,e.level="error",e.type="thrown",e.source="resolver",y()(e,"message",{enumerable:!0,value:e.message}),e})),n.newThrownErrBatch(j)),!O||!l.isOAS3()||"components"!==o[0]||"securitySchemes"!==o[1]){e.next=15;break}return e.next=15,S.a.all(v()(T=A()(I=C()(O)).call(I,(function(e){return"openIdConnect"===e.type}))).call(T,function(){var e=s()(p.a.mark((function e(t){var n,r;return p.a.wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return n={url:t.openIdConnectUrl,requestInterceptor:_,responseInterceptor:k},e.prev=1,e.next=4,i(n);case 4:(r=e.sent)instanceof Error||r.status>=400?console.error(r.statusText+" "+n.url):t.openIdConnectData=JSON.parse(r.text),e.next=11;break;case 8:e.prev=8,e.t0=e.catch(1),console.error(e.t0);case 11:case"end":return e.stop()}}),e,null,[[1,8]])})));return function(t){return e.apply(this,arguments)}}()));case 15:return Q()(c,o,O),Q()(f,o,O),e.abrupt("return",{resultMap:c,specWithCurrentSubtrees:f});case 18:case"end":return e.stop()}}),e)})));return function(t,n){return e.apply(this,arguments)}}(),S.a.resolve({resultMap:(l.specResolvedSubtree([])||Object(q.Map)()).toJS(),specWithCurrentSubtrees:l.specJson().toJS()}));case 14:O=e.sent,delete Se.system,Se=[],e.next=22;break;case 19:e.prev=19,e.t0=e.catch(11),console.error(e.t0);case 22:f.updateResolvedSubtree([],O.resultMap);case 23:case"end":return e.stop()}}),e,null,[[11,19]])}))),35),Ae=function(e){return function(t){var n;T()(n=v()(Se).call(Se,(function(e){return e.join("@@")}))).call(n,e.join("@@"))>-1||(Se.push(e),Se.system=t,ke())}};function Oe(e,t,n,r,o){return{type:re,payload:{path:e,value:r,paramName:t,paramIn:n,isXml:o}}}function Ce(e,t,n,r){return{type:re,payload:{path:e,param:t,value:n,isXml:r}}}var je=function(e,t){return{type:me,payload:{path:e,value:t}}},Te=function(){return{type:me,payload:{path:[],value:Object(q.Map)()}}},Ie=function(e,t){return{type:ae,payload:{pathMethod:e,isOAS3:t}}},Pe=function(e,t,n,r){return{type:oe,payload:{pathMethod:e,paramName:t,paramIn:n,includeEmptyValue:r}}};function Ne(e){return{type:pe,payload:{pathMethod:e}}}function Me(e,t){return{type:he,payload:{path:e,value:t,key:"consumes_value"}}}function Re(e,t){return{type:he,payload:{path:e,value:t,key:"produces_value"}}}var 
De=function(e,t,n){return{payload:{path:e,method:t,res:n},type:ie}},Le=function(e,t,n){return{payload:{path:e,method:t,req:n},type:ue}},Be=function(e,t,n){return{payload:{path:e,method:t,req:n},type:se}},Fe=function(e){return{payload:e,type:ce}},ze=function(e){return function(t){var n,r,o=t.fn,a=t.specActions,i=t.specSelectors,u=t.getConfigs,c=t.oas3Selectors,l=e.pathName,f=e.method,h=e.operation,m=u(),g=m.requestInterceptor,y=m.responseInterceptor,b=h.toJS();h&&h.get("parameters")&&P()(n=A()(r=h.get("parameters")).call(r,(function(e){return e&&!0===e.get("allowEmptyValue")}))).call(n,(function(t){if(i.parameterInclusionSettingFor([l,f],t.get("name"),t.get("in"))){e.parameters=e.parameters||{};var n=Object(Z.B)(t,e.parameters);(!n||n&&0===n.size)&&(e.parameters[t.get("name")]="")}}));if(e.contextUrl=W()(i.url()).toString(),b&&b.operationId?e.operationId=b.operationId:b&&l&&f&&(e.operationId=o.opId(b,l,f)),i.isOAS3()){var w,x=M()(w="".concat(l,":")).call(w,f);e.server=c.selectedServer(x)||c.selectedServer();var E=c.serverVariables({server:e.server,namespace:x}).toJS(),_=c.serverVariables({server:e.server}).toJS();e.serverVariables=D()(E).length?E:_,e.requestContentType=c.requestContentType(l,f),e.responseContentType=c.responseContentType(l,f)||"*/*";var S,k=c.requestBodyValue(l,f),O=c.requestBodyInclusionSetting(l,f);if(k&&k.toJS)e.requestBody=A()(S=v()(k).call(k,(function(e){return q.Map.isMap(e)?e.get("value"):e}))).call(S,(function(e,t){return(d()(e)?0!==e.length:!Object(Z.q)(e))||O.get(t)})).toJS();else e.requestBody=k}var C=B()({},e);C=o.buildRequest(C),a.setRequest(e.pathName,e.method,C);var j=function(){var t=s()(p.a.mark((function t(n){var r,o;return p.a.wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,g.apply(undefined,[n]);case 2:return r=t.sent,o=B()({},r),a.setMutatedRequest(e.pathName,e.method,o),t.abrupt("return",r);case 6:case"end":return t.stop()}}),t)})));return function(e){return t.apply(this,arguments)}}();e.requestInterceptor=j,e.responseInterceptor=y;var T=z()();return o.execute(e).then((function(t){t.duration=z()()-T,a.setResponse(e.pathName,e.method,t)})).catch((function(t){"Failed to fetch"===t.message&&(t.name="",t.message='**Failed to fetch.** \n**Possible Reasons:** \n - CORS \n - Network Failure \n - URL scheme must be "http" or "https" for CORS request.'),a.setResponse(e.pathName,e.method,{error:!0,err:Object(H.serializeError)(t)})}))}},Ue=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.path,n=e.method,r=i()(e,X);return function(e){var a=e.fn.fetch,i=e.specSelectors,u=e.specActions,s=i.specJsonWithResolvedSubtrees().toJS(),c=i.operationScheme(t,n),l=i.contentTypeValues([t,n]).toJS(),f=l.requestContentType,p=l.responseContentType,h=/xml/i.test(f),d=i.parameterValues([t,n],h).toJS();return u.executeRequest(o()(o()({},r),{},{fetch:a,spec:s,pathName:t,method:n,parameters:d,requestContentType:f,scheme:c,responseContentType:p}))}};function qe(e,t){return{type:le,payload:{path:e,method:t}}}function Ve(e,t){return{type:fe,payload:{path:e,method:t}}}function We(e,t,n){return{type:ve,payload:{scheme:e,path:t,method:n}}}},function(e,t,n){var r;!function(){"use strict";var n={}.hasOwnProperty;function o(){for(var e=[],t=0;t=e.length?{done:!0}:{done:!1,value:e[u++]}},e:function(e){throw e},f:s}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var c,l=!0,f=!1;return{s:function(){n=n.call(e)},n:function(){var 
e=n.next();return l=e.done,e},e:function(e){f=!0,c=e},f:function(){try{l||null==n.return||n.return()}finally{if(f)throw c}}}},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){var r=n(35),o=n(17),a=n(42),i=function(e){return a(e)?e:void 0};e.exports=function(e,t){return arguments.length<2?i(r[e])||i(o[e]):r[e]&&r[e][t]||o[e]&&o[e][t]}},function(e,t){var n=Array.isArray;e.exports=n},function(e,t,n){var r=n(426),o=n(253),a=n(850);e.exports=function(e,t){if(null==e)return{};var n,i,u=a(e,t);if(r){var s=r(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(u[n]=e[n])}return u},e.exports.__esModule=!0,e.exports.default=e.exports},function(e,t,n){"use strict";n.r(t),n.d(t,"UPDATE_SELECTED_SERVER",(function(){return r})),n.d(t,"UPDATE_REQUEST_BODY_VALUE",(function(){return o})),n.d(t,"UPDATE_REQUEST_BODY_VALUE_RETAIN_FLAG",(function(){return a})),n.d(t,"UPDATE_REQUEST_BODY_INCLUSION",(function(){return i})),n.d(t,"UPDATE_ACTIVE_EXAMPLES_MEMBER",(function(){return u})),n.d(t,"UPDATE_REQUEST_CONTENT_TYPE",(function(){return s})),n.d(t,"UPDATE_RESPONSE_CONTENT_TYPE",(function(){return c})),n.d(t,"UPDATE_SERVER_VARIABLE_VALUE",(function(){return l})),n.d(t,"SET_REQUEST_BODY_VALIDATE_ERROR",(function(){return f})),n.d(t,"CLEAR_REQUEST_BODY_VALIDATE_ERROR",(function(){return p})),n.d(t,"CLEAR_REQUEST_BODY_VALUE",(function(){return h})),n.d(t,"setSelectedServer",(function(){return d})),n.d(t,"setRequestBodyValue",(function(){return m})),n.d(t,"setRetainRequestBodyValueFlag",(function(){return v})),n.d(t,"setRequestBodyInclusion",(function(){return g})),n.d(t,"setActiveExamplesMember",(function(){return y})),n.d(t,"setRequestContentType",(function(){return b})),n.d(t,"setResponseContentType",(function(){return w})),n.d(t,"setServerVariableValue",(function(){return x})),n.d(t,"setRequestBodyValidateError",(function(){return E})),n.d(t,"clearRequestBodyValidateError",(function(){return _})),n.d(t,"initRequestBodyValidateError",(function(){return S})),n.d(t,"clearRequestBodyValue",(function(){return k}));var r="oas3_set_servers",o="oas3_set_request_body_value",a="oas3_set_request_body_retain_flag",i="oas3_set_request_body_inclusion",u="oas3_set_active_examples_member",s="oas3_set_request_content_type",c="oas3_set_response_content_type",l="oas3_set_server_variable_value",f="oas3_set_request_body_validate_error",p="oas3_clear_request_body_validate_error",h="oas3_clear_request_body_value";function d(e,t){return{type:r,payload:{selectedServerUrl:e,namespace:t}}}function m(e){var t=e.value,n=e.pathMethod;return{type:o,payload:{value:t,pathMethod:n}}}var v=function(e){var t=e.value,n=e.pathMethod;return{type:a,payload:{value:t,pathMethod:n}}};function g(e){var t=e.value,n=e.pathMethod,r=e.name;return{type:i,payload:{value:t,pathMethod:n,name:r}}}function y(e){var t=e.name,n=e.pathMethod,r=e.contextType,o=e.contextName;return{type:u,payload:{name:t,pathMethod:n,contextType:r,contextName:o}}}function b(e){var t=e.value,n=e.pathMethod;return{type:s,payload:{value:t,pathMethod:n}}}function w(e){var t=e.value,n=e.path,r=e.method;return{type:c,payload:{value:t,path:n,method:r}}}function x(e){var t=e.server,n=e.namespace,r=e.key,o=e.val;return{type:l,payload:{server:t,namespace:n,key:r,val:o}}}var E=function(e){var t=e.path,n=e.method,r=e.validationErrors;return{type:f,payload:{path:t,method:n,validationErrors:r}}},_=function(e){var t=e.path,n=e.method;return{type:p,payload:{path:t,method:n}}},S=function(e){var 
t=e.pathMethod;return{type:p,payload:{path:t[0],method:t[1]}}},k=function(e){var t=e.pathMethod;return{type:h,payload:{pathMethod:t}}}},function(e,t){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(e){"object"==typeof window&&(n=window)}e.exports=n},function(e,t,n){var r=n(17),o=n(114),a=r.Object;e.exports=function(e){return a(o(e))}},function(e,t,n){var r=n(17),o=n(51),a=n(334),i=n(336),u=n(33),s=n(177),c=r.TypeError,l=Object.defineProperty,f=Object.getOwnPropertyDescriptor,p="enumerable",h="configurable",d="writable";t.f=o?i?function(e,t,n){if(u(e),t=s(t),u(n),"function"==typeof e&&"prototype"===t&&"value"in n&&d in n&&!n.writable){var r=f(e,t);r&&r.writable&&(e[t]=n.value,n={configurable:h in n?n.configurable:r.configurable,enumerable:p in n?n.enumerable:r.enumerable,writable:!1})}return l(e,t,n)}:l:function(e,t,n){if(u(e),t=s(t),u(n),a)try{return l(e,t,n)}catch(e){}if("get"in n||"set"in n)throw c("Accessors not supported");return"value"in n&&(e[t]=n.value),e}},function(e,t,n){"use strict";n.d(t,"b",(function(){return m})),n.d(t,"e",(function(){return v})),n.d(t,"c",(function(){return y})),n.d(t,"a",(function(){return b})),n.d(t,"d",(function(){return w}));var r=n(56),o=n.n(r),a=n(19),i=n.n(a),u=n(109),s=n.n(u),c=n(2),l=n.n(c),f=n(32),p=n.n(f),h=function(e){return String.prototype.toLowerCase.call(e)},d=function(e){return e.replace(/[^\w]/gi,"_")};function m(e){var t=e.openapi;return!!t&&s()(t).call(t,"3")}function v(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"",r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},o=r.v2OperationIdCompatibilityMode;if(!e||"object"!==i()(e))return null;var a=(e.operationId||"").replace(/\s/g,"");return a.length?d(e.operationId):g(t,n,{v2OperationIdCompatibilityMode:o})}function g(e,t){var n,r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},o=r.v2OperationIdCompatibilityMode;if(o){var a,i,u=l()(a="".concat(t.toLowerCase(),"_")).call(a,e).replace(/[\s!@#$%^&*()_+=[{\]};:<>|./?,\\'""-]/g,"_");return(u=u||l()(i="".concat(e.substring(1),"_")).call(i,t)).replace(/((_){2,})/g,"_").replace(/^(_)*/g,"").replace(/([_])*$/g,"")}return l()(n="".concat(h(t))).call(n,d(e))}function y(e,t){var n;return l()(n="".concat(h(t),"-")).call(n,e)}function b(e,t){return e&&e.paths?function(e,t){return function(e,t,n){if(!e||"object"!==i()(e)||!e.paths||"object"!==i()(e.paths))return null;var r=e.paths;for(var o in r)for(var a in r[o])if("PARAMETERS"!==a.toUpperCase()){var u=r[o][a];if(u&&"object"===i()(u)){var s={spec:e,pathName:o,method:a.toUpperCase(),operation:u},c=t(s);if(n&&c)return s}}return}(e,t,!0)||null}(e,(function(e){var n=e.pathName,r=e.method,o=e.operation;if(!o||"object"!==i()(o))return!1;var a=o.operationId;return[v(o,n,r),y(n,r),a].some((function(e){return e&&e===t}))})):null}function w(e){var t=e.spec,n=t.paths,r={};if(!n||t.$$normalized)return e;for(var a in n){var u,s=n[a];if(null!=s&&p()(u=["object","function"]).call(u,i()(s))){var c=s.parameters,f=function(e){var n,u=s[e];if(null==u||!p()(n=["object","function"]).call(n,i()(u)))return"continue";var f=v(u,a,e);if(f){r[f]?r[f].push(u):r[f]=[u];var h=r[f];if(h.length>1)h.forEach((function(e,t){var n;e.__originalOperationId=e.__originalOperationId||e.operationId,e.operationId=l()(n="".concat(f)).call(n,t+1)}));else if(void 0!==u.operationId){var d=h[0];d.__originalOperationId=d.__originalOperationId||u.operationId,d.operationId=f}}if("parameters"!==e){var m=[],g={};for(var y in 
t)"produces"!==y&&"consumes"!==y&&"security"!==y||(g[y]=t[y],m.push(g));if(c&&(g.parameters=c,m.push(g)),m.length){var b,w=o()(m);try{for(w.s();!(b=w.n()).done;){var x=b.value;for(var E in x)if(u[E]){if("parameters"===E){var _,S=o()(x[E]);try{var k=function(){var e=_.value;u[E].some((function(t){return t.name&&t.name===e.name||t.$ref&&t.$ref===e.$ref||t.$$ref&&t.$$ref===e.$$ref||t===e}))||u[E].push(e)};for(S.s();!(_=S.n()).done;)k()}catch(e){S.e(e)}finally{S.f()}}}else u[E]=x[E]}}catch(e){w.e(e)}finally{w.f()}}}};for(var h in s)f(h)}}return t.$$normalized=!0,e}},function(e,t,n){"use strict";n.r(t),n.d(t,"NEW_THROWN_ERR",(function(){return o})),n.d(t,"NEW_THROWN_ERR_BATCH",(function(){return a})),n.d(t,"NEW_SPEC_ERR",(function(){return i})),n.d(t,"NEW_SPEC_ERR_BATCH",(function(){return u})),n.d(t,"NEW_AUTH_ERR",(function(){return s})),n.d(t,"CLEAR",(function(){return c})),n.d(t,"CLEAR_BY",(function(){return l})),n.d(t,"newThrownErr",(function(){return f})),n.d(t,"newThrownErrBatch",(function(){return p})),n.d(t,"newSpecErr",(function(){return h})),n.d(t,"newSpecErrBatch",(function(){return d})),n.d(t,"newAuthErr",(function(){return m})),n.d(t,"clear",(function(){return v})),n.d(t,"clearBy",(function(){return g}));var r=n(138),o="err_new_thrown_err",a="err_new_thrown_err_batch",i="err_new_spec_err",u="err_new_spec_err_batch",s="err_new_auth_err",c="err_clear",l="err_clear_by";function f(e){return{type:o,payload:Object(r.serializeError)(e)}}function p(e){return{type:a,payload:e}}function h(e){return{type:i,payload:e}}function d(e){return{type:u,payload:e}}function m(e){return{type:s,payload:e}}function v(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return{type:c,payload:e}}function g(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:function(){return!0};return{type:l,payload:e}}},function(e,t,n){var r=n(176),o=n(114);e.exports=function(e){return r(o(e))}},function(e,t,n){var r=n(337);e.exports=function(e){return r(e.length)}},function(e,t){e.exports=function(e){var t=typeof e;return null!=e&&("object"==t||"function"==t)}},function(e,t){"function"==typeof Object.create?e.exports=function(e,t){t&&(e.super_=t,e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}))}:e.exports=function(e,t){if(t){e.super_=t;var n=function(){};n.prototype=t.prototype,e.prototype=new n,e.prototype.constructor=e}}},function(e,t,n){var r=n(129),o=r.Buffer;function a(e,t){for(var n in e)t[n]=e[n]}function i(e,t,n){return o(e,t,n)}o.from&&o.alloc&&o.allocUnsafe&&o.allocUnsafeSlow?e.exports=r:(a(r,t),t.Buffer=i),a(o,i),i.from=function(e,t,n){if("number"==typeof e)throw new TypeError("Argument must not be a number");return o(e,t,n)},i.alloc=function(e,t,n){if("number"!=typeof e)throw new TypeError("Argument must be a number");var r=o(e);return void 0!==t?"string"==typeof n?r.fill(t,n):r.fill(t):r.fill(0),r},i.allocUnsafe=function(e){if("number"!=typeof e)throw new TypeError("Argument must be a number");return o(e)},i.allocUnsafeSlow=function(e){if("number"!=typeof e)throw new TypeError("Argument must be a number");return r.SlowBuffer(e)}},function(e,t,n){e.exports=n(381)},function(e,t,n){e.exports=n(428)},function(e,t,n){var r=n(17),o=n(80),a=r.String;e.exports=function(e){if("Symbol"===o(e))throw TypeError("Cannot convert a Symbol value to a string");return a(e)}},function(e,t,n){n(83);var r=n(506),o=n(17),a=n(80),i=n(90),u=n(127),s=n(40)("toStringTag");for(var c in r){var 
l=o[c],f=l&&l.prototype;f&&a(f)!==s&&i(f,s,c),u[c]=u.Array}},function(e,t,n){var r=n(361),o="object"==typeof self&&self&&self.Object===Object&&self,a=r||o||Function("return this")();e.exports=a},function(e,t,n){"use strict";function r(e){return null==e}var o={isNothing:r,isObject:function(e){return"object"==typeof e&&null!==e},toArray:function(e){return Array.isArray(e)?e:r(e)?[]:[e]},repeat:function(e,t){var n,r="";for(n=0;nu&&(t=r-u+(a=" ... ").length),n-r>u&&(n=r+u-(i=" ...").length),{str:a+e.slice(t,n).replace(/\t/g,"→")+i,pos:r-t+a.length}}function c(e,t){return o.repeat(" ",t-e.length)+e}var l=function(e,t){if(t=Object.create(t||null),!e.buffer)return null;t.maxLength||(t.maxLength=79),"number"!=typeof t.indent&&(t.indent=1),"number"!=typeof t.linesBefore&&(t.linesBefore=3),"number"!=typeof t.linesAfter&&(t.linesAfter=2);for(var n,r=/\r?\n|\r|\0/g,a=[0],i=[],u=-1;n=r.exec(e.buffer);)i.push(n.index),a.push(n.index+n[0].length),e.position<=n.index&&u<0&&(u=a.length-2);u<0&&(u=a.length-1);var l,f,p="",h=Math.min(e.line+t.linesAfter,i.length).toString().length,d=t.maxLength-(t.indent+h+3);for(l=1;l<=t.linesBefore&&!(u-l<0);l++)f=s(e.buffer,a[u-l],i[u-l],e.position-(a[u]-a[u-l]),d),p=o.repeat(" ",t.indent)+c((e.line-l+1).toString(),h)+" | "+f.str+"\n"+p;for(f=s(e.buffer,a[u],i[u],e.position,d),p+=o.repeat(" ",t.indent)+c((e.line+1).toString(),h)+" | "+f.str+"\n",p+=o.repeat("-",t.indent+h+3+f.pos)+"^\n",l=1;l<=t.linesAfter&&!(u+l>=i.length);l++)f=s(e.buffer,a[u+l],i[u+l],e.position-(a[u]-a[u+l]),d),p+=o.repeat(" ",t.indent)+c((e.line+l+1).toString(),h)+" | "+f.str+"\n";return p.replace(/\n$/,"")},f=["kind","multi","resolve","construct","instanceOf","predicate","represent","representName","defaultStyle","styleAliases"],p=["scalar","sequence","mapping"];var h=function(e,t){if(t=t||{},Object.keys(t).forEach((function(t){if(-1===f.indexOf(t))throw new u('Unknown option "'+t+'" is met in definition of "'+e+'" YAML type.')})),this.options=t,this.tag=e,this.kind=t.kind||null,this.resolve=t.resolve||function(){return!0},this.construct=t.construct||function(e){return e},this.instanceOf=t.instanceOf||null,this.predicate=t.predicate||null,this.represent=t.represent||null,this.representName=t.representName||null,this.defaultStyle=t.defaultStyle||null,this.multi=t.multi||!1,this.styleAliases=function(e){var t={};return null!==e&&Object.keys(e).forEach((function(n){e[n].forEach((function(e){t[String(e)]=n}))})),t}(t.styleAliases||null),-1===p.indexOf(this.kind))throw new u('Unknown kind "'+this.kind+'" is specified for "'+e+'" YAML type.')};function d(e,t){var n=[];return e[t].forEach((function(e){var t=n.length;n.forEach((function(n,r){n.tag===e.tag&&n.kind===e.kind&&n.multi===e.multi&&(t=r)})),n[t]=e})),n}function m(e){return this.extend(e)}m.prototype.extend=function(e){var t=[],n=[];if(e instanceof h)n.push(e);else if(Array.isArray(e))n=n.concat(e);else{if(!e||!Array.isArray(e.implicit)&&!Array.isArray(e.explicit))throw new u("Schema.extend argument should be a Type, [ Type ], or a schema definition ({ implicit: [...], explicit: [...] })");e.implicit&&(t=t.concat(e.implicit)),e.explicit&&(n=n.concat(e.explicit))}t.forEach((function(e){if(!(e instanceof h))throw new u("Specified list of YAML types (or a single Type object) contains a non-Type object.");if(e.loadKind&&"scalar"!==e.loadKind)throw new u("There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.");if(e.multi)throw new u("There is a multi type in the implicit list of a schema. 
Multi tags can only be listed as explicit.")})),n.forEach((function(e){if(!(e instanceof h))throw new u("Specified list of YAML types (or a single Type object) contains a non-Type object.")}));var r=Object.create(m.prototype);return r.implicit=(this.implicit||[]).concat(t),r.explicit=(this.explicit||[]).concat(n),r.compiledImplicit=d(r,"implicit"),r.compiledExplicit=d(r,"explicit"),r.compiledTypeMap=function(){var e,t,n={scalar:{},sequence:{},mapping:{},fallback:{},multi:{scalar:[],sequence:[],mapping:[],fallback:[]}};function r(e){e.multi?(n.multi[e.kind].push(e),n.multi.fallback.push(e)):n[e.kind][e.tag]=n.fallback[e.tag]=e}for(e=0,t=arguments.length;e=0?"0b"+e.toString(2):"-0b"+e.toString(2).slice(1)},octal:function(e){return e>=0?"0o"+e.toString(8):"-0o"+e.toString(8).slice(1)},decimal:function(e){return e.toString(10)},hexadecimal:function(e){return e>=0?"0x"+e.toString(16).toUpperCase():"-0x"+e.toString(16).toUpperCase().slice(1)}},defaultStyle:"decimal",styleAliases:{binary:[2,"bin"],octal:[8,"oct"],decimal:[10,"dec"],hexadecimal:[16,"hex"]}}),A=new RegExp("^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?|[-+]?\\.(?:inf|Inf|INF)|\\.(?:nan|NaN|NAN))$");var O=/^[-+]?[0-9]+e/;var C=new h("tag:yaml.org,2002:float",{kind:"scalar",resolve:function(e){return null!==e&&!(!A.test(e)||"_"===e[e.length-1])},construct:function(e){var t,n;return n="-"===(t=e.replace(/_/g,"").toLowerCase())[0]?-1:1,"+-".indexOf(t[0])>=0&&(t=t.slice(1)),".inf"===t?1===n?Number.POSITIVE_INFINITY:Number.NEGATIVE_INFINITY:".nan"===t?NaN:n*parseFloat(t,10)},predicate:function(e){return"[object Number]"===Object.prototype.toString.call(e)&&(e%1!=0||o.isNegativeZero(e))},represent:function(e,t){var n;if(isNaN(e))switch(t){case"lowercase":return".nan";case"uppercase":return".NAN";case"camelcase":return".NaN"}else if(Number.POSITIVE_INFINITY===e)switch(t){case"lowercase":return".inf";case"uppercase":return".INF";case"camelcase":return".Inf"}else if(Number.NEGATIVE_INFINITY===e)switch(t){case"lowercase":return"-.inf";case"uppercase":return"-.INF";case"camelcase":return"-.Inf"}else if(o.isNegativeZero(e))return"-0.0";return n=e.toString(10),O.test(n)?n.replace("e",".e"):n},defaultStyle:"lowercase"}),j=w.extend({implicit:[x,E,k,C]}),T=j,I=new RegExp("^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$"),P=new RegExp("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)(?:[Tt]|[ \\t]+)([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(?:\\.([0-9]*))?(?:[ \\t]*(Z|([-+])([0-9][0-9]?)(?::([0-9][0-9]))?))?$");var N=new h("tag:yaml.org,2002:timestamp",{kind:"scalar",resolve:function(e){return null!==e&&(null!==I.exec(e)||null!==P.exec(e))},construct:function(e){var t,n,r,o,a,i,u,s,c=0,l=null;if(null===(t=I.exec(e))&&(t=P.exec(e)),null===t)throw new Error("Date resolve error");if(n=+t[1],r=+t[2]-1,o=+t[3],!t[4])return new Date(Date.UTC(n,r,o));if(a=+t[4],i=+t[5],u=+t[6],t[7]){for(c=t[7].slice(0,3);c.length<3;)c+="0";c=+c}return t[9]&&(l=6e4*(60*+t[10]+ +(t[11]||0)),"-"===t[9]&&(l=-l)),s=new Date(Date.UTC(n,r,o,a,i,u,c)),l&&s.setTime(s.getTime()-l),s},instanceOf:Date,represent:function(e){return e.toISOString()}});var M=new h("tag:yaml.org,2002:merge",{kind:"scalar",resolve:function(e){return"<<"===e||null===e}}),R="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r";var D=new h("tag:yaml.org,2002:binary",{kind:"scalar",resolve:function(e){if(null===e)return!1;var t,n,r=0,o=e.length,a=R;for(n=0;n64)){if(t<0)return!1;r+=6}return r%8==0},construct:function(e){var 
t,n,r=e.replace(/[\r\n=]/g,""),o=r.length,a=R,i=0,u=[];for(t=0;t>16&255),u.push(i>>8&255),u.push(255&i)),i=i<<6|a.indexOf(r.charAt(t));return 0===(n=o%4*6)?(u.push(i>>16&255),u.push(i>>8&255),u.push(255&i)):18===n?(u.push(i>>10&255),u.push(i>>2&255)):12===n&&u.push(i>>4&255),new Uint8Array(u)},predicate:function(e){return"[object Uint8Array]"===Object.prototype.toString.call(e)},represent:function(e){var t,n,r="",o=0,a=e.length,i=R;for(t=0;t>18&63],r+=i[o>>12&63],r+=i[o>>6&63],r+=i[63&o]),o=(o<<8)+e[t];return 0===(n=a%3)?(r+=i[o>>18&63],r+=i[o>>12&63],r+=i[o>>6&63],r+=i[63&o]):2===n?(r+=i[o>>10&63],r+=i[o>>4&63],r+=i[o<<2&63],r+=i[64]):1===n&&(r+=i[o>>2&63],r+=i[o<<4&63],r+=i[64],r+=i[64]),r}}),L=Object.prototype.hasOwnProperty,B=Object.prototype.toString;var F=new h("tag:yaml.org,2002:omap",{kind:"sequence",resolve:function(e){if(null===e)return!0;var t,n,r,o,a,i=[],u=e;for(t=0,n=u.length;t>10),56320+(e-65536&1023))}for(var ae=new Array(256),ie=new Array(256),ue=0;ue<256;ue++)ae[ue]=re(ue)?1:0,ie[ue]=re(ue);function se(e,t){this.input=e,this.filename=t.filename||null,this.schema=t.schema||W,this.onWarning=t.onWarning||null,this.legacy=t.legacy||!1,this.json=t.json||!1,this.listener=t.listener||null,this.implicitTypes=this.schema.compiledImplicit,this.typeMap=this.schema.compiledTypeMap,this.length=e.length,this.position=0,this.line=0,this.lineStart=0,this.lineIndent=0,this.firstTabInLine=-1,this.documents=[]}function ce(e,t){var n={name:e.filename,buffer:e.input.slice(0,-1),position:e.position,line:e.line,column:e.position-e.lineStart};return n.snippet=l(n),new u(t,n)}function le(e,t){throw ce(e,t)}function fe(e,t){e.onWarning&&e.onWarning.call(null,ce(e,t))}var pe={YAML:function(e,t,n){var r,o,a;null!==e.version&&le(e,"duplication of %YAML directive"),1!==n.length&&le(e,"YAML directive accepts exactly one argument"),null===(r=/^([0-9]+)\.([0-9]+)$/.exec(n[0]))&&le(e,"ill-formed argument of the YAML directive"),o=parseInt(r[1],10),a=parseInt(r[2],10),1!==o&&le(e,"unacceptable YAML version of the document"),e.version=n[0],e.checkLineBreaks=a<2,1!==a&&2!==a&&fe(e,"unsupported YAML version of the document")},TAG:function(e,t,n){var r,o;2!==n.length&&le(e,"TAG directive accepts exactly two arguments"),r=n[0],o=n[1],Y.test(r)||le(e,"ill-formed tag handle (first argument) of the TAG directive"),H.call(e.tagMap,r)&&le(e,'there is a previously declared suffix for "'+r+'" tag handle'),G.test(o)||le(e,"ill-formed tag prefix (second argument) of the TAG directive");try{o=decodeURIComponent(o)}catch(t){le(e,"tag prefix is malformed: "+o)}e.tagMap[r]=o}};function he(e,t,n,r){var o,a,i,u;if(t1&&(e.result+=o.repeat("\n",t-1))}function we(e,t){var n,r,o=e.tag,a=e.anchor,i=[],u=!1;if(-1!==e.firstTabInLine)return!1;for(null!==e.anchor&&(e.anchorMap[e.anchor]=i),r=e.input.charCodeAt(e.position);0!==r&&(-1!==e.firstTabInLine&&(e.position=e.firstTabInLine,le(e,"tab characters must not be used in indentation")),45===r)&&ee(e.input.charCodeAt(e.position+1));)if(u=!0,e.position++,ge(e,!0,-1)&&e.lineIndent<=t)i.push(null),r=e.input.charCodeAt(e.position);else if(n=e.line,_e(e,t,3,!1,!0),i.push(e.result),ge(e,!0,-1),r=e.input.charCodeAt(e.position),(e.line===n||e.lineIndent>t)&&0!==r)le(e,"bad indentation of a sequence entry");else 
if(e.lineIndentt?m=1:e.lineIndent===t?m=0:e.lineIndentt?m=1:e.lineIndent===t?m=0:e.lineIndentt)&&(g&&(i=e.line,u=e.lineStart,s=e.position),_e(e,t,4,!0,o)&&(g?m=e.result:v=e.result),g||(me(e,p,h,d,m,v,i,u,s),d=m=v=null),ge(e,!0,-1),c=e.input.charCodeAt(e.position)),(e.line===a||e.lineIndent>t)&&0!==c)le(e,"bad indentation of a mapping entry");else if(e.lineIndent=0))break;0===a?le(e,"bad explicit indentation width of a block scalar; it cannot be less than one"):l?le(e,"repeat of an indentation width identifier"):(f=t+a-1,l=!0)}if(X(i)){do{i=e.input.charCodeAt(++e.position)}while(X(i));if(35===i)do{i=e.input.charCodeAt(++e.position)}while(!Z(i)&&0!==i)}for(;0!==i;){for(ve(e),e.lineIndent=0,i=e.input.charCodeAt(e.position);(!l||e.lineIndentf&&(f=e.lineIndent),Z(i))p++;else{if(e.lineIndent0){for(o=i,a=0;o>0;o--)(i=ne(u=e.input.charCodeAt(++e.position)))>=0?a=(a<<4)+i:le(e,"expected hexadecimal character");e.result+=oe(a),e.position++}else le(e,"unknown escape sequence");n=r=e.position}else Z(u)?(he(e,n,r,!0),be(e,ge(e,!1,t)),n=r=e.position):e.position===e.lineStart&&ye(e)?le(e,"unexpected end of the document within a double quoted scalar"):(e.position++,r=e.position)}le(e,"unexpected end of the stream within a double quoted scalar")}(e,h)?g=!0:!function(e){var t,n,r;if(42!==(r=e.input.charCodeAt(e.position)))return!1;for(r=e.input.charCodeAt(++e.position),t=e.position;0!==r&&!ee(r)&&!te(r);)r=e.input.charCodeAt(++e.position);return e.position===t&&le(e,"name of an alias node must contain at least one character"),n=e.input.slice(t,e.position),H.call(e.anchorMap,n)||le(e,'unidentified alias "'+n+'"'),e.result=e.anchorMap[n],ge(e,!0,-1),!0}(e)?function(e,t,n){var r,o,a,i,u,s,c,l,f=e.kind,p=e.result;if(ee(l=e.input.charCodeAt(e.position))||te(l)||35===l||38===l||42===l||33===l||124===l||62===l||39===l||34===l||37===l||64===l||96===l)return!1;if((63===l||45===l)&&(ee(r=e.input.charCodeAt(e.position+1))||n&&te(r)))return!1;for(e.kind="scalar",e.result="",o=a=e.position,i=!1;0!==l;){if(58===l){if(ee(r=e.input.charCodeAt(e.position+1))||n&&te(r))break}else if(35===l){if(ee(e.input.charCodeAt(e.position-1)))break}else{if(e.position===e.lineStart&&ye(e)||n&&te(l))break;if(Z(l)){if(u=e.line,s=e.lineStart,c=e.lineIndent,ge(e,!1,-1),e.lineIndent>=t){i=!0,l=e.input.charCodeAt(e.position);continue}e.position=a,e.line=u,e.lineStart=s,e.lineIndent=c;break}}i&&(he(e,o,a,!1),be(e,e.line-u),o=a=e.position,i=!1),X(l)||(a=e.position+1),l=e.input.charCodeAt(++e.position)}return he(e,o,a,!1),!!e.result||(e.kind=f,e.result=p,!1)}(e,h,1===n)&&(g=!0,null===e.tag&&(e.tag="?")):(g=!0,null===e.tag&&null===e.anchor||le(e,"alias node should not have any properties")),null!==e.anchor&&(e.anchorMap[e.anchor]=e.result)):0===m&&(g=s&&we(e,d))),null===e.tag)null!==e.anchor&&(e.anchorMap[e.anchor]=e.result);else if("?"===e.tag){for(null!==e.result&&"scalar"!==e.kind&&le(e,'unacceptable node kind for ! 
tag; it should be "scalar", not "'+e.kind+'"'),c=0,l=e.implicitTypes.length;c"),null!==e.result&&p.kind!==e.kind&&le(e,"unacceptable node kind for !<"+e.tag+'> tag; it should be "'+p.kind+'", not "'+e.kind+'"'),p.resolve(e.result,e.tag)?(e.result=p.construct(e.result,e.tag),null!==e.anchor&&(e.anchorMap[e.anchor]=e.result)):le(e,"cannot resolve a node with !<"+e.tag+"> explicit tag")}return null!==e.listener&&e.listener("close",e),null!==e.tag||null!==e.anchor||g}function Se(e){var t,n,r,o,a=e.position,i=!1;for(e.version=null,e.checkLineBreaks=e.legacy,e.tagMap=Object.create(null),e.anchorMap=Object.create(null);0!==(o=e.input.charCodeAt(e.position))&&(ge(e,!0,-1),o=e.input.charCodeAt(e.position),!(e.lineIndent>0||37!==o));){for(i=!0,o=e.input.charCodeAt(++e.position),t=e.position;0!==o&&!ee(o);)o=e.input.charCodeAt(++e.position);for(r=[],(n=e.input.slice(t,e.position)).length<1&&le(e,"directive name must not be less than one character in length");0!==o;){for(;X(o);)o=e.input.charCodeAt(++e.position);if(35===o){do{o=e.input.charCodeAt(++e.position)}while(0!==o&&!Z(o));break}if(Z(o))break;for(t=e.position;0!==o&&!ee(o);)o=e.input.charCodeAt(++e.position);r.push(e.input.slice(t,e.position))}0!==o&&ve(e),H.call(pe,n)?pe[n](e,n,r):fe(e,'unknown document directive "'+n+'"')}ge(e,!0,-1),0===e.lineIndent&&45===e.input.charCodeAt(e.position)&&45===e.input.charCodeAt(e.position+1)&&45===e.input.charCodeAt(e.position+2)?(e.position+=3,ge(e,!0,-1)):i&&le(e,"directives end mark is expected"),_e(e,e.lineIndent-1,4,!1,!0),ge(e,!0,-1),e.checkLineBreaks&&J.test(e.input.slice(a,e.position))&&fe(e,"non-ASCII line breaks are interpreted as content"),e.documents.push(e.result),e.position===e.lineStart&&ye(e)?46===e.input.charCodeAt(e.position)&&(e.position+=3,ge(e,!0,-1)):e.position=55296&&r<=56319&&t+1=56320&&n<=57343?1024*(r-55296)+n-56320+65536:r}function qe(e){return/^\n* /.test(e)}function Ve(e,t,n,r,o,a,i,u){var s,c,l=0,f=null,p=!1,h=!1,d=-1!==r,m=-1,v=Be(c=Ue(e,0))&&c!==je&&!Le(c)&&45!==c&&63!==c&&58!==c&&44!==c&&91!==c&&93!==c&&123!==c&&125!==c&&35!==c&&38!==c&&42!==c&&33!==c&&124!==c&&61!==c&&62!==c&&39!==c&&34!==c&&37!==c&&64!==c&&96!==c&&function(e){return!Le(e)&&58!==e}(Ue(e,e.length-1));if(t||i)for(s=0;s=65536?s+=2:s++){if(!Be(l=Ue(e,s)))return 5;v=v&&ze(l,f,u),f=l}else{for(s=0;s=65536?s+=2:s++){if(10===(l=Ue(e,s)))p=!0,d&&(h=h||s-m-1>r&&" "!==e[m+1],m=s);else if(!Be(l))return 5;v=v&&ze(l,f,u),f=l}h=h||d&&s-m-1>r&&" "!==e[m+1]}return p||h?n>9&&qe(e)?5:i?2===a?5:2:h?4:3:!v||i||o(e)?2===a?5:2:1}function We(e,t,n,r,o){e.dump=function(){if(0===t.length)return 2===e.quotingType?'""':"''";if(!e.noCompatMode&&(-1!==Ie.indexOf(t)||Pe.test(t)))return 2===e.quotingType?'"'+t+'"':"'"+t+"'";var a=e.indent*Math.max(1,n),i=-1===e.lineWidth?-1:Math.max(Math.min(e.lineWidth,40),e.lineWidth-a),s=r||e.flowLevel>-1&&n>=e.flowLevel;switch(Ve(t,s,e.indent,i,(function(t){return function(e,t){var n,r;for(n=0,r=e.implicitTypes.length;n"+He(t,e.indent)+$e(Re(function(e,t){var n,r,o=/(\n+)([^\n]*)/g,a=(u=e.indexOf("\n"),u=-1!==u?u:e.length,o.lastIndex=u,Je(e.slice(0,u),t)),i="\n"===e[0]||" "===e[0];var u;for(;r=o.exec(e);){var s=r[1],c=r[2];n=" "===c[0],a+=s+(i||n||""===c?"":"\n")+Je(c,t),i=n}return a}(t,i),a));case 5:return'"'+function(e){for(var t,n="",r=0,o=0;o=65536?o+=2:o++)r=Ue(e,o),!(t=Te[r])&&Be(r)?(n+=e[o],r>=65536&&(n+=e[o+1])):n+=t||Ne(r);return n}(t)+'"';default:throw new u("impossible error: invalid scalar style")}}()}function He(e,t){var n=qe(e)?String(t):"",r="\n"===e[e.length-1];return 
n+(r&&("\n"===e[e.length-2]||"\n"===e)?"+":r?"":"-")+"\n"}function $e(e){return"\n"===e[e.length-1]?e.slice(0,-1):e}function Je(e,t){if(""===e||" "===e[0])return e;for(var n,r,o=/ [^ ]/g,a=0,i=0,u=0,s="";n=o.exec(e);)(u=n.index)-a>t&&(r=i>a?i:u,s+="\n"+e.slice(a,r),a=r+1),i=u;return s+="\n",e.length-a>t&&i>a?s+=e.slice(a,i)+"\n"+e.slice(i+1):s+=e.slice(a),s.slice(1)}function Ke(e,t,n,r){var o,a,i,u="",s=e.tag;for(o=0,a=n.length;o tag resolver accepts not "'+c+'" style');r=s.represent[c](t,c)}e.dump=r}return!0}return!1}function Ge(e,t,n,r,o,a,i){e.tag=null,e.dump=n,Ye(e,n,!1)||Ye(e,n,!0);var s,c=Oe.call(e.dump),l=r;r&&(r=e.flowLevel<0||e.flowLevel>t);var f,p,h="[object Object]"===c||"[object Array]"===c;if(h&&(p=-1!==(f=e.duplicates.indexOf(n))),(null!==e.tag&&"?"!==e.tag||p||2!==e.indent&&t>0)&&(o=!1),p&&e.usedDuplicates[f])e.dump="*ref_"+f;else{if(h&&p&&!e.usedDuplicates[f]&&(e.usedDuplicates[f]=!0),"[object Object]"===c)r&&0!==Object.keys(e.dump).length?(!function(e,t,n,r){var o,a,i,s,c,l,f="",p=e.tag,h=Object.keys(n);if(!0===e.sortKeys)h.sort();else if("function"==typeof e.sortKeys)h.sort(e.sortKeys);else if(e.sortKeys)throw new u("sortKeys must be a boolean or a function");for(o=0,a=h.length;o1024)&&(e.dump&&10===e.dump.charCodeAt(0)?l+="?":l+="? "),l+=e.dump,c&&(l+=De(e,t)),Ge(e,t+1,s,!0,c)&&(e.dump&&10===e.dump.charCodeAt(0)?l+=":":l+=": ",f+=l+=e.dump));e.tag=p,e.dump=f||"{}"}(e,t,e.dump,o),p&&(e.dump="&ref_"+f+e.dump)):(!function(e,t,n){var r,o,a,i,u,s="",c=e.tag,l=Object.keys(n);for(r=0,o=l.length;r1024&&(u+="? "),u+=e.dump+(e.condenseFlow?'"':"")+":"+(e.condenseFlow?"":" "),Ge(e,t,i,!1,!1)&&(s+=u+=e.dump));e.tag=c,e.dump="{"+s+"}"}(e,t,e.dump),p&&(e.dump="&ref_"+f+" "+e.dump));else if("[object Array]"===c)r&&0!==e.dump.length?(e.noArrayIndent&&!i&&t>0?Ke(e,t-1,e.dump,o):Ke(e,t,e.dump,o),p&&(e.dump="&ref_"+f+e.dump)):(!function(e,t,n){var r,o,a,i="",u=e.tag;for(r=0,o=n.length;r",e.dump=s+" "+e.dump)}return!0}function Qe(e,t){var n,r,o=[],a=[];for(Ze(e,o,a),n=0,r=a.length;nS;S++)if((h||S in x)&&(b=E(y=x[S],S,w),e))if(t)A[S]=b;else if(b)switch(e){case 3:return!0;case 5:return y;case 6:return S;case 2:c(A,y)}else switch(e){case 4:return!1;case 7:c(A,y)}return f?-1:o||l?l:A}};e.exports={forEach:l(0),map:l(1),filter:l(2),some:l(3),every:l(4),find:l(5),findIndex:l(6),filterReject:l(7)}},function(e,t,n){"use strict";var r=n(66),o=n(153),a=n(127),i=n(81),u=n(63).f,s=n(232),c=n(38),l=n(51),f="Array Iterator",p=i.set,h=i.getterFor(f);e.exports=s(Array,"Array",(function(e,t){p(this,{type:f,target:r(e),index:0,kind:t})}),(function(){var e=h(this),t=e.target,n=e.kind,r=e.index++;return!t||r>=t.length?(e.target=void 0,{value:void 0,done:!0}):"keys"==n?{value:r,done:!1}:"values"==n?{value:t[r],done:!1}:{value:[r,t[r]],done:!1}}),"values");var d=a.Arguments=a.Array;if(o("keys"),o("values"),o("entries"),!c&&l&&"values"!==d.name)try{u(d,"name",{value:"values"})}catch(e){}},function(e,t){e.exports=function(e){return null!=e&&"object"==typeof e}},function(e,t,n){var r=n(159);e.exports=r},function(e,t,n){e.exports=n(628)},function(e,t,n){"use strict";var r=n(957),o=n(958);function a(){this.protocol=null,this.slashes=null,this.auth=null,this.host=null,this.port=null,this.hostname=null,this.hash=null,this.search=null,this.query=null,this.pathname=null,this.path=null,this.href=null}t.parse=b,t.resolve=function(e,t){return b(e,!1,!0).resolve(t)},t.resolveObject=function(e,t){return e?b(e,!1,!0).resolveObject(t):t},t.format=function(e){o.isString(e)&&(e=b(e));return e instanceof 
a?e.format():a.prototype.format.call(e)},t.Url=a;var i=/^([a-z0-9.+-]+:)/i,u=/:[0-9]*$/,s=/^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/,c=["{","}","|","\\","^","`"].concat(["<",">",'"',"`"," ","\r","\n","\t"]),l=["'"].concat(c),f=["%","/","?",";","#"].concat(l),p=["/","?","#"],h=/^[+a-z0-9A-Z_-]{0,63}$/,d=/^([+a-z0-9A-Z_-]{0,63})(.*)$/,m={javascript:!0,"javascript:":!0},v={javascript:!0,"javascript:":!0},g={http:!0,https:!0,ftp:!0,gopher:!0,file:!0,"http:":!0,"https:":!0,"ftp:":!0,"gopher:":!0,"file:":!0},y=n(959);function b(e,t,n){if(e&&o.isObject(e)&&e instanceof a)return e;var r=new a;return r.parse(e,t,n),r}a.prototype.parse=function(e,t,n){if(!o.isString(e))throw new TypeError("Parameter 'url' must be a string, not "+typeof e);var a=e.indexOf("?"),u=-1!==a&&a127?N+="x":N+=P[M];if(!N.match(h)){var D=T.slice(0,O),L=T.slice(O+1),B=P.match(d);B&&(D.push(B[1]),L.unshift(B[2])),L.length&&(b="/"+L.join(".")+b),this.hostname=D.join(".");break}}}this.hostname.length>255?this.hostname="":this.hostname=this.hostname.toLowerCase(),j||(this.hostname=r.toASCII(this.hostname));var F=this.port?":"+this.port:"",z=this.hostname||"";this.host=z+F,this.href+=this.host,j&&(this.hostname=this.hostname.substr(1,this.hostname.length-2),"/"!==b[0]&&(b="/"+b))}if(!m[E])for(O=0,I=l.length;O0)&&n.host.split("@"))&&(n.auth=j.shift(),n.host=n.hostname=j.shift());return n.search=e.search,n.query=e.query,o.isNull(n.pathname)&&o.isNull(n.search)||(n.path=(n.pathname?n.pathname:"")+(n.search?n.search:"")),n.href=n.format(),n}if(!_.length)return n.pathname=null,n.search?n.path="/"+n.search:n.path=null,n.href=n.format(),n;for(var k=_.slice(-1)[0],A=(n.host||e.host||_.length>1)&&("."===k||".."===k)||""===k,O=0,C=_.length;C>=0;C--)"."===(k=_[C])?_.splice(C,1):".."===k?(_.splice(C,1),O++):O&&(_.splice(C,1),O--);if(!x&&!E)for(;O--;O)_.unshift("..");!x||""===_[0]||_[0]&&"/"===_[0].charAt(0)||_.unshift(""),A&&"/"!==_.join("/").substr(-1)&&_.push("");var j,T=""===_[0]||_[0]&&"/"===_[0].charAt(0);S&&(n.hostname=n.host=T?"":_.length?_.shift():"",(j=!!(n.host&&n.host.indexOf("@")>0)&&n.host.split("@"))&&(n.auth=j.shift(),n.host=n.hostname=j.shift()));return(x=x||n.host&&_.length)&&!T&&_.unshift(""),_.length?n.pathname=_.join("/"):(n.pathname=null,n.path=null),o.isNull(n.pathname)&&o.isNull(n.search)||(n.path=(n.pathname?n.pathname:"")+(n.search?n.search:"")),n.auth=e.auth||n.auth,n.slashes=n.slashes||e.slashes,n.href=n.format(),n},a.prototype.parseHost=function(){var e=this.host,t=u.exec(e);t&&(":"!==(t=t[0])&&(this.port=t.substr(1)),e=e.substr(0,e.length-t.length)),e&&(this.hostname=e)}},function(e,t,n){"use strict";n.r(t),n.d(t,"SHOW_AUTH_POPUP",(function(){return h})),n.d(t,"AUTHORIZE",(function(){return d})),n.d(t,"LOGOUT",(function(){return m})),n.d(t,"PRE_AUTHORIZE_OAUTH2",(function(){return v})),n.d(t,"AUTHORIZE_OAUTH2",(function(){return g})),n.d(t,"VALIDATE",(function(){return y})),n.d(t,"CONFIGURE_AUTH",(function(){return b})),n.d(t,"RESTORE_AUTHORIZATION",(function(){return w})),n.d(t,"showDefinitions",(function(){return x})),n.d(t,"authorize",(function(){return E})),n.d(t,"authorizeWithPersistOption",(function(){return _})),n.d(t,"logout",(function(){return S})),n.d(t,"logoutWithPersistOption",(function(){return k})),n.d(t,"preAuthorizeImplicit",(function(){return A})),n.d(t,"authorizeOauth2",(function(){return O})),n.d(t,"authorizeOauth2WithPersistOption",(function(){return C})),n.d(t,"authorizePassword",(function(){return j})),n.d(t,"authorizeApplication",(function(){return 
T})),n.d(t,"authorizeAccessCodeWithFormParams",(function(){return I})),n.d(t,"authorizeAccessCodeWithBasicAuthentication",(function(){return P})),n.d(t,"authorizeRequest",(function(){return N})),n.d(t,"configureAuth",(function(){return M})),n.d(t,"restoreAuthorization",(function(){return R})),n.d(t,"persistAuthorizationIfNeeded",(function(){return D}));var r=n(19),o=n.n(r),a=n(30),i=n.n(a),u=n(24),s=n.n(u),c=n(92),l=n.n(c),f=n(27),p=n(7),h="show_popup",d="authorize",m="logout",v="pre_authorize_oauth2",g="authorize_oauth2",y="validate",b="configure_auth",w="restore_authorization";function x(e){return{type:h,payload:e}}function E(e){return{type:d,payload:e}}var _=function(e){return function(t){var n=t.authActions;n.authorize(e),n.persistAuthorizationIfNeeded()}};function S(e){return{type:m,payload:e}}var k=function(e){return function(t){var n=t.authActions;n.logout(e),n.persistAuthorizationIfNeeded()}},A=function(e){return function(t){var n=t.authActions,r=t.errActions,o=e.auth,a=e.token,u=e.isValid,s=o.schema,c=o.name,l=s.get("flow");delete f.a.swaggerUIRedirectOauth2,"accessCode"===l||u||r.newAuthErr({authId:c,source:"auth",level:"warning",message:"Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"}),a.error?r.newAuthErr({authId:c,source:"auth",level:"error",message:i()(a)}):n.authorizeOauth2WithPersistOption({auth:o,token:a})}};function O(e){return{type:g,payload:e}}var C=function(e){return function(t){var n=t.authActions;n.authorizeOauth2(e),n.persistAuthorizationIfNeeded()}},j=function(e){return function(t){var n=t.authActions,r=e.schema,o=e.name,a=e.username,i=e.password,u=e.passwordType,c=e.clientId,l=e.clientSecret,f={grant_type:"password",scope:e.scopes.join(" "),username:a,password:i},h={};switch(u){case"request-body":!function(e,t,n){t&&s()(e,{client_id:t});n&&s()(e,{client_secret:n})}(f,c,l);break;case"basic":h.Authorization="Basic "+Object(p.a)(c+":"+l);break;default:console.warn("Warning: invalid passwordType ".concat(u," was passed, not including client id and secret"))}return n.authorizeRequest({body:Object(p.b)(f),url:r.get("tokenUrl"),name:o,headers:h,query:{},auth:e})}};var T=function(e){return function(t){var n=t.authActions,r=e.schema,o=e.scopes,a=e.name,i=e.clientId,u=e.clientSecret,s={Authorization:"Basic "+Object(p.a)(i+":"+u)},c={grant_type:"client_credentials",scope:o.join(" ")};return n.authorizeRequest({body:Object(p.b)(c),name:a,url:r.get("tokenUrl"),auth:e,headers:s})}},I=function(e){var t=e.auth,n=e.redirectUrl;return function(e){var r=e.authActions,o=t.schema,a=t.name,i=t.clientId,u=t.clientSecret,s=t.codeVerifier,c={grant_type:"authorization_code",code:t.code,client_id:i,client_secret:u,redirect_uri:n,code_verifier:s};return r.authorizeRequest({body:Object(p.b)(c),name:a,url:o.get("tokenUrl"),auth:t})}},P=function(e){var t=e.auth,n=e.redirectUrl;return function(e){var r=e.authActions,o=t.schema,a=t.name,i=t.clientId,u=t.clientSecret,s=t.codeVerifier,c={Authorization:"Basic "+Object(p.a)(i+":"+u)},l={grant_type:"authorization_code",code:t.code,client_id:i,redirect_uri:n,code_verifier:s};return r.authorizeRequest({body:Object(p.b)(l),name:a,url:o.get("tokenUrl"),auth:t,headers:c})}},N=function(e){return function(t){var n,r=t.fn,a=t.getConfigs,u=t.authActions,c=t.errActions,f=t.oas3Selectors,p=t.specSelectors,h=t.authSelectors,d=e.body,m=e.query,v=void 0===m?{}:m,g=e.headers,y=void 0===g?{}:g,b=e.name,w=e.url,x=e.auth,E=(h.getConfigs()||{}).additionalQueryStringParams;if(p.isOAS3()){var 
_=f.serverEffectiveValue(f.selectedServer());n=l()(w,_,!0)}else n=l()(w,p.url(),!0);"object"===o()(E)&&(n.query=s()({},n.query,E));var S=n.toString(),k=s()({Accept:"application/json, text/plain, */*","Content-Type":"application/x-www-form-urlencoded","X-Requested-With":"XMLHttpRequest"},y);r.fetch({url:S,method:"post",headers:k,query:v,body:d,requestInterceptor:a().requestInterceptor,responseInterceptor:a().responseInterceptor}).then((function(e){var t=JSON.parse(e.data),n=t&&(t.error||""),r=t&&(t.parseError||"");e.ok?n||r?c.newAuthErr({authId:b,level:"error",source:"auth",message:i()(t)}):u.authorizeOauth2WithPersistOption({auth:x,token:t}):c.newAuthErr({authId:b,level:"error",source:"auth",message:e.statusText})})).catch((function(e){var t=new Error(e).message;if(e.response&&e.response.data){var n=e.response.data;try{var r="string"==typeof n?JSON.parse(n):n;r.error&&(t+=", error: ".concat(r.error)),r.error_description&&(t+=", description: ".concat(r.error_description))}catch(e){}}c.newAuthErr({authId:b,level:"error",source:"auth",message:t})}))}};function M(e){return{type:b,payload:e}}function R(e){return{type:w,payload:e}}var D=function(){return function(e){var t=e.authSelectors;if((0,e.getConfigs)().persistAuthorization){var n=t.authorized();localStorage.setItem("authorized",i()(n.toJS()))}}}},function(e,t,n){var r=n(931);e.exports=function(e){for(var t=1;t0&&"/"!==t[0]}));function Se(e,t,n){var r;t=t||[];var o=xe.apply(void 0,s()(r=[e]).call(r,i()(t))).get("parameters",Object(I.List)());return E()(o).call(o,(function(e,t){var r=n&&"body"===t.get("in")?t.get("value_xml"):t.get("value");return e.set(Object(T.A)(t,{allowHashes:!1}),r)}),Object(I.fromJS)({}))}function ke(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(I.List.isList(e))return A()(e).call(e,(function(e){return I.Map.isMap(e)&&e.get("in")===t}))}function Ae(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";if(I.List.isList(e))return A()(e).call(e,(function(e){return I.Map.isMap(e)&&e.get("type")===t}))}function Oe(e,t){var n,r;t=t||[];var o=q(e).getIn(s()(n=["paths"]).call(n,i()(t)),Object(I.fromJS)({})),a=e.getIn(s()(r=["meta","paths"]).call(r,i()(t)),Object(I.fromJS)({})),u=Ce(e,t),c=o.get("parameters")||new I.List,l=a.get("consumes_value")?a.get("consumes_value"):Ae(c,"file")?"multipart/form-data":Ae(c,"formData")?"application/x-www-form-urlencoded":void 0;return Object(I.fromJS)({requestContentType:l,responseContentType:u})}function Ce(e,t){var n,r;t=t||[];var o=q(e).getIn(s()(n=["paths"]).call(n,i()(t)),null);if(null!==o){var a=e.getIn(s()(r=["meta","paths"]).call(r,i()(t),["produces_value"]),null),u=o.getIn(["produces",0],null);return a||u||"application/json"}}function je(e,t){var n;t=t||[];var r=q(e),a=r.getIn(s()(n=["paths"]).call(n,i()(t)),null);if(null!==a){var u=t,c=o()(u,1)[0],l=a.get("produces",null),f=r.getIn(["paths",c,"produces"],null),p=r.getIn(["produces"],null);return l||f||p}}function Te(e,t){var n;t=t||[];var r=q(e),a=r.getIn(s()(n=["paths"]).call(n,i()(t)),null);if(null!==a){var u=t,c=o()(u,1)[0],l=a.get("consumes",null),f=r.getIn(["paths",c,"consumes"],null),p=r.getIn(["consumes"],null);return l||f||p}}var Ie=function(e,t,n){var r=e.get("url").match(/^([a-z][a-z0-9+\-.]*):/),o=C()(r)?r[1]:null;return e.getIn(["scheme",t,n])||e.getIn(["scheme","_defaultScheme"])||o||""},Pe=function(e,t,n){var r;return d()(r=["http","https"]).call(r,Ie(e,t,n))>-1},Ne=function(e,t){var n;t=t||[];var 
[Minified Swagger UI JavaScript bundle — vendored third-party generated code (URL parsing, react-syntax-highlighter rendering, and highlight.js color themes such as nord, obsidian, and tomorrow-night); the minified source is truncated and omitted here.]